diff --git a/cloud/cpi_cmd_runner.go b/cloud/cpi_cmd_runner.go index 0cd79d4fc..a3fc0d074 100644 --- a/cloud/cpi_cmd_runner.go +++ b/cloud/cpi_cmd_runner.go @@ -67,11 +67,10 @@ type CPICmdRunner interface { } type cpiCmdRunner struct { - cmdRunner boshsys.CmdRunner - cpi CPI - logger boshlog.Logger - apiVersion int - logTag string + cmdRunner boshsys.CmdRunner + cpi CPI + logger boshlog.Logger + logTag string } func NewCPICmdRunner( diff --git a/cmd/clean_up.go b/cmd/clean_up.go index 3706164ac..20f2c8e12 100644 --- a/cmd/clean_up.go +++ b/cmd/clean_up.go @@ -20,7 +20,7 @@ func NewCleanUpCmd(ui boshui.UI, director boshdir.Director) CleanUpCmd { func (c CleanUpCmd) Run(opts CleanUpOpts) error { - if opts.DryRun == false { + if !opts.DryRun { err := c.ui.AskForConfirmation() if err != nil { return err @@ -77,7 +77,7 @@ func (c CleanUpCmd) PrintCleanUpTable(resp boshdir.CleanUp) { orphanedVmRows(resp.OrphanedVMs), } - for i, _ := range titles { + for i := range titles { table := boshtbl.Table{ Title: titles[i], Header: headers[i], @@ -85,9 +85,7 @@ func (c CleanUpCmd) PrintCleanUpTable(resp boshdir.CleanUp) { {Column: 0, Asc: true}, }, } - for _, r := range rows[i] { - table.Rows = append(table.Rows, r) - } + table.Rows = append(table.Rows, rows[i]...) c.ui.PrintTable(table) } } diff --git a/cmd/config/fs_config_test.go b/cmd/config/fs_config_test.go index cfff82872..1441ecb89 100644 --- a/cmd/config/fs_config_test.go +++ b/cmd/config/fs_config_test.go @@ -387,7 +387,7 @@ var _ = Describe("FSConfig", func() { }) It("returns creds with username/password if environment is found and basic creds are set", func() { - updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign + updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign,staticcheck Expect(err).ToNot(HaveOccurred()) updatedConfig = config.SetCredentials("url", Creds{Client: "user", ClientSecret: "pass"}) @@ -410,7 +410,7 @@ var _ = Describe("FSConfig", func() { }) It("returns creds with token if environment is found and token is set", func() { - updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign + updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign,staticcheck Expect(err).ToNot(HaveOccurred()) updatedConfig = config.SetCredentials("url", Creds{AccessToken: "access", AccessTokenType: "access-type", RefreshToken: "token"}) @@ -433,7 +433,7 @@ var _ = Describe("FSConfig", func() { }) It("returns creds for alias if environment is found and token is set", func() { - updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign + updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign,staticcheck Expect(err).ToNot(HaveOccurred()) updatedConfig = config.SetCredentials("alias", Creds{AccessToken: "access", AccessTokenType: "access-type", RefreshToken: "token"}) @@ -456,7 +456,7 @@ var _ = Describe("FSConfig", func() { }) It("does not update existing config when creds are set", func() { - updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign + updatedConfig, err := config.AliasEnvironment("url", "alias", "") //nolint:ineffassign,staticcheck Expect(err).ToNot(HaveOccurred()) updatedConfig = config.SetCredentials("url", Creds{Client: "user"}) diff --git a/cmd/deployment_manifest_parser.go b/cmd/deployment_manifest_parser.go index b3a8474f8..d169abc0c 100644 --- a/cmd/deployment_manifest_parser.go +++ 
b/cmd/deployment_manifest_parser.go @@ -78,7 +78,7 @@ func (y deploymentManifestParser) getManifest(path string, vars boshtpl.Variable return bosherr.WrapError(err, "Validating deployment manifest") } - if skipReleaseJobsValidation == false { + if !skipReleaseJobsValidation { err = y.deploymentValidator.ValidateReleaseJobs(deploymentManifest, y.releaseManager) if err != nil { return bosherr.WrapError(err, "Validating deployment jobs refer to jobs in release") diff --git a/cmd/env_factory.go b/cmd/env_factory.go index d9e387380..c05e71e0e 100644 --- a/cmd/env_factory.go +++ b/cmd/env_factory.go @@ -56,8 +56,7 @@ type envFactory struct { vmManagerFactory bivm.ManagerFactory stemcellManagerFactory bistemcell.ManagerFactory - instanceManagerFactory biinstance.ManagerFactory - deploymentManagerFactory bidepl.ManagerFactory + instanceManagerFactory biinstance.ManagerFactory agentClientFactory bihttpagent.AgentClientFactory blobstoreFactory biblobstore.Factory diff --git a/cmd/opts/opts.go b/cmd/opts/opts.go index 2300ceed6..0c10e60bc 100644 --- a/cmd/opts/opts.go +++ b/cmd/opts/opts.go @@ -48,7 +48,7 @@ type BoshOpts struct { UnaliasEnv UnaliasEnvOpts `command:"unalias-env" description:"Remove an aliased environment"` // Authentication - LogIn LogInOpts `command:"log-in" alias:"l" alias:"login" description:"Log in"` + LogIn LogInOpts `command:"log-in" alias:"l" alias:"login" description:"Log in"` //nolint:staticcheck LogOut LogOutOpts `command:"log-out" alias:"logout" description:"Log out"` // Tasks @@ -83,7 +83,7 @@ type BoshOpts struct { // Deployments Deployment DeploymentOpts `command:"deployment" alias:"dep" description:"Show deployment information"` - Deployments DeploymentsOpts `command:"deployments" alias:"ds" alias:"deps" description:"List deployments"` + Deployments DeploymentsOpts `command:"deployments" alias:"ds" alias:"deps" description:"List deployments"` //nolint:staticcheck DeleteDeployment DeleteDeploymentOpts `command:"delete-deployment" alias:"deld" description:"Delete deployment"` Deploy DeployOpts `command:"deploy" alias:"d" description:"Update deployment"` @@ -136,7 +136,7 @@ type BoshOpts struct { UpdateResurrection UpdateResurrectionOpts `command:"update-resurrection" description:"Enable/disable resurrection"` Ignore IgnoreOpts `command:"ignore" description:"Ignore an instance"` Unignore UnignoreOpts `command:"unignore" description:"Unignore an instance"` - CloudCheck CloudCheckOpts `command:"cloud-check" alias:"cck" alias:"cloudcheck" description:"Cloud consistency check and interactive repair"` + CloudCheck CloudCheckOpts `command:"cloud-check" alias:"cck" alias:"cloudcheck" description:"Cloud consistency check and interactive repair"` //nolint:staticcheck OrphanedVMs OrphanedVMsOpts `command:"orphaned-vms" description:"List all the orphaned VMs in all deployments"` // Instance management diff --git a/cmd/opts/opts_test.go b/cmd/opts/opts_test.go index fde0bc0e7..594f1151f 100644 --- a/cmd/opts/opts_test.go +++ b/cmd/opts/opts_test.go @@ -14,7 +14,7 @@ import ( ) var ( - dupSpaces = regexp.MustCompile("\\s{2,}") + dupSpaces = regexp.MustCompile(`\s{2,}`) ) func getStructTagForName(field string, opts interface{}) string { diff --git a/cmd/opts/var_flags.go b/cmd/opts/var_flags.go index ab1252128..cec6d6816 100644 --- a/cmd/opts/var_flags.go +++ b/cmd/opts/var_flags.go @@ -20,19 +20,19 @@ func (f VarFlags) AsVariables() boshtpl.Variables { staticVars := boshtpl.StaticVariables{} - for i, _ := range f.VarsEnvs { + for i := range f.VarsEnvs { for k, v := range 
f.VarsEnvs[i].Vars { staticVars[k] = v } } - for i, _ := range f.VarsFiles { + for i := range f.VarsFiles { for k, v := range f.VarsFiles[i].Vars { staticVars[k] = v } } - for i, _ := range f.VarFiles { + for i := range f.VarFiles { for k, v := range f.VarFiles[i].Vars { staticVars[k] = v } diff --git a/cmd/run_errand.go b/cmd/run_errand.go index 562031b5f..5400af505 100644 --- a/cmd/run_errand.go +++ b/cmd/run_errand.go @@ -107,10 +107,3 @@ func (c RunErrandCmd) summarize(errandName string, results []boshdir.ErrandResul return errandErr } - -func (c RunErrandCmd) printOutput(title, output string) { - if len(output) > 0 { - c.ui.PrintLinef("%s", title) - c.ui.PrintLinef("%s", output) - } -} diff --git a/cmd/session_test.go b/cmd/session_test.go index 72023f429..9700ad5cf 100644 --- a/cmd/session_test.go +++ b/cmd/session_test.go @@ -145,8 +145,7 @@ var _ = Describe("SessionImpl", func() { auth := req.Header.Get("Authorization") Expect(auth).To(BeEmpty(), "Authorization header must empty") }, - ghttp.RespondWith(http.StatusOK, fmt.Sprintf( - `{"user_authentication":{"type":"basic","options":{}}}`)), + ghttp.RespondWith(http.StatusOK, `{"user_authentication":{"type":"basic","options":{}}}`), ), ghttp.CombineHandlers( ghttp.VerifyRequest("GET", "/locks"), diff --git a/crypto/fakes/fake_digest_calculator.go b/crypto/fakes/fake_digest_calculator.go index 0f151a0ba..31395a75c 100644 --- a/crypto/fakes/fake_digest_calculator.go +++ b/crypto/fakes/fake_digest_calculator.go @@ -30,7 +30,7 @@ func (c *FakeDigestCalculator) CalculateString(data string) string { } var availableData []string - for key, _ := range c.CalculateStringInputs { + for key := range c.CalculateStringInputs { availableData = append(availableData, key) } diff --git a/deployment/instance/state/builder.go b/deployment/instance/state/builder.go index 28cf956f3..884d72046 100644 --- a/deployment/instance/state/builder.go +++ b/deployment/instance/state/builder.go @@ -93,7 +93,7 @@ func (b *builder) Build(jobName string, instanceID int, deploymentManifest bidep return nil, bosherr.WrapErrorf(err, "Compiling job package dependencies for instance '%s/%d'", jobName, instanceID) } - compiledDeploymentPackageRefs := make([]PackageRef, len(compiledPackageRefs), len(compiledPackageRefs)) + compiledDeploymentPackageRefs := make([]PackageRef, len(compiledPackageRefs)) for i, compiledPackageRef := range compiledPackageRefs { compiledDeploymentPackageRefs[i] = PackageRef{ @@ -107,7 +107,7 @@ func (b *builder) Build(jobName string, instanceID int, deploymentManifest bidep } // convert array to array - renderedJobRefs := make([]JobRef, len(releaseJobs), len(releaseJobs)) + renderedJobRefs := make([]JobRef, len(releaseJobs)) for i, releaseJob := range releaseJobs { renderedJobRefs[i] = JobRef{ @@ -168,7 +168,7 @@ func (b *builder) BuildInitialState(jobName string, instanceID int, deploymentMa // FIXME: why do i exist here and in installation/state/builder.go?? 
func (b *builder) resolveJobs(jobRefs []bideplmanifest.ReleaseJobRef) ([]bireljob.Job, error) { - releaseJobs := make([]bireljob.Job, len(jobRefs), len(jobRefs)) + releaseJobs := make([]bireljob.Job, len(jobRefs)) for i, jobRef := range jobRefs { release, err := b.releaseJobResolver.Resolve(jobRef.Name, jobRef.Release) if err != nil { diff --git a/deployment/instance/state/remote_package_compiler.go b/deployment/instance/state/remote_package_compiler.go index bd0d2d08d..365cf6313 100644 --- a/deployment/instance/state/remote_package_compiler.go +++ b/deployment/instance/state/remote_package_compiler.go @@ -46,7 +46,7 @@ func (c *remotePackageCompiler) Compile(pkg birelpkg.Compilable) (bistatepkg.Com if !pkg.IsCompiled() { // Resolve dependencies from map of previously compiled packages. // Only install the package's immediate dependencies. - packageDependencies := make([]biagentclient.BlobRef, len(pkg.Deps()), len(pkg.Deps())) + packageDependencies := make([]biagentclient.BlobRef, len(pkg.Deps())) for i, pkgDep := range pkg.Deps() { compiledPackageRecord, found, err := c.packageRepo.Find(pkgDep) diff --git a/deployment/instance/state/state.go b/deployment/instance/state/state.go index a70b2639f..fd741068c 100644 --- a/deployment/instance/state/state.go +++ b/deployment/instance/state/state.go @@ -81,7 +81,7 @@ func (s *state) CompiledPackages() []PackageRef { return s.compiledPackages } func (s *state) RenderedJobListArchive() BlobRef { return s.renderedJobListArchive } func (s *state) ToApplySpec() bias.ApplySpec { - jobTemplateList := make([]bias.Blob, len(s.renderedJobs), len(s.renderedJobs)) + jobTemplateList := make([]bias.Blob, len(s.renderedJobs)) for i, renderedJob := range s.renderedJobs { jobTemplateList[i] = bias.Blob{ Name: renderedJob.Name, diff --git a/deployment/manifest/parser.go b/deployment/manifest/parser.go index 57f90e82c..ddafa5ca4 100644 --- a/deployment/manifest/parser.go +++ b/deployment/manifest/parser.go @@ -197,7 +197,7 @@ func (p *parser) parseDeploymentManifest(depManifest manifest, path string) (Man } func (p *parser) parseJobManifests(rawJobs []job) ([]Job, error) { - jobs := make([]Job, len(rawJobs), len(rawJobs)) + jobs := make([]Job, len(rawJobs)) for i, rawJob := range rawJobs { job := Job{ Name: rawJob.Name, @@ -218,7 +218,7 @@ func (p *parser) parseJobManifests(rawJobs []job) ([]Job, error) { } if templates != nil { - releaseJobRefs := make([]ReleaseJobRef, len(templates), len(templates)) + releaseJobRefs := make([]ReleaseJobRef, len(templates)) for i, rawJobRef := range templates { ref := ReleaseJobRef{ Name: rawJobRef.Name, @@ -240,7 +240,7 @@ func (p *parser) parseJobManifests(rawJobs []job) ([]Job, error) { } if rawJob.Networks != nil { - jobNetworks := make([]JobNetwork, len(rawJob.Networks), len(rawJob.Networks)) + jobNetworks := make([]JobNetwork, len(rawJob.Networks)) for i, rawJobNetwork := range rawJob.Networks { jobNetwork := JobNetwork{ Name: rawJobNetwork.Name, @@ -248,7 +248,7 @@ func (p *parser) parseJobManifests(rawJobs []job) ([]Job, error) { } if rawJobNetwork.Defaults != nil { - networkDefaults := make([]NetworkDefault, len(rawJobNetwork.Defaults), len(rawJobNetwork.Defaults)) + networkDefaults := make([]NetworkDefault, len(rawJobNetwork.Defaults)) for i, rawDefaults := range rawJobNetwork.Defaults { networkDefaults[i] = NetworkDefault(rawDefaults) } @@ -275,7 +275,7 @@ func (p *parser) parseJobManifests(rawJobs []job) ([]Job, error) { } func (p *parser) parseNetworkManifests(rawNetworks []network) ([]Network, error) { - networks := 
make([]Network, len(rawNetworks), len(rawNetworks)) + networks := make([]Network, len(rawNetworks)) for i, rawNetwork := range rawNetworks { network := Network{ Name: rawNetwork.Name, @@ -310,7 +310,7 @@ func (p *parser) parseNetworkManifests(rawNetworks []network) ([]Network, error) } func (p *parser) parseResourcePoolManifests(rawResourcePools []resourcePool, path string) ([]ResourcePool, error) { - resourcePools := make([]ResourcePool, len(rawResourcePools), len(rawResourcePools)) + resourcePools := make([]ResourcePool, len(rawResourcePools)) for i, rawResourcePool := range rawResourcePools { resourcePool := ResourcePool{ Name: rawResourcePool.Name, @@ -342,7 +342,7 @@ func (p *parser) parseResourcePoolManifests(rawResourcePools []resourcePool, pat } func (p *parser) parseDiskPoolManifests(rawDiskPools []diskPool) ([]DiskPool, error) { - diskPools := make([]DiskPool, len(rawDiskPools), len(rawDiskPools)) + diskPools := make([]DiskPool, len(rawDiskPools)) for i, rawDiskPool := range rawDiskPools { diskPool := DiskPool{ Name: rawDiskPool.Name, diff --git a/deployment/manifest/validator.go b/deployment/manifest/validator.go index a0a419b71..8046296fb 100644 --- a/deployment/manifest/validator.go +++ b/deployment/manifest/validator.go @@ -37,6 +37,7 @@ func (v *validator) Validate(deploymentManifest Manifest, releaseSetManifest bir networksErrors := v.validateNetworks(deploymentManifest.Networks) errs = append(errs, networksErrors...) + protocolRegex := regexp.MustCompile("^(file|http|https)://") for idx, resourcePool := range deploymentManifest.ResourcePools { if v.isBlank(resourcePool.Name) { errs = append(errs, bosherr.Errorf("resource_pools[%d].name must be provided", idx)) @@ -51,8 +52,8 @@ func (v *validator) Validate(deploymentManifest Manifest, releaseSetManifest bir errs = append(errs, bosherr.Errorf("resource_pools[%d].stemcell.url must be provided", idx)) } - matched, err := regexp.MatchString("^(file|http|https)://", resourcePool.Stemcell.URL) - if err != nil || !matched { + matched := protocolRegex.MatchString(resourcePool.Stemcell.URL) + if !matched { errs = append(errs, bosherr.Errorf("resource_pools[%d].stemcell.url must be a valid URL (file:// or http(s)://)", idx)) } diff --git a/deployment/vm/manager.go b/deployment/vm/manager.go index 520a3dc3b..dba44a74a 100644 --- a/deployment/vm/manager.go +++ b/deployment/vm/manager.go @@ -6,7 +6,6 @@ import ( "code.cloudfoundry.org/clock" biagentclient "github.com/cloudfoundry/bosh-agent/agentclient" - bihttpagent "github.com/cloudfoundry/bosh-agent/agentclient/http" bicloud "github.com/cloudfoundry/bosh-cli/cloud" biconfig "github.com/cloudfoundry/bosh-cli/config" bideplmanifest "github.com/cloudfoundry/bosh-cli/deployment/manifest" @@ -24,17 +23,16 @@ type Manager interface { } type manager struct { - vmRepo biconfig.VMRepo - stemcellRepo biconfig.StemcellRepo - diskDeployer DiskDeployer - agentClient biagentclient.AgentClient - agentClientFactory bihttpagent.AgentClientFactory - cloud bicloud.Cloud - uuidGenerator boshuuid.Generator - fs boshsys.FileSystem - logger boshlog.Logger - logTag string - timeService Clock + vmRepo biconfig.VMRepo + stemcellRepo biconfig.StemcellRepo + diskDeployer DiskDeployer + agentClient biagentclient.AgentClient + cloud bicloud.Cloud + uuidGenerator boshuuid.Generator + fs boshsys.FileSystem + logger boshlog.Logger + logTag string + timeService Clock } func NewManager( diff --git a/director/cloud_configs_test.go b/director/cloud_configs_test.go index 
3f1da8193..6fa8e0ace 100644 --- a/director/cloud_configs_test.go +++ b/director/cloud_configs_test.go @@ -108,9 +108,7 @@ var _ = Describe("Director", func() { }) Describe("DiffCloudConfig", func() { - var expectedDiffResponse ConfigDiff - - expectedDiffResponse = ConfigDiff{ + expectedDiffResponse := ConfigDiff{ Diff: [][]interface{}{ []interface{}{"azs:", nil}, []interface{}{"- name: az2", "removed"}, diff --git a/director/cpi_configs_test.go b/director/cpi_configs_test.go index 296526abd..3be2df02a 100644 --- a/director/cpi_configs_test.go +++ b/director/cpi_configs_test.go @@ -108,9 +108,7 @@ var _ = Describe("Director", func() { }) Describe("DiffCPIConfig", func() { - var expectedDiffResponse ConfigDiff - - expectedDiffResponse = ConfigDiff{ + expectedDiffResponse := ConfigDiff{ Diff: [][]interface{}{ []interface{}{"cpis:", nil}, []interface{}{" name: smurf", "removed"}, diff --git a/director/deployment.go b/director/deployment.go index e63a9bc5b..aac1fbdb8 100644 --- a/director/deployment.go +++ b/director/deployment.go @@ -5,7 +5,6 @@ import ( "fmt" "net/http" "net/url" - gourl "net/url" "strings" bosherr "github.com/cloudfoundry/bosh-utils/errors" @@ -17,8 +16,6 @@ type DeploymentImpl struct { name string cloudConfig string - manifest string - releases []Release stemcells []Stemcell teams []string @@ -187,7 +184,7 @@ func (d DeploymentImpl) ExportRelease(release ReleaseSlug, os OSVersionSlug, job return ExportReleaseResult{}, err } - return ExportReleaseResult{BlobstoreID: resp.BlobstoreID, SHA1: resp.SHA1}, nil + return ExportReleaseResult(resp), nil } func (d DeploymentImpl) Update(manifest []byte, opts UpdateOpts) error { @@ -213,7 +210,7 @@ func (d DeploymentImpl) Delete(force bool) error { } func (d DeploymentImpl) AttachDisk(slug InstanceSlug, diskCID string, diskProperties string) error { - values := gourl.Values{} + values := url.Values{} values.Add("deployment", d.Name()) values.Add("job", slug.Name()) values.Add("instance_id", slug.IndexOrID()) @@ -242,7 +239,7 @@ func (d DeploymentImpl) IsInProgress() (bool, error) { } func (d DeploymentImpl) Variables() ([]VariableResult, error) { - url, err := gourl.Parse(fmt.Sprintf("/deployments/%s/variables", d.name)) + url, err := url.Parse(fmt.Sprintf("/deployments/%s/variables", d.name)) if err != nil { return nil, bosherr.WrapError(err, "Parsing variables path") } @@ -270,7 +267,7 @@ func (c Client) FetchLogs(deploymentName, job, indexOrID string, filters []strin indexOrID = "*" } - query := gourl.Values{} + query := url.Values{} if len(filters) > 0 { query.Add("filters", strings.Join(filters, ",")) @@ -374,7 +371,7 @@ func (c Client) NonConvergingJobAction(action string, deployment string, instanc setHeaders := func(req *http.Request) { req.Header.Add("Content-Type", "text/yaml") } - query := gourl.Values{} + query := url.Values{} if skipDrain { query.Add("skip_drain", "true") } @@ -405,7 +402,7 @@ func (c Client) ChangeJobState(state, deploymentName, job, indexOrID string, ski // allows to have empty job and indexOrID - query := gourl.Values{} + query := url.Values{} query.Add("state", state) @@ -522,7 +519,7 @@ func (c Client) ExportRelease(deploymentName string, release ReleaseSlug, os OSV } func (c Client) UpdateDeployment(manifest []byte, opts UpdateOpts) error { - query := gourl.Values{} + query := url.Values{} if opts.Recreate { query.Add("recreate", "true") @@ -586,7 +583,7 @@ func (c Client) DeleteDeployment(deploymentName string, force bool) error { return bosherr.Error("Expected non-empty deployment name") } 
- query := gourl.Values{} + query := url.Values{} if force { query.Add("force", "true") diff --git a/director/diff_test.go b/director/diff_test.go index 5710003e4..876cba4fd 100644 --- a/director/diff_test.go +++ b/director/diff_test.go @@ -32,9 +32,7 @@ var _ = Describe("Deployment", func() { }) Context("when diffing manifest with 'no redact' option", func() { - var expectedDiffResponse DeploymentDiffResponse - - expectedDiffResponse = DeploymentDiffResponse{ + expectedDiffResponse := DeploymentDiffResponse{ Context: map[string]interface{}{ "cloud_config_id": 2, "runtime_config_id": nil, diff --git a/director/releases.go b/director/releases.go index 973e8e039..219719fae 100644 --- a/director/releases.go +++ b/director/releases.go @@ -220,7 +220,7 @@ func (d DirectorImpl) ReleaseHasCompiledPackage(releaseSlug ReleaseSlug, osVersi stemcellFoundForPackage = true } } - if stemcellFoundForPackage == false { + if !stemcellFoundForPackage { return false, nil } } diff --git a/director/request_sanitizer.go b/director/request_sanitizer.go index c220422a7..82b289067 100644 --- a/director/request_sanitizer.go +++ b/director/request_sanitizer.go @@ -20,6 +20,4 @@ func (rs RequestSanitizer) sanitizeAuthorization() { rs.Request.Header.Del("Authorization") rs.Request.Header.Add("Authorization", "[removed]") } - - return } diff --git a/director/runtime_configs_test.go b/director/runtime_configs_test.go index d547961db..d0914b85d 100644 --- a/director/runtime_configs_test.go +++ b/director/runtime_configs_test.go @@ -139,9 +139,7 @@ var _ = Describe("Director", func() { }) Describe("DiffRuntimeConfig", func() { - var expectedDiffResponse ConfigDiff - - expectedDiffResponse = ConfigDiff{ + expectedDiffResponse := ConfigDiff{ Diff: [][]interface{}{ []interface{}{"release:", nil}, []interface{}{" version: 0.0.1", "removed"}, diff --git a/director/template/static_vars.go b/director/template/static_vars.go index 0b0175169..8f815e3e7 100644 --- a/director/template/static_vars.go +++ b/director/template/static_vars.go @@ -16,7 +16,7 @@ func (v StaticVariables) Get(varDef VariableDefinition) (interface{}, bool, erro func (v StaticVariables) List() ([]VariableDefinition, error) { var defs []VariableDefinition - for name, _ := range v.processed() { + for name := range v.processed() { defs = append(defs, VariableDefinition{Name: name}) } diff --git a/director/template/template.go b/director/template/template.go index ecb725e41..2f899845b 100644 --- a/director/template/template.go +++ b/director/template/template.go @@ -178,7 +178,7 @@ func (l varsLookup) Get(name string) (interface{}, bool, error) { return nil, false, err } - val, found, err = processPatchPointer(pointer, val) //nolint:ineffassign + val, found, err = processPatchPointer(pointer, val) //nolint:ineffassign,staticcheck if err != nil { tokens := []patch.Token{} for _, token := range pointer.Tokens() { @@ -252,7 +252,7 @@ func (t varsTracker) Get(name string) (interface{}, bool, error) { return nil, false, nil } - for name, _ := range defVarTracker.missing { + for name := range defVarTracker.missing { t.missing[name] = struct{}{} } @@ -273,7 +273,7 @@ func (t varsTracker) scopedVarsTracker(name string) (varsTracker, error) { varsTracker.defs = t.defs varsTracker.visited[name] = struct{}{} - for k, _ := range t.visited { + for k := range t.visited { varsTracker.visited[k] = struct{}{} } @@ -394,7 +394,7 @@ func (t varsTracker) ExtraError() error { func (t varsTracker) multiErr(mapWithNames map[string]struct{}) error { var names []string - for name, _ := 
range mapWithNames { + for name := range mapWithNames { names = append(names, name) } sort.Strings(names) diff --git a/go.mod b/go.mod index 627be6d22..31d067990 100644 --- a/go.mod +++ b/go.mod @@ -18,7 +18,7 @@ require ( github.com/dustin/go-humanize v1.0.0 github.com/fatih/color v1.13.0 github.com/golang/mock v1.6.0 - github.com/golangci/golangci-lint v1.45.2 + github.com/golangci/golangci-lint v1.46.2 github.com/hashicorp/go-multierror v1.1.1 github.com/jessevdk/go-flags v1.5.0 github.com/mattn/go-isatty v0.0.14 @@ -27,7 +27,8 @@ require ( github.com/onsi/gomega v1.19.0 github.com/vito/go-interact v1.0.1 golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9 - golang.org/x/tools v0.1.10 + golang.org/x/text v0.3.7 + golang.org/x/tools v0.1.11-0.20220316014157-77aa08bb151a gopkg.in/yaml.v2 v2.4.0 ) @@ -38,10 +39,11 @@ require ( cloud.google.com/go/iam v0.3.0 // indirect cloud.google.com/go/storage v1.22.0 // indirect code.cloudfoundry.org/tlsconfig v0.0.0-20211123175040-23cc9f05b6b3 // indirect - github.com/Antonboom/errname v0.1.5 // indirect - github.com/Antonboom/nilnil v0.1.0 // indirect - github.com/BurntSushi/toml v1.0.0 // indirect + github.com/Antonboom/errname v0.1.6 // indirect + github.com/Antonboom/nilnil v0.1.1 // indirect + github.com/BurntSushi/toml v1.1.0 // indirect github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect + github.com/GaijinEntertainment/go-exhaustruct/v2 v2.1.0 // indirect github.com/Masterminds/semver v1.5.0 // indirect github.com/OpenPeeDeeP/depguard v1.1.0 // indirect github.com/VividCortex/ewma v1.2.0 // indirect @@ -51,16 +53,16 @@ require ( github.com/aws/aws-sdk-go v1.44.14 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bkielbasa/cyclop v1.2.0 // indirect - github.com/blizzy78/varnamelen v0.6.1 // indirect + github.com/blizzy78/varnamelen v0.8.0 // indirect github.com/bmatcuk/doublestar v1.3.4 // indirect github.com/bombsimon/wsl/v3 v3.3.0 // indirect - github.com/breml/bidichk v0.2.2 // indirect - github.com/breml/errchkjson v0.2.3 // indirect + github.com/breml/bidichk v0.2.3 // indirect + github.com/breml/errchkjson v0.3.0 // indirect github.com/butuzov/ireturn v0.1.1 // indirect github.com/cespare/xxhash/v2 v2.1.2 // indirect github.com/charithe/durationcheck v0.0.9 // indirect github.com/charlievieth/fs v0.0.2 // indirect - github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af // indirect + github.com/chavacava/garif v0.0.0-20220316182200-5cad0b5181d4 // indirect github.com/cloudfoundry/go-socks5 v0.0.0-20180221174514-54f73bdb8a8e // indirect github.com/daixiang0/gci v0.3.3 // indirect github.com/davecgh/go-spew v1.1.1 // indirect @@ -68,9 +70,10 @@ require ( github.com/esimonov/ifshort v1.0.4 // indirect github.com/ettle/strcase v0.1.1 // indirect github.com/fatih/structtag v1.2.0 // indirect + github.com/firefart/nonamedreturns v1.0.1 // indirect github.com/fsnotify/fsnotify v1.5.4 // indirect - github.com/fzipp/gocyclo v0.4.0 // indirect - github.com/go-critic/go-critic v0.6.2 // indirect + github.com/fzipp/gocyclo v0.5.1 // indirect + github.com/go-critic/go-critic v0.6.3 // indirect 
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 // indirect github.com/go-toolsmith/astcast v1.0.0 // indirect github.com/go-toolsmith/astcopy v1.0.0 // indirect @@ -86,7 +89,7 @@ require ( github.com/golang/protobuf v1.5.2 // indirect github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect - github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 // indirect + github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe // indirect github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a // indirect github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect @@ -114,61 +117,65 @@ require ( github.com/julz/importas v0.1.0 // indirect github.com/kisielk/errcheck v1.6.0 // indirect github.com/kisielk/gotool v1.0.0 // indirect - github.com/kulti/thelper v0.5.1 // indirect + github.com/kulti/thelper v0.6.2 // indirect github.com/kunwardeep/paralleltest v1.0.3 // indirect github.com/kyoh86/exportloopref v0.1.8 // indirect - github.com/ldez/gomoddirectives v0.2.2 // indirect + github.com/ldez/gomoddirectives v0.2.3 // indirect github.com/ldez/tagliatelle v0.3.1 // indirect github.com/leonklingele/grouper v1.1.0 // indirect - github.com/magiconair/properties v1.8.5 // indirect + github.com/lufeee/execinquery v1.2.1 // indirect + github.com/magiconair/properties v1.8.6 // indirect github.com/maratori/testpackage v1.0.1 // indirect github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 // indirect github.com/mattn/go-colorable v0.1.12 // indirect github.com/mattn/go-runewidth v0.0.13 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect github.com/mbilski/exhaustivestruct v1.2.0 // indirect - github.com/mgechev/revive v1.1.4 // indirect + github.com/mgechev/revive v1.2.1 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect - github.com/mitchellh/mapstructure v1.4.3 // indirect + github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/moricho/tparallel v0.2.1 // indirect github.com/nakabonne/nestif v0.3.1 // indirect github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 // indirect github.com/nishanths/exhaustive v0.7.11 // indirect - github.com/nishanths/predeclared v0.2.1 // indirect + github.com/nishanths/predeclared v0.2.2 // indirect github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d // indirect github.com/nxadm/tail v1.4.8 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect - github.com/pelletier/go-toml v1.9.4 // indirect + github.com/pelletier/go-toml v1.9.5 // indirect + github.com/pelletier/go-toml/v2 v2.0.0 // indirect github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d // indirect github.com/pivotal-cf/paraphernalia v0.0.0-20180203224945-a64ae2051c20 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b // indirect - 
github.com/prometheus/client_golang v1.7.1 // indirect + github.com/polyfloyd/go-errorlint v1.0.0 // indirect + github.com/prometheus/client_golang v1.12.1 // indirect github.com/prometheus/client_model v0.2.0 // indirect - github.com/prometheus/common v0.10.0 // indirect - github.com/prometheus/procfs v0.6.0 // indirect - github.com/quasilyte/go-ruleguard v0.3.15 // indirect - github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3 // indirect + github.com/prometheus/common v0.32.1 // indirect + github.com/prometheus/procfs v0.7.3 // indirect + github.com/quasilyte/go-ruleguard v0.3.16-0.20220213074421-6aa060fab41a // indirect + github.com/quasilyte/gogrep v0.0.0-20220120141003-628d8b3623b5 // indirect github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 // indirect + github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect github.com/rivo/uniseg v0.2.0 // indirect github.com/ryancurrah/gomodguard v1.2.3 // indirect github.com/ryanrolds/sqlclosecheck v0.3.0 // indirect github.com/sanposhiho/wastedassign/v2 v2.0.6 // indirect - github.com/securego/gosec/v2 v2.10.0 // indirect + github.com/securego/gosec/v2 v2.11.0 // indirect github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect github.com/sirupsen/logrus v1.8.1 // indirect github.com/sivchari/containedctx v1.0.2 // indirect - github.com/sivchari/tenv v1.4.7 // indirect + github.com/sivchari/tenv v1.5.0 // indirect github.com/sonatard/noctx v0.0.1 // indirect github.com/sourcegraph/go-diff v0.6.1 // indirect - github.com/spf13/afero v1.6.0 // indirect + github.com/spf13/afero v1.8.2 // indirect github.com/spf13/cast v1.4.1 // indirect github.com/spf13/cobra v1.4.0 // indirect github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/spf13/pflag v1.0.5 // indirect - github.com/spf13/viper v1.10.1 // indirect + github.com/spf13/viper v1.11.0 // indirect github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect + github.com/stbenjam/no-sprintf-host-port v0.1.1 // indirect github.com/stretchr/objx v0.1.1 // indirect github.com/stretchr/testify v1.7.1 // indirect github.com/subosito/gotenv v1.2.0 // indirect @@ -176,33 +183,33 @@ require ( github.com/tdakkota/asciicheck v0.1.1 // indirect github.com/tetafro/godot v1.4.11 // indirect github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144 // indirect - github.com/tomarrell/wrapcheck/v2 v2.5.0 // indirect + github.com/tomarrell/wrapcheck/v2 v2.6.1 // indirect github.com/tommy-muehle/go-mnd/v2 v2.5.0 // indirect github.com/ultraware/funlen v0.0.3 // indirect github.com/ultraware/whitespace v0.0.5 // indirect github.com/uudashr/gocognit v1.0.5 // indirect github.com/yagipy/maintidx v1.0.0 // indirect - github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1 // indirect + github.com/yeya24/promlinter v0.2.0 // indirect gitlab.com/bosi/decorder v0.2.1 // indirect go.opencensus.io v0.23.0 // indirect + golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e // indirect golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect golang.org/x/net v0.0.0-20220513224357-95641704303c // indirect 
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5 // indirect golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect golang.org/x/sys v0.0.0-20220513210249-45d2b4557a2a // indirect golang.org/x/term v0.0.0-20220411215600-e5f449aeb171 // indirect - golang.org/x/text v0.3.7 // indirect golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect google.golang.org/api v0.79.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20220505152158-f39f71e6c8f3 // indirect google.golang.org/grpc v1.46.2 // indirect google.golang.org/protobuf v1.28.0 // indirect - gopkg.in/ini.v1 v1.66.2 // indirect + gopkg.in/ini.v1 v1.66.4 // indirect gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect - honnef.co/go/tools v0.2.2 // indirect - mvdan.cc/gofumpt v0.3.0 // indirect + honnef.co/go/tools v0.3.1 // indirect + mvdan.cc/gofumpt v0.3.1 // indirect mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect mvdan.cc/unparam v0.0.0-20211214103731-d0ef000c54e5 // indirect diff --git a/go.sum b/go.sum index 6ee214872..52e1dd6ad 100644 --- a/go.sum +++ b/go.sum @@ -6,6 +6,7 @@ cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMT cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.44.3/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= @@ -19,6 +20,7 @@ cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOY cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.75.0/go.mod h1:VGuuCn7PG0dwsd5XPVm2Mm3wlh3EL55/79EKB6hlPTY= cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= @@ -47,7 +49,7 @@ cloud.google.com/go/compute v1.6.1 h1:2sMmt8prCn7DPaG4Pmh0N3Inmc8cT8ae5k1M6VJ9Wq cloud.google.com/go/compute v1.6.1/go.mod h1:g85FgpzFvNULZ+S8AYq87axRKuf2Kh7deLqV/jJ3thU= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= -cloud.google.com/go/firestore v1.6.0/go.mod h1:afJwI0vaXwAG54kI7A//lP/lSPDkQORQuMkv56TxEPU= +cloud.google.com/go/firestore v1.6.1/go.mod h1:asNXNOzBdyVQmEU+ggO8UPodTkEVFW5Qx+rwHnAz+EY= cloud.google.com/go/iam v0.3.0 h1:exkAomrVUuzx9kWFI1wm3KI0uoDeUFPB4kKGzx6x+Gc= cloud.google.com/go/iam v0.3.0/go.mod h1:XzJPvDayI+9zsASAFO68Hk07u3z+f+JrT2xXNdp4bnY= cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= @@ -61,6 +63,7 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo 
cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +cloud.google.com/go/storage v1.14.0/go.mod h1:GrKmX003DSIwi9o29oFT7YDnHYwZoctc3fOKtUw0Xmo= cloud.google.com/go/storage v1.22.0 h1:NUV0NNp9nkBuW66BFRLuMgldN60C57ET3dhbwLIYio8= cloud.google.com/go/storage v1.22.0/go.mod h1:GbaLEoMqbVm6sx3Z0R++gSiBlgMv6yUi2q1DeGFKQgE= code.cloudfoundry.org/clock v1.0.0 h1:kFXWQM4bxYvdBw2X8BbBeXwQNgfoWv1vqAk2ZZyBN2o= @@ -71,16 +74,19 @@ code.cloudfoundry.org/workpool v0.0.0-20200131000409-2ac56b354115 h1:xm2g6uJ31Uk code.cloudfoundry.org/workpool v0.0.0-20200131000409-2ac56b354115/go.mod h1:O9HdfntfyDvYRH9nh03XdpnGMbjyZVi8nb2Kh+6hDho= contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= -github.com/Antonboom/errname v0.1.5 h1:IM+A/gz0pDhKmlt5KSNTVAvfLMb+65RxavBXpRtCUEg= -github.com/Antonboom/errname v0.1.5/go.mod h1:DugbBstvPFQbv/5uLcRRzfrNqKE9tVdVCqWCLp6Cifo= -github.com/Antonboom/nilnil v0.1.0 h1:DLDavmg0a6G/F4Lt9t7Enrbgb3Oph6LnDE6YVsmTt74= -github.com/Antonboom/nilnil v0.1.0/go.mod h1:PhHLvRPSghY5Y7mX4TW+BHZQYo1A8flE5H20D3IPZBo= +github.com/Antonboom/errname v0.1.6 h1:LzIJZlyLOCSu51o3/t2n9Ck7PcoP9wdbrdaW6J8fX24= +github.com/Antonboom/errname v0.1.6/go.mod h1:7lz79JAnuoMNDAWE9MeeIr1/c/VpSUWatBv2FH9NYpI= +github.com/Antonboom/nilnil v0.1.1 h1:PHhrh5ANKFWRBh7TdYmyyq2gyT2lotnvFvvFbylF81Q= +github.com/Antonboom/nilnil v0.1.1/go.mod h1:L1jBqoWM7AOeTD+tSquifKSesRHs4ZdaxvZR+xdJEaI= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/BurntSushi/toml v1.0.0 h1:dtDWrepsVPfW9H/4y7dDgFc2MBUSeJhlaDtK13CxFlU= -github.com/BurntSushi/toml v1.0.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= +github.com/BurntSushi/toml v1.1.0 h1:ksErzDEI1khOiGPgpwuI7x2ebx/uXQNw7xJpn9Eq1+I= +github.com/BurntSushi/toml v1.1.0/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= +github.com/GaijinEntertainment/go-exhaustruct/v2 v2.1.0 h1:LAPPhJ4KR5Z8aKVZF5S48csJkxL5RMKmE/98fMs1u5M= +github.com/GaijinEntertainment/go-exhaustruct/v2 v2.1.0/go.mod h1:LGOGuvEgCfCQsy3JF2tRmpGDpzA53iZfyGEWSPwQ6/4= github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= @@ -97,6 +103,7 @@ github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuy github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod 
h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= @@ -105,6 +112,7 @@ github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/ashanbrown/forbidigo v1.3.0 h1:VkYIwb/xxdireGAdJNZoo24O4lmnEWkactplBlWTShc= @@ -123,16 +131,16 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7A= github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= -github.com/blizzy78/varnamelen v0.6.1 h1:kttPCLzXFa+0nt++Cw9fb7GrSSM4KkyIAoX/vXsbuqA= -github.com/blizzy78/varnamelen v0.6.1/go.mod h1:zy2Eic4qWqjrxa60jG34cfL0VXcSwzUrIx68eJPb4Q8= +github.com/blizzy78/varnamelen v0.8.0 h1:oqSblyuQvFsW1hbBHh1zfwrKe3kcSj0rnXkKzsQ089M= +github.com/blizzy78/varnamelen v0.8.0/go.mod h1:V9TzQZ4fLJ1DSrjVDfl89H7aMnTvKkApdHeyESmyR7k= github.com/bmatcuk/doublestar v1.3.4 h1:gPypJ5xD31uhX6Tf54sDPUOBXTqKH4c9aPY66CyQrS0= github.com/bmatcuk/doublestar v1.3.4/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= github.com/bombsimon/wsl/v3 v3.3.0 h1:Mka/+kRLoQJq7g2rggtgQsjuI/K5Efd87WX96EWFxjM= github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= -github.com/breml/bidichk v0.2.2 h1:w7QXnpH0eCBJm55zGCTJveZEkQBt6Fs5zThIdA6qQ9Y= -github.com/breml/bidichk v0.2.2/go.mod h1:zbfeitpevDUGI7V91Uzzuwrn4Vls8MoBMrwtt78jmso= -github.com/breml/errchkjson v0.2.3 h1:97eGTmR/w0paL2SwfRPI1jaAZHaH/fXnxWTw2eEIqE0= -github.com/breml/errchkjson v0.2.3/go.mod h1:jZEATw/jF69cL1iy7//Yih8yp/mXp2CBoBr9GJwCAsY= +github.com/breml/bidichk v0.2.3 h1:qe6ggxpTfA8E75hdjWPZ581sY3a2lnl0IRxLQFelECI= +github.com/breml/bidichk v0.2.3/go.mod h1:8u2C6DnAy0g2cEq+k/A2+tr9O1s+vHGxWn0LTc70T2A= +github.com/breml/errchkjson v0.3.0 h1:YdDqhfqMT+I1vIxPSas44P+9Z9HzJwCeAzjB8PxP1xw= +github.com/breml/errchkjson v0.3.0/go.mod 
h1:9Cogkyv9gcT8HREpzi3TiqBxCqDzo8awa92zSDFcofU= github.com/butuzov/ireturn v0.1.1 h1:QvrO2QF2+/Cx1WA/vETCIYBKtRjc30vesdoPUNo1EbY= github.com/butuzov/ireturn v0.1.1/go.mod h1:Wh6Zl3IMtTpaIKbmwzqi6olnM9ptYQxxVacMsOEFPoc= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= @@ -144,13 +152,15 @@ github.com/charithe/durationcheck v0.0.9 h1:mPP4ucLrf/rKZiIG/a9IPXHGlh8p4CzgpyTy github.com/charithe/durationcheck v0.0.9/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= github.com/charlievieth/fs v0.0.2 h1:vu3+e3KRrIguFN4HoYqkeM9TSjduugtsfHZwFvj/PBg= github.com/charlievieth/fs v0.0.2/go.mod h1:74vroF06jvR8XMafvi2CYzs8WruHL1axh/qFx7XN5Xw= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af h1:spmv8nSH9h5oCQf40jt/ufBCt9j0/58u4G+rkeMqXGI= -github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= +github.com/chavacava/garif v0.0.0-20220316182200-5cad0b5181d4 h1:tFXjAxje9thrTF4h57Ckik+scJjTWdwAtZqZPtOT48M= +github.com/chavacava/garif v0.0.0-20220316182200-5cad0b5181d4/go.mod h1:W8EnPSQ8Nv4fUjc/v1/8tHFqhuOJXnRub0dTfuAQktU= github.com/cheggaaa/pb/v3 v3.0.8 h1:bC8oemdChbke2FHIIGy9mn4DPJ2caZYQnfbRqwmdCoA= github.com/cheggaaa/pb/v3 v3.0.8/go.mod h1:UICbiLec/XO6Hw6k+BHEtHeQFzzBH4i2/qk/ow1EJTA= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= +github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cloudfoundry/bosh-agent v2.367.0+incompatible h1:cdpYNIqi69cIl1JP8D/3nvQcnPP0su0xapMzzBMXOSw= github.com/cloudfoundry/bosh-agent v2.367.0+incompatible/go.mod h1:7UvVn5vc/d6icLrBx6GhBlpSMwe2+x1C2A7x4TbPhiU= @@ -232,26 +242,29 @@ github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= +github.com/firefart/nonamedreturns v1.0.1 h1:fSvcq6ZpK/uBAgJEGMvzErlzyM4NELLqqdTofVjVNag= +github.com/firefart/nonamedreturns v1.0.1/go.mod h1:D3dpIBojGGNh5UfElmwPu73SwDCm+VKhHYqwlNOk2uQ= github.com/frankban/quicktest v1.14.2 h1:SPb1KFFmM+ybpEjPUhCCkZOM5xlovT5UbrMvWnXyBns= -github.com/frankban/quicktest v1.14.2/go.mod h1:mgiwOwqx65TmIk1wJ6Q7wvnVMocbUorkibMOrVTHZps= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.1/go.mod h1:T3375wBYaZdLLcVNkcVbzGHY7f1l/uK5T5Ai1i3InKU= github.com/fsnotify/fsnotify v1.5.4 h1:jRbGcIw6P2Meqdwuo0H1p6JVLbL5DHKAKlYndzMwVZI= 
github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= -github.com/fzipp/gocyclo v0.4.0 h1:IykTnjwh2YLyYkGa0y92iTTEQcnyAz0r9zOo15EbJ7k= -github.com/fzipp/gocyclo v0.4.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= +github.com/fzipp/gocyclo v0.5.1 h1:L66amyuYogbxl0j2U+vGqJXusPF2IkduvXLnYD5TFgw= +github.com/fzipp/gocyclo v0.5.1/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/go-critic/go-critic v0.6.2 h1:L5SDut1N4ZfsWZY0sH4DCrsHLHnhuuWak2wa165t9gs= -github.com/go-critic/go-critic v0.6.2/go.mod h1:td1s27kfmLpe5G/DPjlnFI7o1UCzePptwU7Az0V5iCM= +github.com/go-critic/go-critic v0.6.3 h1:abibh5XYBTASawfTQ0rA7dVtQT+6KzpGqb/J+DxRDaw= +github.com/go-critic/go-critic v0.6.3/go.mod h1:c6b3ZP1MQ7o6lPR7Rv3lEf7pYQUmAcx8ABHgdZCQt/k= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= @@ -328,12 +341,12 @@ github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5 github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= -github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= -github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= +github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe h1:6RGUuS7EGotKx6J5HIP8ZtyMdiDscjMLfRBSPuzVVeo= +github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe/go.mod h1:gjqyPShc/m8pEMpk0a3SeagVb0kaqvhscv+i9jI5ZhQ= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= 
-github.com/golangci/golangci-lint v1.45.2 h1:9I3PzkvscJkFAQpTQi5Ga0V4qWdJERajX1UZ7QqkW+I= -github.com/golangci/golangci-lint v1.45.2/go.mod h1:f20dpzMmUTRp+oYnX0OGjV1Au3Jm2JeI9yLqHq1/xsI= +github.com/golangci/golangci-lint v1.46.2 h1:o90t/Xa6dhJbvy8Bz2RpzUXqrkigp19DLStMolTZbyo= +github.com/golangci/golangci-lint v1.46.2/go.mod h1:3DkdHnxn9eoTTrpT2gB0TEv8KSziuoqe9FitgQLHvAY= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= @@ -381,6 +394,7 @@ github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hf github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201218002935-b9804c9f04c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= @@ -404,6 +418,7 @@ github.com/googleapis/gax-go/v2 v2.4.0 h1:dS9eYAjhrE2RjmzYw2XAPvcXfmcQLtFEQWn0CR github.com/googleapis/gax-go/v2 v2.4.0/go.mod h1:XOTVJ59hdnfJLIP/dh8n5CGryZR2LxK9wbMD5+iXC6c= github.com/googleapis/go-type-adapters v1.0.0 h1:9XdMn+d/G57qq1s8dNc5IesGCXHf6V2HZ2JwRxfA2tA= github.com/googleapis/go-type-adapters v1.0.0/go.mod h1:zHW75FOG2aur7gAO2B+MLby+cLsWGBF62rFAi7WjWO4= +github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo= github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= github.com/gordonklaus/ineffassign v0.0.0-20210914165742-4cc7213b9bc8 h1:PVRE9d4AQKmbelZ7emNig1+NT27DUmKZn5qXxfio54U= @@ -415,7 +430,6 @@ github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= -github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= github.com/gostaticanalysis/analysisutil v0.7.1 h1:ZMCjoue3DtDWQ5WyU16YbjbQEQ3VuzwxALrpYd+HeKk= github.com/gostaticanalysis/analysisutil v0.7.1/go.mod h1:v21E3hY37WKMGSnbsw2S/ojApNWb6C1//mXO48CXbVc= github.com/gostaticanalysis/comment v1.3.0/go.mod 
h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= @@ -435,19 +449,24 @@ github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgf github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= -github.com/hashicorp/consul/api v1.10.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M= +github.com/hashicorp/consul/api v1.12.0/go.mod h1:6pVBMo0ebnYdt2S3H87XhekM/HHrUoTD2XXb/VrZVy0= github.com/hashicorp/consul/sdk v0.8.0/go.mod h1:GBvyrGALthsZObzUGsfgHZQDXjg4lOjagTIwIR1vPms= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v0.12.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= +github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ= github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= +github.com/hashicorp/go-retryablehttp v0.5.3/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= @@ -462,9 +481,10 @@ github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uG github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= -github.com/hashicorp/mdns v1.0.1/go.mod h1:4gW7WsVCke5TE7EPeYliwHlRUyBtfCwuFwuMg2DmyNY= -github.com/hashicorp/memberlist v0.2.2/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= -github.com/hashicorp/serf v0.9.5/go.mod h1:UWDWwZeL5cuWDJdl0C6wrvrUwEqtQ4ZKBKKENpqIUyk= +github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc= 
+github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE= +github.com/hashicorp/serf v0.9.6/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= +github.com/hashicorp/serf v0.9.7/go.mod h1:TXZNMjZQijwlDvp+r0b63xZ45H7JmCmgg4gpTwn9UV4= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg= github.com/howeyc/gopass v0.0.0-20170109162249-bf9dde6d0d2c/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs= @@ -498,12 +518,15 @@ github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2E github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/julz/importas v0.1.0 h1:F78HnrsjY3cR7j0etXy5+TU1Zuy7Xt08X/1aJnH5xXY= github.com/julz/importas v0.1.0/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= @@ -516,27 +539,27 @@ github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/pty v1.1.8 
h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI= github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kulti/thelper v0.5.1 h1:Uf4CUekH0OvzQTFPrWkstJvXgm6pnNEtQu3HiqEkpB0= -github.com/kulti/thelper v0.5.1/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= +github.com/kulti/thelper v0.6.2 h1:K4xulKkwOCnT1CDms6Ex3uG1dvSMUUQe9zxgYQgbRXs= +github.com/kulti/thelper v0.6.2/go.mod h1:DsqKShOvP40epevkFrvIwkCMNYxMeTNjdWL4dqWHZ6I= github.com/kunwardeep/paralleltest v1.0.3 h1:UdKIkImEAXjR1chUWLn+PNXqWUGs//7tzMeWuP7NhmI= github.com/kunwardeep/paralleltest v1.0.3/go.mod h1:vLydzomDFpk7yu5UX02RmP0H8QfRPOV/oFhWN85Mjb4= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= -github.com/ldez/gomoddirectives v0.2.2 h1:p9/sXuNFArS2RLc+UpYZSI4KQwGMEDWC/LbtF5OPFVg= -github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= +github.com/ldez/gomoddirectives v0.2.3 h1:y7MBaisZVDYmKvt9/l1mjNCiSA1BVn34U0ObUcJwlhA= +github.com/ldez/gomoddirectives v0.2.3/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= github.com/ldez/tagliatelle v0.3.1 h1:3BqVVlReVUZwafJUwQ+oxbx2BEX2vUG4Yu/NOfMiKiM= github.com/ldez/tagliatelle v0.3.1/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= github.com/leonklingele/grouper v1.1.0 h1:tC2y/ygPbMFSBOs3DcyaEMKnnwH7eYKzohOtRrf0SAg= @@ -546,10 +569,11 @@ github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.4/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= +github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= -github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= +github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/maratori/testpackage v1.0.1 h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ= github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 h1:pWxk9e//NbPwfxat7RXkts09K+dEBJWakUWwICVqYbA= @@ -586,11 +610,11 @@ github.com/maxbrunsfeld/counterfeiter/v6 v6.4.1/go.mod 
h1:DK1Cjkc0E49ShgRVs5jy5A github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= -github.com/mgechev/revive v1.1.4 h1:sZOjY6GU35Kr9jKa/wsKSHgrFz8eASIB5i3tqWZMp0A= -github.com/mgechev/revive v1.1.4/go.mod h1:ZZq2bmyssGh8MSPz3VVziqRNIMYTJXzP8MUKG90vZ9A= -github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mgechev/revive v1.2.1 h1:GjFml7ZsoR0IrQ2E2YIvWFNS5GPDV7xNwvA5GM1HZC4= +github.com/mgechev/revive v1.2.1/go.mod h1:+Ro3wqY4vakcYNtkBWdZC7dBg1xSB6sp054wWwmeFm0= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= @@ -600,21 +624,23 @@ github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrk github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.4.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mitchellh/mapstructure v1.4.3 h1:OVowDSCllw/YjdLkam3/sm7wEtOy59d8ndGgCcyj8cs= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= +github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= github.com/moricho/tparallel v0.2.1 h1:95FytivzT6rYzdJLdtfn6m1bfFJylOJK41+lgv/EHf4= github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= github.com/mozilla/scribe v0.0.0-20180711195314-fb71baf557c1/go.mod 
h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= github.com/nakabonne/nestif v0.3.1 h1:wm28nZjhQY5HyYPx+weN3Q65k6ilSBxDb8v5S81B81U= @@ -625,8 +651,8 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLA github.com/nishanths/exhaustive v0.7.11 h1:xV/WU3Vdwh5BUH4N06JNUznb6d5zhRPOnlgCrpNYNKA= github.com/nishanths/exhaustive v0.7.11/go.mod h1:gX+MP7DWMKJmNa1HfMozK+u04hQd3na9i0hyqf3/dOI= github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= -github.com/nishanths/predeclared v0.2.1 h1:1TXtjmy4f3YCFjTxRd8zcFHOmoUir+gp0ESzjFzG2sw= -github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= +github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk= +github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d h1:VhgPp6v9qf9Agr/56bj7Y/xa04UccTW04VP0Qed4vnQ= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= @@ -662,9 +688,14 @@ github.com/otiai10/curr v1.0.0/go.mod h1:LskTG5wDwr8Rs+nNQ+1LlxRjAtTZZjtJW4rMXl6 github.com/otiai10/mint v1.3.0/go.mod h1:F5AjcsTsWUqX+Na9fpHb52P8pcRX2CI6A3ctIT91xUo= github.com/otiai10/mint v1.3.1/go.mod h1:/yxELlJQ0ufhjUwhshSj+wFjZ78CnZ48/1wtmBH1OTc= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pelletier/go-toml v1.9.4 h1:tjENF6MfZAg8e4ZmZTeWaWiT2vXtsoO6+iuOjFhECwM= github.com/pelletier/go-toml v1.9.4/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= +github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.0-beta.8/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pelletier/go-toml/v2 v2.0.0 h1:P7Bq0SaI8nsexyay5UAyDo+ICWy5MQPgEZ5+l8JQTKo= +github.com/pelletier/go-toml/v2 v2.0.0/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= github.com/phayes/checkstyle 
v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= @@ -675,52 +706,60 @@ github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pkg/sftp v1.13.1/go.mod h1:3HaPG6Dq1ILlpPZRO0HVMrsydcdLt6HRDccSgb87qRg= github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b h1:/BDyEJWLnDUYKGWdlNx/82qSaVu2bUok/EvPUtIGuvw= -github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= +github.com/polyfloyd/go-errorlint v1.0.0 h1:pDrQG0lrh68e602Wfp68BlUTRFoHn8PZYAjLgt2LFsM= +github.com/polyfloyd/go-errorlint v1.0.0/go.mod h1:KZy4xxPJyy88/gldCe5OdW6OQRtNO3EZE7hXzmnebgA= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= -github.com/prometheus/client_golang v1.7.1 h1:NTGy1Ja9pByO+xAeH/qiWnLrKtr3hJPNjaVUwnjpdpA= +github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1 h1:ZiaPsmm9uiBeaSMRznKsCDNtPCS0T3JVDGF+06gjBzk= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= -github.com/prometheus/common v0.10.0 h1:RyRA7RzGXQZiW+tGMr7sxa85G1z0yOpM1qq5c8lNawc= +github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod 
h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A= github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= -github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= -github.com/quasilyte/go-ruleguard v0.3.15 h1:iWYzp1z72IlXTioET0+XI6SjQdPfMGfuAiZiKznOt7g= -github.com/quasilyte/go-ruleguard v0.3.15/go.mod h1:NhuWhnlVEM1gT1A4VJHYfy9MuYSxxwHgxWoPsn9llB4= +github.com/quasilyte/go-ruleguard v0.3.16-0.20220213074421-6aa060fab41a h1:sWFavxtIctGrVs5SYZ5Ml1CvrDAs8Kf5kx2PI3C41dA= +github.com/quasilyte/go-ruleguard v0.3.16-0.20220213074421-6aa060fab41a/go.mod h1:VMX+OnnSw4LicdiEGtRSD/1X8kW7GuEscjYNr4cOIT4= github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.12-0.20220101150716-969a394a9451/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= -github.com/quasilyte/go-ruleguard/dsl v0.3.12/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.16/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= github.com/quasilyte/go-ruleguard/rules v0.0.0-20211022131956-028d6511ab71/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= -github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3 h1:P4QPNn+TK49zJjXKERt/vyPbv/mCHB/zQ4flDYOMN+M= -github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3/go.mod h1:wSEyW6O61xRV6zb6My3HxrQ5/8ke7NE2OayqCHa3xRM= +github.com/quasilyte/gogrep v0.0.0-20220120141003-628d8b3623b5 h1:PDWGei+Rf2bBiuZIbZmM20J2ftEy9IeUCHA8HbQqed8= +github.com/quasilyte/gogrep v0.0.0-20220120141003-628d8b3623b5/go.mod h1:wSEyW6O61xRV6zb6My3HxrQ5/8ke7NE2OayqCHa3xRM= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= +github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= +github.com/quasilyte/stdinfo 
v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= github.com/rivo/uniseg v0.1.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg= github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= @@ -732,15 +771,15 @@ github.com/ryancurrah/gomodguard v1.2.3/go.mod h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoL github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/sagikazarmark/crypt v0.1.0/go.mod h1:B/mN0msZuINBtQ1zZLEQcegFJJf9vnYIR88KRMEuODE= +github.com/sagikazarmark/crypt v0.5.0/go.mod h1:l+nzl7KWh51rpzp2h7t4MZWyiEWdhNpOAnclKvg+mdA= github.com/sanposhiho/wastedassign/v2 v2.0.6 h1:+6/hQIHKNJAUixEj6EmOngGIisyeI+T3335lYTyxRoA= github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw= github.com/sclevine/spec v1.4.0 h1:z/Q9idDcay5m5irkZ28M7PtQM4aOISzOpj4bUPkDee8= github.com/sclevine/spec v1.4.0/go.mod h1:LvpgJaFyvQzRvc1kaDs0bulYwzC70PbiYjC4QnFHkOM= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= -github.com/securego/gosec/v2 v2.10.0 h1:l6BET4EzWtyUXCpY2v7N92v0DDCas0L7ngg3bpqbr8g= -github.com/securego/gosec/v2 v2.10.0/go.mod h1:PVq8Ewh/nCN8l/kKC6zrGXSr7m2NmEK6ITIAWMtIaA0= +github.com/securego/gosec/v2 v2.11.0 h1:+PDkpzR41OI2jrw1q6AdXZCbsNGNGT7pQjal0H0cArI= +github.com/securego/gosec/v2 v2.11.0/go.mod h1:SX8bptShuG8reGC0XS09+a4H2BoWSJi+fscA+Pulbpo= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= @@ -749,13 +788,14 @@ github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOms github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod 
h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sivchari/containedctx v1.0.2 h1:0hLQKpgC53OVF1VT7CeoFHk9YKstur1XOgfYIc1yrHI= github.com/sivchari/containedctx v1.0.2/go.mod h1:PwZOeqm4/DLoJOqMSIJs3aKqXRX4YO+uXww087KZ7Bw= -github.com/sivchari/tenv v1.4.7 h1:FdTpgRlTue5eb5nXIYgS/lyVXSjugU8UUVDwhP1NLU8= -github.com/sivchari/tenv v1.4.7/go.mod h1:5nF+bITvkebQVanjU6IuMbvIot/7ReNsUV7I5NbprB0= +github.com/sivchari/tenv v1.5.0 h1:wxW0mFpKI6DIb3s6m1jCDYvkWXCskrimXMuGd0K/kSQ= +github.com/sivchari/tenv v1.5.0/go.mod h1:64yStXKSOxDfX47NlhVwND4dHwfZDdbp2Lyl018Icvg= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= @@ -763,8 +803,8 @@ github.com/sourcegraph/go-diff v0.6.1 h1:hmA1LzxW0n1c3Q4YbrFgg4P99GSnebYa3x8gr0H github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= -github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/afero v1.8.2 h1:xehSyVa0YnHWsJ49JFljMpg1HX19V6NDZ1fkm1Xznbo= +github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfAqwo= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cast v1.4.1 h1:s0hze+J0196ZfEMTs80N7UlFt0BDuQ7Q+JDnHiMWKdA= github.com/spf13/cast v1.4.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= @@ -780,13 +820,14 @@ github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnIn github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/spf13/viper v1.9.0/go.mod h1:+i6ajR7OX2XaiBkrcZJFK21htRk7eDeLg7+O6bhUPP4= -github.com/spf13/viper v1.10.1 h1:nuJZuYpG7gTj/XqiUwg8bA0cp1+M2mC3J4g5luUYBKk= -github.com/spf13/viper v1.10.1/go.mod h1:IGlFPqhNAPKRxohIzWpI5QEy4kuI7tcl5WvR+8qy1rU= +github.com/spf13/viper v1.11.0 h1:7OX/1FS6n7jHD1zGrZTM7WtY13ZELRyosK4k93oPr44= +github.com/spf13/viper v1.11.0/go.mod h1:djo0X/bA5+tYVoCn+C7cAYJGcVn/qYLFTG8gdUsX7Zk= github.com/square/certstrap v1.2.0 h1:ecgyABrbFLr8jSbOC6oTBmBek0t/HqtgrMUZCPuyfdw= github.com/square/certstrap v1.2.0/go.mod h1:CUHqV+fxJW0Y5UQFnnbYwQ7bpKXO1AKbic9g73799yw= github.com/ssgreg/nlreturn/v2 v2.2.1 h1:X4XDI7jstt3ySqGU86YGAURbxw3oTDPK9sPEi6YEwQ0= github.com/ssgreg/nlreturn/v2 v2.2.1/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= +github.com/stbenjam/no-sprintf-host-port v0.1.1 
h1:tYugd/yrm1O0dV+ThCbaKZh195Dfm07ysF0U6JQXczc= +github.com/stbenjam/no-sprintf-host-port v0.1.1/go.mod h1:TLhvtIvONRzdmkFiio4O8LHsN9N74I+PhRquPsxpL0I= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1 h1:2vfRuCMp5sSVIDSqO8oNnWJq7mPa6KVP3iPIwFBuy8A= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -818,11 +859,12 @@ github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144/go.mod h1:Qimiff github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tomarrell/wrapcheck/v2 v2.5.0 h1:g27SGGHNoQdvHz4KZA9o4v09RcWzylR+b1yueE5ECiw= -github.com/tomarrell/wrapcheck/v2 v2.5.0/go.mod h1:68bQ/eJg55BROaRTbMjC7vuhL2OgfoG8bLp9ZyoBfyY= +github.com/tomarrell/wrapcheck/v2 v2.6.1 h1:Cf4a/iwuMp9s7kKrh74GTgijRVim0wEpKjgAsT7Wctw= +github.com/tomarrell/wrapcheck/v2 v2.6.1/go.mod h1:Eo+Opt6pyMW1b6cNllOcDSSoHO0aTJ+iF6BfCUbHltA= github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= github.com/tommy-muehle/go-mnd/v2 v2.5.0 h1:iAj0a8e6+dXSL7Liq0aXPox36FiN1dBbjA6lt9fl65s= github.com/tommy-muehle/go-mnd/v2 v2.5.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= +github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= @@ -841,8 +883,8 @@ github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1z github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= github.com/yagipy/maintidx v1.0.0 h1:h5NvIsCz+nRDapQ0exNv4aJ0yXSI0420omVANTv3GJM= github.com/yagipy/maintidx v1.0.0/go.mod h1:0qNf/I/CCZXSMhsRsrEPDZ+DkekpKLXAJfsTACwgXLk= -github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1 h1:YAaOqqMTstELMMGblt6yJ/fcOt4owSYuw3IttMnKfAM= -github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= +github.com/yeya24/promlinter v0.2.0 h1:xFKDQ82orCU5jQujdaD8stOHiv8UN68BSdn2a8u8Y3o= +github.com/yeya24/promlinter v0.2.0/go.mod h1:u54lkmBOZrpEbQQ6gox2zWKKLKu2SGe+2KOiextY+IA= github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= @@ -858,9 +900,9 @@ gitlab.com/bosi/decorder v0.2.1/go.mod h1:6C/nhLSbF6qZbYD8bRmISBwc6vcWdNsiIBkRvj go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.4/go.mod 
h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= -go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= -go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= -go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.etcd.io/etcd/api/v3 v3.5.2/go.mod h1:5GB2vv4A4AOn3yk7MftYGHkUfGtDHnEraIjym4dYz5A= +go.etcd.io/etcd/client/pkg/v3 v3.5.2/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.2/go.mod h1:2D7ZejHVMIfog1221iLSYlQRzrtECw3kz4I4VAQm3qI= go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= @@ -885,19 +927,20 @@ go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181127143415-eb0de9b17e85/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= -golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220214200702-86341886e292/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.0.0-20220313003712-b769efc7c000/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.0.0-20220411220226-7b82a4e95df4/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9 h1:NUzdAbFtCJSXU20AOXgeqaUwg8Ypg4MPYmL+d+rsB5c= golang.org/x/crypto v0.0.0-20220513210258-46612604a0f9/go.mod 
h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -911,6 +954,8 @@ golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e h1:qyrTQ++p1afMkO4DPEeLGq/3oTsdlvdH4vqZUBWzUKM= +golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -942,7 +987,6 @@ golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -980,13 +1024,16 @@ golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201202161906-c7110b5ffcbb/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210331212208-0fccb6fa2b5c/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod 
h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= @@ -1012,6 +1059,7 @@ golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20210628180205-a41e5a781914/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210805134026-6f1e6394065a/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211104180415-d3ed0bb246c8/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/oauth2 v0.0.0-20220309155454-6242fa91716a/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= @@ -1034,7 +1082,6 @@ golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181128092732-4ed8d59d0b35/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1078,6 +1125,7 @@ golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1088,14 +1136,18 @@ golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210225134936-a50acf3fe073/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210514084401-e8d321eab015/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603125802-9665404d3644/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1105,17 +1157,20 @@ golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211105183446-c75c47738b0c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211210111614-af8b64212486/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211213223007-03aa0b5f6827/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220128215802-99c3d69c2c27/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220227234510-4e6760a101f9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220328115105-d36c6a25d886/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220422013727-9388b58f7150/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220502124256-b6088ccd6cba/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220513210249-45d2b4557a2a 
h1:N2T1jUrTQE9Re6TFF5PhvEHXHCguynGhKjWVsIUt5cY= @@ -1217,13 +1272,13 @@ golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4X golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210108195828-e2f9c7f1fc8e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1-0.20210205202024-ef80cdb6ec6d/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= golang.org/x/tools v0.1.1-0.20210302220138-2ac05c832e1a/go.mod h1:9bzcO0MWcOuT0tm1iBGzDVPshzfwoVvREIui8C+MHqU= @@ -1236,8 +1291,9 @@ golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo= golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.9-0.20211228192929-ee1ca4ffc4da/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU= -golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20= golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= +golang.org/x/tools v0.1.11-0.20220316014157-77aa08bb151a h1:ofrrl6c6NG5/IOSx/R1cyiQxxjqlur0h/TvbUhkH0II= +golang.org/x/tools v0.1.11-0.20220316014157-77aa08bb151a/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1274,6 +1330,7 @@ google.golang.org/api v0.54.0/go.mod h1:7C4bFFOvVDGXjfDTAsgGwDgAxRDeQ4X8NvUedIt6 google.golang.org/api v0.55.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.56.0/go.mod h1:38yMfeP1kfjsl8isn0tliTjIb1rJXcQi4UXlbqivdVE= google.golang.org/api v0.57.0/go.mod h1:dVPlbZyBo2/OjBpmvNdpn2GRm6rPy75jyU7bmhdrMgI= +google.golang.org/api v0.59.0/go.mod h1:sT2boj7M9YJxZzgeZqXogmhfmRWDtPzT31xkieUbuZU= google.golang.org/api v0.61.0/go.mod h1:xQRti5UdCmoCEqFxcz93fTl338AVqDgyaDRuOZ3hg9I= google.golang.org/api v0.63.0/go.mod h1:gs4ij2ffTRXwuzzgJl/56BdwJaA194ijkfn++9tDuPo= google.golang.org/api v0.67.0/go.mod h1:ShHKP8E60yPsKNw/w8w+VYaj9H6buA5UqDp8dhbQZ6g= @@ -1335,7 +1392,9 @@ google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6D 
google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210108203827-ffc7fda8c3d7/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210226172003-ab064af71705/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= @@ -1357,6 +1416,8 @@ google.golang.org/genproto v0.0.0-20210831024726-fe130286e0e2/go.mod h1:eFjDcFEc google.golang.org/genproto v0.0.0-20210903162649-d08c68adba83/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210909211513-a8c4777a87af/go.mod h1:eFjDcFEctNawg4eG61bRv87N7iHBWyVhJu7u1kqDUXY= google.golang.org/genproto v0.0.0-20210924002016-3dee208752a0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211008145708-270636b82663/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= +google.golang.org/genproto v0.0.0-20211028162531-8db9c33dc351/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211118181313-81c1377c94b1/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211206160659-862468c7d6e0/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/genproto v0.0.0-20211208223120-3a66f561d7aa/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= @@ -1440,9 +1501,8 @@ gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qS gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= -gopkg.in/ini.v1 v1.63.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/ini.v1 v1.66.2 h1:XfR1dOYubytKy4Shzc2LHrrGhU0lDCfDGG1yLPmpgsI= -gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.66.4 h1:SsAcf+mM7mRZo2nJNGt8mZCjG8ZRaNGMURJw7BsIST4= +gopkg.in/ini.v1 v1.66.4/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= @@ -1468,10 +1528,10 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= -honnef.co/go/tools v0.2.2 
h1:MNh1AVMyVX23VUHE2O27jm6lNj3vjO5DexS4A1xvnzk= -honnef.co/go/tools v0.2.2/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= -mvdan.cc/gofumpt v0.3.0 h1:kTojdZo9AcEYbQYhGuLf/zszYthRdhDNDUi2JKTxas4= -mvdan.cc/gofumpt v0.3.0/go.mod h1:0+VyGZWleeIj5oostkOex+nDBA0eyavuDnDusAJ8ylo= +honnef.co/go/tools v0.3.1 h1:1kJlrWJLkaGXgcaeosRXViwviqjI7nkBvU2+sZW0AYc= +honnef.co/go/tools v0.3.1/go.mod h1:vlRD9XErLMGT+mDuofSr0mMMquscM/1nQqtRSsh6m70= +mvdan.cc/gofumpt v0.3.1 h1:avhhrOmv0IuvQVK7fvwV91oFSGAk5/6Po8GXTzICeu8= +mvdan.cc/gofumpt v0.3.1/go.mod h1:w3ymliuxvzVx8DAutBnVyDqYb1Niy/yCJt/lk821YCE= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= diff --git a/index/file_index.go b/index/file_index.go index 3cde4872b..a0ecfc99f 100644 --- a/index/file_index.go +++ b/index/file_index.go @@ -138,46 +138,6 @@ func (ri FileIndex) structToMap(s interface{}) (map[string]interface{}, error) { return res, nil } -func (ri FileIndex) mapToStruct(m map[string]interface{}, t interface{}) (reflect.Value, error) { - return ri.mapToNewStruct(m, reflect.ValueOf(t).Elem().Type()) -} - -func (ri FileIndex) mapToStructFromSlice(m map[string]interface{}, t interface{}) (reflect.Value, error) { - slice := reflect.ValueOf(t).Elem() - - if slice.Kind() != reflect.Slice { - return reflect.Value{}, bosherr.Errorf( - "Must be reflect.Slice: %#v (%#v)", - slice, ri.kindToStr(slice.Kind()), - ) - } - - return ri.mapToNewStruct(m, slice.Type().Elem()) -} - -func (ri FileIndex) mapToNewStruct(m map[string]interface{}, t reflect.Type) (reflect.Value, error) { - if t.Kind() != reflect.Struct { - return reflect.Value{}, bosherr.Errorf( - "Must be reflect.Struct: %#v (%#v)", - t, ri.kindToStr(t.Kind()), - ) - } - - newStruct := reflect.New(t).Elem() - - for k, v := range m { - f := newStruct.FieldByName(k) - if f.IsValid() && f.CanSet() { - // todo float64 -> int - // todo pointer values - // todo slices - f.Set(reflect.ValueOf(v)) - } - } - - return newStruct, nil -} - var kindToStrMap = map[reflect.Kind]string{ reflect.Invalid: "Invalid", reflect.Bool: "Bool", diff --git a/installation/job_resolver.go b/installation/job_resolver.go index b08002ccf..d43049523 100644 --- a/installation/job_resolver.go +++ b/installation/job_resolver.go @@ -27,7 +27,7 @@ func (b *jobResolver) From(installationManifest biinstallmanifest.Manifest) ([]b // installation only ever has one job: the cpi job. 
 	jobsReferencesInRelease := []biinstallmanifest.ReleaseJobRef{installationManifest.Template}
-	releaseJobs := make([]bireljob.Job, len(jobsReferencesInRelease), len(jobsReferencesInRelease))
+	releaseJobs := make([]bireljob.Job, len(jobsReferencesInRelease))
 	for i, jobRef := range jobsReferencesInRelease {
 		release, err := b.releaseJobResolver.Resolve(jobRef.Name, jobRef.Release)
 		if err != nil {
diff --git a/installation/package_compiler.go b/installation/package_compiler.go
index 7624f97cd..8bf5cd2ff 100644
--- a/installation/package_compiler.go
+++ b/installation/package_compiler.go
@@ -40,7 +40,7 @@ func (b *packageCompiler) For(jobs []bireljob.Job, stage biui.Stage) ([]Compiled
 		return nil, bosherr.WrapError(err, "Compiling job package dependencies for installation")
 	}
-	compiledInstallationPackageRefs := make([]CompiledPackageRef, len(compiledPackageRefs), len(compiledPackageRefs))
+	compiledInstallationPackageRefs := make([]CompiledPackageRef, len(compiledPackageRefs))
 	for i, compiledPackageRef := range compiledPackageRefs {
 		compiledInstallationPackageRefs[i] = CompiledPackageRef{
 			Name: compiledPackageRef.Name,
diff --git a/release/job/dir_reader.go b/release/job/dir_reader.go
index e2d406e2a..ffe3fbc6e 100644
--- a/release/job/dir_reader.go
+++ b/release/job/dir_reader.go
@@ -73,7 +73,7 @@ func (r DirReaderImpl) collectFiles(path string) (boshjobman.Manifest, []File, e
 		files = append(files, NewFile(monitPath, path))
 	}
-	for src, _ := range manifest.Templates {
+	for src := range manifest.Templates {
 		srcPath := filepath.Join(path, "templates", src)
 		files = append(files, NewFile(srcPath, path))
 	}
diff --git a/release/set/manifest/parser.go b/release/set/manifest/parser.go
index 6caa07eff..4e681aa0b 100644
--- a/release/set/manifest/parser.go
+++ b/release/set/manifest/parser.go
@@ -67,9 +67,7 @@ func (p *parser) Parse(path string, vars boshtpl.Variables, op patch.Op) (Manife
 		}
 	}
-	releaseSetManifest := Manifest{
-		Releases: comboManifest.Releases,
-	}
+	releaseSetManifest := Manifest(comboManifest)
 	err = p.validator.Validate(releaseSetManifest)
 	if err != nil {
diff --git a/release/set/manifest/validator.go b/release/set/manifest/validator.go
index b33a3dfe3..40445aa8c 100644
--- a/release/set/manifest/validator.go
+++ b/release/set/manifest/validator.go
@@ -27,6 +27,7 @@ func (v *validator) Validate(manifest Manifest) error {
 		errs = append(errs, bosherr.Errorf("releases must contain at least 1 release"))
 	}
+	protocolRegex := regexp.MustCompile("^(file|http|https)://")
 	for releaseIdx, release := range manifest.Releases {
 		if v.isBlank(release.Name) {
 			errs = append(errs, bosherr.Errorf("releases[%d].name must be provided", releaseIdx))
@@ -41,8 +42,8 @@ func (v *validator) Validate(manifest Manifest) error {
 			errs = append(errs, bosherr.Errorf("releases[%d].url must be provided", releaseIdx))
 		}
-		matched, err := regexp.MatchString("^(file|http|https)://", release.URL)
-		if err != nil || !matched {
+		matched := protocolRegex.MatchString(release.URL)
+		if !matched {
 			errs = append(errs, bosherr.Errorf("releases[%d].url must be a valid URL (file:// or http(s)://)", releaseIdx))
 		}
diff --git a/releasedir/fs_release_dir.go b/releasedir/fs_release_dir.go
index 431feca4e..e7e4c5c25 100644
--- a/releasedir/fs_release_dir.go
+++ b/releasedir/fs_release_dir.go
@@ -35,7 +35,7 @@ type FSReleaseDir struct {
 	finalIndicies boshrel.ArchiveIndicies
 	releaseReader boshrel.Reader
-	releaseArchiveWriter boshrel.Writer
+	releaseArchiveWriter boshrel.Writer //nolint:unused
 	timeService clock.Clock
 	fs boshsys.FileSystem
@@ -241,7 +241,7 @@ func (d FSReleaseDir) VendorPackage(pkg *boshpkg.Package) error {
 	d.collectDependentPackages(pkg, allInterestingPkgs)
-	for pkg2, _ := range allInterestingPkgs {
+	for pkg2 := range allInterestingPkgs {
 		err := pkg2.Finalize(d.finalIndicies.Packages)
 		if err != nil {
 			return bosherr.WrapErrorf(err, "Finalizing vendored package")
diff --git a/releasedir/fs_release_index.go b/releasedir/fs_release_index.go
index 668ad5f3c..0a776c1e9 100644
--- a/releasedir/fs_release_index.go
+++ b/releasedir/fs_release_index.go
@@ -44,10 +44,10 @@ var _ yaml.Marshaler = fsReleaseIndexSchema_SortedEntries{}
 func (e fsReleaseIndexSchema_SortedEntries) MarshalYAML() (interface{}, error) {
 	var keys []string
-	for k, _ := range e {
+	for k := range e {
 		keys = append(keys, k)
 	}
-	sort.Sort(sort.StringSlice(keys))
+	sort.Strings(keys)
 	var sortedEntries []yaml.MapItem
 	for _, k := range keys {
 		sortedEntries = append(sortedEntries, yaml.MapItem{Key: k, Value: e[k]})
@@ -59,7 +59,7 @@ type fsReleaseIndexSchema_Entry struct {
 	Version string `yaml:"version"`
 }
-type releaseIndexEntry struct { //nolint:deadcode
+type releaseIndexEntry struct { //nolint:deadcode,unused
 	Version semver.Version
 }
diff --git a/releasedir/index/fs_index.go b/releasedir/index/fs_index.go
index 9ac1653d9..2381687fb 100644
--- a/releasedir/index/fs_index.go
+++ b/releasedir/index/fs_index.go
@@ -73,10 +73,10 @@ var _ yaml.Marshaler = fsIndexSchema_SortedEntries{}
 func (e fsIndexSchema_SortedEntries) MarshalYAML() (interface{}, error) {
 	var keys []string
-	for k, _ := range e {
+	for k := range e {
 		keys = append(keys, k)
 	}
-	sort.Sort(sort.StringSlice(keys))
+	sort.Strings(keys)
 	var sortedEntries []yaml.MapItem
 	for _, k := range keys {
 		sortedEntries = append(sortedEntries, yaml.MapItem{Key: k, Value: e[k]})
diff --git a/ssh/combo_runner_test.go b/ssh/combo_runner_test.go
index 3867507c1..073ceeb08 100644
--- a/ssh/combo_runner_test.go
+++ b/ssh/combo_runner_test.go
@@ -3,7 +3,6 @@ package ssh_test
 import (
 	"bytes"
 	"errors"
-	"fmt"
 	"os"
 	"strings"
 	"syscall"
@@ -109,7 +108,7 @@ var _ = Describe("ComboRunner", func() {
 	It("provides ssh arguments to customize cmd", func() {
 		cmdFactory = func(host boshdir.Host, args SSHArgs) boshsys.Command {
 			optsStr := strings.Join(args.LoginForHost(host), " ")
-			opts := make([]string, 0)
+			var opts []string
 			switch {
 			case strings.Contains(optsStr, "127.0.0.1"):
 				opts = []string{"ip-1"}
@@ -136,8 +135,8 @@ var _ = Describe("ComboRunner", func() {
 		session.StartReturns(sshArgs, nil)
-		cmdRunner.AddProcess(fmt.Sprintf("cmd ip-1"), &fakesys.FakeProcess{})
-		cmdRunner.AddProcess(fmt.Sprintf("cmd ip-2"), &fakesys.FakeProcess{})
+		cmdRunner.AddProcess("cmd ip-1", &fakesys.FakeProcess{})
+		cmdRunner.AddProcess("cmd ip-2", &fakesys.FakeProcess{})
 		err := comboRunner.Run(connOpts, result, cmdFactory)
 		Expect(err).ToNot(HaveOccurred())
diff --git a/ssh/host.go b/ssh/host.go
index 641ae20f5..a26869cc0 100644
--- a/ssh/host.go
+++ b/ssh/host.go
@@ -40,7 +40,7 @@ func (h *hostBuilder) BuildHost(slug boshdir.AllOrInstanceGroupOrInstanceSlug, u
 	var targetVM boshdir.VMInfo
 	indexOrId := slug.IndexOrID()
 	for _, vm := range vms {
-		if vm.Active == nil || *vm.Active == false {
+		if vm.Active == nil || !*vm.Active {
 			continue
 		}
 		if vm.JobName == slug.Name() {
diff --git a/state/pkg/resolve_dependencies.go b/state/pkg/resolve_dependencies.go
index 85c3d4494..e462aa883 100644
--- a/state/pkg/resolve_dependencies.go
+++ b/state/pkg/resolve_dependencies.go
@@ -16,9 +16,7 @@ func resolveInner(pkg birelpkg.Compilable, noFollow []birelpkg.Compilable) []bir
 			all = append(all, depPkg)
 			tDeps := resolveInner(depPkg, joinUnique(all, noFollow))
-			for _, tDepPkg := range tDeps {
-				all = append(all, tDepPkg)
-			}
+			all = append(all, tDeps...)
 		}
 	}
diff --git a/templatescompiler/rendered_job_list.go b/templatescompiler/rendered_job_list.go
index 7429e07cc..782af9302 100644
--- a/templatescompiler/rendered_job_list.go
+++ b/templatescompiler/rendered_job_list.go
@@ -15,7 +15,6 @@ type RenderedJobList interface {
 type renderedJobList struct {
 	renderedJobs []RenderedJob
-	logTag string
 }
 func NewRenderedJobList() RenderedJobList {
diff --git a/ui/fmt/error.go b/ui/fmt/error.go
index cc759fa4e..55878eb85 100644
--- a/ui/fmt/error.go
+++ b/ui/fmt/error.go
@@ -24,7 +24,7 @@ func prefixingMultilineError(err error, prefix string, bullet string) string {
 	case bosherr.ComplexError:
 		return currPrefix + specificErr.Err.Error() + ":\n" + prefixingMultilineError(specificErr.Cause, prefix+Indent, "")
 	case bosherr.MultiError:
-		lines := make([]string, len(specificErr.Errors), len(specificErr.Errors))
+		lines := make([]string, len(specificErr.Errors))
 		for i, sibling := range specificErr.Errors {
 			lines[i] = prefixingMultilineError(sibling, prefix, Bullet)
 		}
diff --git a/ui/json_ui.go b/ui/json_ui.go
index 60328f1c0..a054d4a47 100644
--- a/ui/json_ui.go
+++ b/ui/json_ui.go
@@ -123,7 +123,7 @@ func (ui *jsonUI) printTableHeader(table *Table) tableResp {
 		}
 	} else if len(table.AsRows()) > 0 {
 		var rawHeaders []Header
-		for i, _ := range table.AsRows()[0] {
+		for i := range table.AsRows()[0] {
 			val := Header{
 				Key: fmt.Sprintf("col_%d", i),
 				Hidden: false,
diff --git a/ui/table/headers.go b/ui/table/headers.go
index 8d201ab90..8d41a9547 100644
--- a/ui/table/headers.go
+++ b/ui/table/headers.go
@@ -24,7 +24,7 @@ func NewHeadersFromStrings(titles []string) (headers []Header) {
 }
 func (t *Table) SetColumnVisibility(headers []Header) error {
-	for tableHeaderIdx, _ := range t.Header {
+	for tableHeaderIdx := range t.Header {
 		t.Header[tableHeaderIdx].Hidden = true
 	}
@@ -50,7 +50,7 @@ func (t *Table) SetColumnVisibilityFiltered(headers []Header, filterHeaders []Header) error {
-	for tableHeaderIdx, _ := range t.Header {
+	for tableHeaderIdx := range t.Header {
 		t.Header[tableHeaderIdx].Hidden = true
 	}
diff --git a/ui/table/table.go b/ui/table/table.go
index df7ea7e6f..b116de9a6 100644
--- a/ui/table/table.go
+++ b/ui/table/table.go
@@ -16,7 +16,7 @@ func (t Table) AsRows() [][]Value {
 	for _, s := range t.Sections {
 		if s.FirstColumn != nil && len(s.FirstColumn.String()) > 0 {
 			if len(s.Rows) > 0 && len(s.Rows[0]) > 0 {
-				for i, _ := range s.Rows {
+				for i := range s.Rows {
 					s.Rows[i][0] = s.FirstColumn
 				}
 			}
@@ -24,18 +24,14 @@
 		totalRows += len(s.Rows)
-		for _, r := range s.Rows {
-			rows = append(rows, r)
-		}
+		rows = append(rows, s.Rows...)
 		}
 	}
 	if len(t.Rows) > 0 {
 		totalRows += len(t.Rows) //nolint:ineffassign
-		for _, r := range t.Rows {
-			rows = append(rows, r)
-		}
+		rows = append(rows, t.Rows...)
} // Fill in nils diff --git a/ui/table/values.go b/ui/table/values.go index b3d42af71..673b6bb94 100644 --- a/ui/table/values.go +++ b/ui/table/values.go @@ -128,7 +128,7 @@ func (t ValueBool) Compare(other Value) int { switch { case t.B == otherB: return 0 - case t.B == false && otherB == true: + case !t.B && otherB: return -1 default: return 1 diff --git a/ui/task/reporter.go b/ui/task/reporter.go index 0647a39aa..52f4ee09c 100644 --- a/ui/task/reporter.go +++ b/ui/task/reporter.go @@ -7,6 +7,8 @@ import ( "sync" boshui "github.com/cloudfoundry/bosh-cli/ui" + "golang.org/x/text/cases" + "golang.org/x/text/language" ) type ReporterImpl struct { @@ -69,7 +71,7 @@ func (r *ReporterImpl) TaskFinished(id int, state string) { } if r.noOutputSinceTaskStarted(id) { - r.ui.EndLinef(". %s", strings.Title(state)) + r.ui.EndLinef(". %s", cases.Title(language.English).String(state)) } else { r.ui.BeginLinef("\nTask %d %s\n", id, state) } diff --git a/ui/ui.go b/ui/ui.go index 62230f04d..99fdac0b6 100644 --- a/ui/ui.go +++ b/ui/ui.go @@ -152,7 +152,7 @@ func (ui *WriterUI) AskForConfirmation() error { return bosherr.WrapError(err, "Asking for confirmation") } - if falseByDefault == false { + if !falseByDefault { return errors.New("Stopped") } diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go index 3cbd3ffc2..71a9ddf40 100644 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go +++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go @@ -99,7 +99,7 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) { return true }) - return nil, nil + return nil, nil //nolint:nilnil } func (n *nilNil) isDangerNilField(f *ast.Field, typeSpecs typeSpecByName) bool { diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md index cc13f8667..a999c356f 100644 --- a/vendor/github.com/BurntSushi/toml/README.md +++ b/vendor/github.com/BurntSushi/toml/README.md @@ -56,7 +56,7 @@ And then decoded with: ```go var conf Config -err := toml.Decode(tomlData, &conf) +_, err := toml.Decode(tomlData, &conf) // handle error ``` diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go index e24f0c5d5..6a8715074 100644 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -1,6 +1,7 @@ package toml import ( + "bytes" "encoding" "fmt" "io" @@ -18,11 +19,29 @@ type Unmarshaler interface { } // Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. -func Unmarshal(p []byte, v interface{}) error { - _, err := Decode(string(p), v) +func Unmarshal(data []byte, v interface{}) error { + _, err := NewDecoder(bytes.NewReader(data)).Decode(v) return err } +// Decode the TOML data in to the pointer v. +// +// See the documentation on Decoder for a description of the decoding process. +func Decode(data string, v interface{}) (MetaData, error) { + return NewDecoder(strings.NewReader(data)).Decode(v) +} + +// DecodeFile is just like Decode, except it will automatically read the +// contents of the file at path and decode it for you. 
+func DecodeFile(path string, v interface{}) (MetaData, error) { + fp, err := os.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} + // Primitive is a TOML value that hasn't been decoded into a Go value. // // This type can be used for any value, which will cause decoding to be delayed. @@ -42,27 +61,10 @@ type Primitive struct { // The significand precision for float32 and float64 is 24 and 53 bits; this is // the range a natural number can be stored in a float without loss of data. const ( - maxSafeFloat32Int = 16777215 // 2^24-1 - maxSafeFloat64Int = 9007199254740991 // 2^53-1 + maxSafeFloat32Int = 16777215 // 2^24-1 + maxSafeFloat64Int = int64(9007199254740991) // 2^53-1 ) -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including this method. (i.e., `v` may contain more `Primitive` -// values.) -// -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions with one exception: keys returned by the Undecoded -// method will only reflect keys that were decoded. Namely, any keys hidden -// behind a Primitive will be considered undecoded. Executing this method will -// update the undecoded keys in the meta data. (See the example.) -func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { - md.context = primValue.context - defer func() { md.context = nil }() - return md.unify(primValue.undecoded, rvalue(v)) -} - // Decoder decodes TOML data. // // TOML tables correspond to Go structs or maps (dealer's choice – they can be @@ -158,22 +160,21 @@ func (dec *Decoder) Decode(v interface{}) (MetaData, error) { return md, md.unify(p.mapping, rv) } -// Decode the TOML data in to the pointer v. +// PrimitiveDecode is just like the other `Decode*` functions, except it +// decodes a TOML value that has already been parsed. Valid primitive values +// can *only* be obtained from values filled by the decoder functions, +// including this method. (i.e., `v` may contain more `Primitive` +// values.) // -// See the documentation on Decoder for a description of the decoding process. -func Decode(data string, v interface{}) (MetaData, error) { - return NewDecoder(strings.NewReader(data)).Decode(v) -} - -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at path and decode it for you. -func DecodeFile(path string, v interface{}) (MetaData, error) { - fp, err := os.Open(path) - if err != nil { - return MetaData{}, err - } - defer fp.Close() - return NewDecoder(fp).Decode(v) +// Meta data for primitive values is included in the meta data returned by +// the `Decode*` functions with one exception: keys returned by the Undecoded +// method will only reflect keys that were decoded. Namely, any keys hidden +// behind a Primitive will be considered undecoded. Executing this method will +// update the undecoded keys in the meta data. (See the example.) 
+func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { + md.context = primValue.context + defer func() { md.context = nil }() + return md.unify(primValue.undecoded, rvalue(v)) } // unify performs a sort of type unification based on the structure of `rv`, diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go index dee4e6d31..e7d4eeb48 100644 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -212,7 +212,7 @@ func (enc *Encoder) eElement(rv reflect.Value) { if err != nil { encPanic(err) } - enc.writeQuoted(string(s)) + enc.w.Write(s) return case encoding.TextMarshaler: s, err := v.MarshalText() @@ -398,6 +398,10 @@ func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { if f.PkgPath != "" && !f.Anonymous { /// Skip unexported fields. continue } + opts := getOptions(f.Tag) + if opts.skip { + continue + } frv := rv.Field(i) diff --git a/vendor/github.com/BurntSushi/toml/error.go b/vendor/github.com/BurntSushi/toml/error.go index 36edc4655..d5728a413 100644 --- a/vendor/github.com/BurntSushi/toml/error.go +++ b/vendor/github.com/BurntSushi/toml/error.go @@ -10,7 +10,7 @@ import ( // For example invalid syntax, duplicate keys, etc. // // In addition to the error message itself, you can also print detailed location -// information with context by using ErrorWithLocation(): +// information with context by using ErrorWithPosition(): // // toml: error: Key 'fruit' was already created and cannot be used as an array. // diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go index 63ef20f47..ce7f546b4 100644 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -128,6 +128,11 @@ func (lx lexer) getPos() Position { } func (lx *lexer) emit(typ itemType) { + // Needed for multiline strings ending with an incomplete UTF-8 sequence. + if lx.start > lx.pos { + lx.error(errLexUTF8{lx.input[lx.pos]}) + return + } lx.items <- item{typ: typ, pos: lx.getPos(), val: lx.current()} lx.start = lx.pos } diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go index 8269cca17..0420b5ba4 100644 --- a/vendor/github.com/BurntSushi/toml/parse.go +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -220,7 +220,7 @@ func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { case itemString: return p.replaceEscapes(it, it.val), p.typeOfPrimitive(it) case itemMultilineString: - return p.replaceEscapes(it, stripFirstNewline(stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + return p.replaceEscapes(it, stripFirstNewline(p.stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) case itemRawString: return it.val, p.typeOfPrimitive(it) case itemRawMultilineString: @@ -647,7 +647,7 @@ func stripFirstNewline(s string) string { } // Remove newlines inside triple-quoted strings if a line ends with "\". 
-func stripEscapedNewlines(s string) string { +func (p *parser) stripEscapedNewlines(s string) string { split := strings.Split(s, "\n") if len(split) < 1 { return s @@ -679,6 +679,10 @@ func stripEscapedNewlines(s string) string { continue } + if i == len(split)-1 { + p.panicf("invalid escape: '\\ '") + } + split[i] = line[:len(line)-1] // Remove \ if len(split)-1 > i { split[i+1] = strings.TrimLeft(split[i+1], " \t\r") diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/LICENSE b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/LICENSE new file mode 100644 index 000000000..6698196c5 --- /dev/null +++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Gaijin Entertainment + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer/analyzer.go b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..6719ac5c5 --- /dev/null +++ b/vendor/github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer/analyzer.go @@ -0,0 +1,307 @@ +package analyzer + +import ( + "errors" + "flag" + "fmt" + "go/ast" + "go/types" + "regexp" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var ( + ErrEmptyPattern = errors.New("pattern can't be empty") +) + +type analyzer struct { + include PatternsList + exclude PatternsList +} + +// NewAnalyzer returns a go/analysis-compatible analyzer. 
+// -i arguments adds include patterns +// -e arguments adds exclude patterns +func NewAnalyzer(include []string, exclude []string) (*analysis.Analyzer, error) { + a := analyzer{} + + var err error + + a.include, err = newPatternsList(include) + if err != nil { + return nil, err + } + + a.exclude, err = newPatternsList(exclude) + if err != nil { + return nil, err + } + + return &analysis.Analyzer{ + Name: "exhaustruct", + Doc: "Checks if all structure fields are initialized", + Run: a.run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: a.newFlagSet(), + }, nil +} + +func (a *analyzer) newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("exhaustruct flags", flag.PanicOnError) + + fs.Var( + &reListVar{values: &a.include}, + "i", + "Regular expression to match struct packages and names, can receive multiple flags", + ) + fs.Var( + &reListVar{values: &a.exclude}, + "e", + "Regular expression to exclude struct packages and names, can receive multiple flags", + ) + + return *fs +} + +func (a *analyzer) run(pass *analysis.Pass) (interface{}, error) { + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert + + nodeTypes := []ast.Node{ + (*ast.CompositeLit)(nil), + (*ast.ReturnStmt)(nil), + } + + insp.Preorder(nodeTypes, a.newVisitor(pass)) + + return nil, nil //nolint:nilnil +} + +//nolint:funlen,cyclop +func (a *analyzer) newVisitor(pass *analysis.Pass) func(node ast.Node) { + var ret *ast.ReturnStmt + + return func(node ast.Node) { + if retLit, ok := node.(*ast.ReturnStmt); ok { + // save return statement for future (to detect error-containing returns) + ret = retLit + + return + } + + lit, _ := node.(*ast.CompositeLit) + if lit.Type == nil { + // we're not interested in non-typed literals + return + } + + typ := pass.TypesInfo.TypeOf(lit.Type) + if typ == nil { + return + } + + strct, ok := typ.Underlying().(*types.Struct) + if !ok { + // we also not interested in non-structure literals + return + } + + strctName := exprName(lit.Type) + if strctName == "" { + return + } + + if len(a.include) > 0 { + if !a.include.MatchesAny(typ.String()) { + return + } + } + + if a.exclude.MatchesAny(typ.String()) { + return + } + + if len(lit.Elts) == 0 && ret != nil { + if ret.End() < lit.Pos() { + // we're outside last return statement + ret = nil + } else if returnContainsLiteral(ret, lit) && returnContainsError(ret, pass) { + // we're okay with empty literals in return statements with non-nil errors, like + // `return my.Struct{}, fmt.Errorf("non-nil error!")` + return + } + } + + missingFields := structMissingFields(lit, strct, typ, pass) + + if len(missingFields) == 1 { + pass.Reportf(node.Pos(), "%s is missing in %s", missingFields[0], strctName) + } else if len(missingFields) > 1 { + pass.Reportf(node.Pos(), "%s are missing in %s", strings.Join(missingFields, ", "), strctName) + } + } +} + +func returnContainsLiteral(ret *ast.ReturnStmt, lit *ast.CompositeLit) bool { + for _, result := range ret.Results { + if l, ok := result.(*ast.CompositeLit); ok { + if lit == l { + return true + } + } + } + + return false +} + +func returnContainsError(ret *ast.ReturnStmt, pass *analysis.Pass) bool { + for _, result := range ret.Results { + if pass.TypesInfo.TypeOf(result).String() == "error" { + return true + } + } + + return false +} + +func structMissingFields(lit *ast.CompositeLit, strct *types.Struct, typ types.Type, pass *analysis.Pass) []string { + isSamePackage := strings.HasPrefix(typ.String(), pass.Pkg.Path()+".") + + keys, unnamed := literalKeys(lit) + 
fields := structFields(strct, isSamePackage) + + if unnamed { + return fields[len(keys):] + } + + return difference(fields, keys) +} + +func literalKeys(lit *ast.CompositeLit) (keys []string, unnamed bool) { + for _, elt := range lit.Elts { + if k, ok := elt.(*ast.KeyValueExpr); ok { + if ident, ok := k.Key.(*ast.Ident); ok { + keys = append(keys, ident.Name) + } + + continue + } + + // in case we deal with unnamed initialization - no need to iterate over all + // elements - simply create slice with proper size + unnamed = true + keys = make([]string, len(lit.Elts)) + + break + } + + return keys, unnamed +} + +func structFields(strct *types.Struct, withPrivate bool) (keys []string) { + for i := 0; i < strct.NumFields(); i++ { + f := strct.Field(i) + + if !f.Exported() && !withPrivate { + continue + } + + keys = append(keys, f.Name()) + } + + return keys +} + +// difference returns elements that are in `a` and not in `b`. +func difference(a, b []string) (diff []string) { + mb := make(map[string]bool, len(b)) + for _, x := range b { + mb[x] = true + } + + for _, x := range a { + if _, found := mb[x]; !found { + diff = append(diff, x) + } + } + + return diff +} + +func exprName(expr ast.Expr) string { + if i, ok := expr.(*ast.Ident); ok { + return i.Name + } + + s, ok := expr.(*ast.SelectorExpr) + if !ok { + return "" + } + + return s.Sel.Name +} + +type PatternsList []*regexp.Regexp + +// MatchesAny matches provided string against all regexps in a slice. +func (l PatternsList) MatchesAny(str string) bool { + for _, r := range l { + if r.MatchString(str) { + return true + } + } + + return false +} + +// newPatternsList parses slice of strings to a slice of compiled regular +// expressions. +func newPatternsList(in []string) (PatternsList, error) { + list := PatternsList{} + + for _, str := range in { + re, err := strToRegexp(str) + if err != nil { + return nil, err + } + + list = append(list, re) + } + + return list, nil +} + +type reListVar struct { + values *PatternsList +} + +func (v *reListVar) Set(value string) error { + re, err := strToRegexp(value) + if err != nil { + return err + } + + *v.values = append(*v.values, re) + + return nil +} + +func (v *reListVar) String() string { + return "" +} + +func strToRegexp(str string) (*regexp.Regexp, error) { + if str == "" { + return nil, ErrEmptyPattern + } + + re, err := regexp.Compile(str) + if err != nil { + return nil, fmt.Errorf("unable to compile %s as regular expression: %w", str, err) + } + + return re, nil +} diff --git a/vendor/github.com/blizzy78/varnamelen/.golangci.yml b/vendor/github.com/blizzy78/varnamelen/.golangci.yml index 92d895881..566572363 100644 --- a/vendor/github.com/blizzy78/varnamelen/.golangci.yml +++ b/vendor/github.com/blizzy78/varnamelen/.golangci.yml @@ -1,3 +1,18 @@ +# https://github.com/golangci/golangci-lint/issues/456#issuecomment-617470264 +issues: + exclude-use-default: false + exclude: + # errcheck: Almost all programs ignore errors on these functions and in most cases it's ok + - Error return value of .((os\.)?std(out|err)\..*|.*print(f|ln)?|os\.(Un)?Setenv). 
is not checked + # golint: False positive when tests are defined in package 'test' + - func name will be used as test\.Test.* by other packages, and that stutters; consider calling this + # gosec: Duplicated errcheck checks + - G104 + # gosec: Too many issues in popular repos + - (Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less) + # gosec: False positive is triggered by 'src, err := ioutil.ReadFile(filename)' + - Potential file inclusion via variable + linters: enable: - asciicheck diff --git a/vendor/github.com/blizzy78/varnamelen/LICENSE b/vendor/github.com/blizzy78/varnamelen/LICENSE index 810693c84..c45156a2c 100644 --- a/vendor/github.com/blizzy78/varnamelen/LICENSE +++ b/vendor/github.com/blizzy78/varnamelen/LICENSE @@ -1,4 +1,4 @@ -Copyright 2021 Maik Schreiber +Copyright 2021-2022 Maik Schreiber Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in diff --git a/vendor/github.com/blizzy78/varnamelen/README.md b/vendor/github.com/blizzy78/varnamelen/README.md index d13ff39ef..02131ff29 100644 --- a/vendor/github.com/blizzy78/varnamelen/README.md +++ b/vendor/github.com/blizzy78/varnamelen/README.md @@ -4,13 +4,14 @@ varnamelen ========== -A Go Analyzer checking that the length of a variable's name matches its usage scope. +A Go Analyzer that checks that the length of a variable's name matches its usage scope: -A variable with a short name can be hard to use if the variable is used over a longer span of lines of code. +Variables with short names can be hard to use if the variable is used over a longer span of lines of code. A longer variable name may be easier to comprehend. -The analyzer can check variable names, method receiver names, as well as named return values. +The analyzer also checks method receiver names, named return values, and type parameter names. +Arbitrary declarations such as `f *foo` can be ignored, as well as idiomatic `ok` variables. Conventional Go parameters such as `ctx context.Context` or `t *testing.T` will always be ignored. **Example output** @@ -41,10 +42,12 @@ linters-settings: # The minimum length of a variable's name that is considered "long." (defaults to 3) # Variable names that are at least this long will be ignored. min-name-length: 3 - # Check method receiver names. (defaults to false) + # Check method receiver. (defaults to false) check-receiver: false # Check named return values. (defaults to false) check-return: false + # Check type parameters. (defaults to false) + check-type-param: false # Ignore "ok" variables that hold the bool return value of a type assertion. (defaults to false) ignore-type-assert-ok: false # Ignore "ok" variables that hold the bool return value of a map index. (defaults to false) @@ -55,8 +58,14 @@ linters-settings: ignore-names: - err # Optional list of variable declarations that should be ignored completely. (defaults to empty list) - # Entries must be in the form of "<name> <type>" or "<name> *<type>" for - # variables, or "const <name>" for constants.
+ # Entries must be in one of the following forms (see below for examples): + # - for variables, parameters, or named return values: + # - <name> <type> + # - <name> *<type> + # - for type parameters: + # - <name> <type> + # - for constants: + # - const <name> ignore-decls: - c echo.Context - t testing.T - f *foo.Bar - e error - i int - const C + - T any ``` @@ -97,6 +107,8 @@ Flags: check method receiver names -checkReturn check named return values + -checkTypeParam + check type parameter names -cpuprofile string write CPU profile to this file -debug string diff --git a/vendor/github.com/blizzy78/varnamelen/doc.go b/vendor/github.com/blizzy78/varnamelen/doc.go new file mode 100644 index 000000000..d63c71cfb --- /dev/null +++ b/vendor/github.com/blizzy78/varnamelen/doc.go @@ -0,0 +1,3 @@ +// Package varnamelen implements an analyzer checking that the length of a variable's name +// matches its usage scope. +package varnamelen diff --git a/vendor/github.com/blizzy78/varnamelen/flags.go b/vendor/github.com/blizzy78/varnamelen/flags.go index a4aeb1818..ee80774f9 100644 --- a/vendor/github.com/blizzy78/varnamelen/flags.go +++ b/vendor/github.com/blizzy78/varnamelen/flags.go @@ -96,3 +96,14 @@ func (dv *declarationsValue) matchParameter(param parameter) bool { return false } + +// matchTypeParameter returns true if param matches any of the declarations in dv. +func (dv *declarationsValue) matchTypeParameter(param typeParam) bool { + for _, decl := range dv.Values { + if param.match(decl) { + return true + } + } + + return false +} diff --git a/vendor/github.com/blizzy78/varnamelen/typeparam.go b/vendor/github.com/blizzy78/varnamelen/typeparam.go new file mode 100644 index 000000000..a1f3de99a --- /dev/null +++ b/vendor/github.com/blizzy78/varnamelen/typeparam.go @@ -0,0 +1,35 @@ +//go:build go1.18 +// +build go1.18 + +package varnamelen + +import "go/ast" + +// isTypeParam returns true if field is a type parameter of any of the given funcs. +func isTypeParam(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit) bool { //nolint:gocognit // it's not that complicated + for _, f := range funcs { + if f.Type.TypeParams == nil { + continue + } + + for _, p := range f.Type.TypeParams.List { + if p == field { + return true + } + } + } + + for _, f := range funcLits { + if f.Type.TypeParams == nil { + continue + } + + for _, p := range f.Type.TypeParams.List { + if p == field { + return true + } + } + } + + return false +} diff --git a/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go b/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go new file mode 100644 index 000000000..7856651b9 --- /dev/null +++ b/vendor/github.com/blizzy78/varnamelen/typeparam_go1.16.go @@ -0,0 +1,11 @@ +//go:build (go1.16 && !go1.18) || (go1.17 && !go1.18) +// +build go1.16,!go1.18 go1.17,!go1.18 + +package varnamelen + +import "go/ast" + +// isTypeParam returns true if field is a type parameter of any of the given funcs.
+func isTypeParam(_ *ast.Field, _ []*ast.FuncDecl, _ []*ast.FuncLit) bool { + return false +} diff --git a/vendor/github.com/blizzy78/varnamelen/varnamelen.go b/vendor/github.com/blizzy78/varnamelen/varnamelen.go index 8fdf5b79c..a5b960311 100644 --- a/vendor/github.com/blizzy78/varnamelen/varnamelen.go +++ b/vendor/github.com/blizzy78/varnamelen/varnamelen.go @@ -4,6 +4,7 @@ import ( "go/ast" "go/token" "go/types" + "sort" "strings" "golang.org/x/tools/go/analysis" @@ -24,7 +25,7 @@ type varNameLen struct { // ignoreNames is an optional list of variable names that should be ignored completely. ignoreNames stringsValue - // checkReceiver determines whether a method receiver's name should be checked. + // checkReceiver determines whether method receivers should be checked. checkReceiver bool // checkReturn determines whether named return values should be checked. @@ -41,6 +42,9 @@ type varNameLen struct { // ignoreDeclarations is an optional list of variable declarations that should be ignored completely. ignoreDeclarations declarationsValue + + // checkTypeParameters determines whether type parameters should be checked. + checkTypeParameters bool } // variable represents a declared variable. @@ -73,6 +77,18 @@ type parameter struct { field *ast.Field } +// typeParam represents a declared type parameter. +type typeParam struct { + // name is the name of the type parameter. + name string + + // typ is the type of the type parameter. + typ string + + // field is the field that declares the type parameter. + field *ast.Field +} + // declaration is a variable declaration. type declaration struct { // name is the name of the variable. @@ -120,7 +136,7 @@ var conventionalDecls = []declaration{ parseDeclaration("tb testing.TB"), } -// NewAnalyzer returns a new analyzer that checks variable name length. +// NewAnalyzer returns a new analyzer. 
func NewAnalyzer() *analysis.Analyzer { vnl := varNameLen{ maxDistance: defaultMaxDistance, @@ -137,7 +153,7 @@ func NewAnalyzer() *analysis.Analyzer { "to comprehend.", Run: func(pass *analysis.Pass) (interface{}, error) { - vnl.run(pass) + (&vnl).run(pass) return nil, nil }, @@ -149,27 +165,29 @@ func NewAnalyzer() *analysis.Analyzer { analyzer.Flags.IntVar(&vnl.maxDistance, "maxDistance", defaultMaxDistance, "maximum number of lines of variable usage scope considered 'short'") analyzer.Flags.IntVar(&vnl.minNameLength, "minNameLength", defaultMinNameLength, "minimum length of variable name considered 'long'") analyzer.Flags.Var(&vnl.ignoreNames, "ignoreNames", "comma-separated list of ignored variable names") - analyzer.Flags.BoolVar(&vnl.checkReceiver, "checkReceiver", false, "check method receiver names") + analyzer.Flags.BoolVar(&vnl.checkReceiver, "checkReceiver", false, "check method receivers") analyzer.Flags.BoolVar(&vnl.checkReturn, "checkReturn", false, "check named return values") analyzer.Flags.BoolVar(&vnl.ignoreTypeAssertOk, "ignoreTypeAssertOk", false, "ignore 'ok' variables that hold the bool return value of a type assertion") analyzer.Flags.BoolVar(&vnl.ignoreMapIndexOk, "ignoreMapIndexOk", false, "ignore 'ok' variables that hold the bool return value of a map index") analyzer.Flags.BoolVar(&vnl.ignoreChannelReceiveOk, "ignoreChanRecvOk", false, "ignore 'ok' variables that hold the bool return value of a channel receive") analyzer.Flags.Var(&vnl.ignoreDeclarations, "ignoreDecls", "comma-separated list of ignored variable declarations") + analyzer.Flags.BoolVar(&vnl.checkTypeParameters, "checkTypeParam", false, "check type parameters") return &analyzer } // Run applies v to a package, according to pass. func (v *varNameLen) run(pass *analysis.Pass) { - varToDist, paramToDist, returnToDist := v.distances(pass) + varToDist, paramToDist, returnToDist, typeParamToDist := v.distances(pass) v.checkVariables(pass, varToDist) v.checkParams(pass, paramToDist) v.checkReturns(pass, returnToDist) + v.checkTypeParams(pass, typeParamToDist) } // checkVariables applies v to variables in varToDist. -func (v *varNameLen) checkVariables(pass *analysis.Pass, varToDist map[variable]int) { +func (v *varNameLen) checkVariables(pass *analysis.Pass, varToDist map[variable]int) { //nolint:gocognit // it's not that complicated for variable, dist := range varToDist { if v.ignoreNames.contains(variable.name) { continue @@ -195,6 +213,10 @@ func (v *varNameLen) checkVariables(pass *analysis.Pass, varToDist map[variable] continue } + if variable.isConventional() { + continue + } + if variable.assign != nil { pass.Reportf(variable.assign.Pos(), "%s name '%s' is too short for the scope of its usage", variable.kindName(), variable.name) continue @@ -246,6 +268,25 @@ func (v *varNameLen) checkReturns(pass *analysis.Pass, returnToDist map[paramete } } +// checkTypeParams applies v to type parameters in paramToDist. +func (v *varNameLen) checkTypeParams(pass *analysis.Pass, paramToDist map[typeParam]int) { + for param, dist := range paramToDist { + if v.ignoreNames.contains(param.name) { + continue + } + + if v.ignoreDeclarations.matchTypeParameter(param) { + continue + } + + if v.checkNameAndDistance(param.name, dist) { + continue + } + + pass.Reportf(param.field.Pos(), "type parameter name '%s' is too short for the scope of its usage", param.name) + } +} + // checkNameAndDistance returns true if name or dist are considered "short". 
func (v *varNameLen) checkNameAndDistance(name string, dist int) bool { if len(name) >= v.minNameLength { @@ -277,18 +318,25 @@ func (v *varNameLen) checkChannelReceiveOk(vari variable) bool { return v.ignoreChannelReceiveOk && vari.isChannelReceiveOk() } -// distances returns maps of variables, parameters, and return values mapping to their longest usage distances. -func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[parameter]int, map[parameter]int) { - assignIdents, valueSpecIdents, paramIdents, returnIdents, imports := v.identsAndImports(pass) +// distances returns maps of variables, parameters, return values, and type parameters mapping to their longest usage distances. +func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[parameter]int, map[parameter]int, map[typeParam]int) { + assignIdents, valueSpecIdents, paramIdents, returnIdents, typeParamIdents, imports, switches := v.identsAndImports(pass) varToDist := map[variable]int{} for _, ident := range assignIdents { assign := ident.Obj.Decl.(*ast.AssignStmt) //nolint:forcetypeassert // check is done in identsAndImports + var typ string + if isTypeSwitchAssign(assign, switches) { + typ = "" + } else { + typ = shortTypeName(pass.TypesInfo.TypeOf(ident), imports) + } + variable := variable{ name: ident.Name, - typ: shortTypeName(pass.TypesInfo.TypeOf(identAssignExpr(ident, assign)), imports), + typ: typ, assign: assign, } @@ -303,7 +351,7 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param variable := variable{ name: ident.Name, constant: ident.Obj.Kind == ast.Con, - typ: shortTypeName(pass.TypesInfo.TypeOf(valueSpec.Type), imports), + typ: shortTypeName(pass.TypesInfo.TypeOf(ident), imports), valueSpec: valueSpec, } @@ -335,7 +383,7 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param param := parameter{ name: ident.Name, - typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), + typ: shortTypeName(pass.TypesInfo.TypeOf(ident), imports), field: field, } @@ -344,28 +392,52 @@ func (v *varNameLen) distances(pass *analysis.Pass) (map[variable]int, map[param returnToDist[param] = useLine - declLine } - return varToDist, paramToDist, returnToDist + typeParamToDist := map[typeParam]int{} + + for _, ident := range typeParamIdents { + field := ident.Obj.Decl.(*ast.Field) //nolint:forcetypeassert // check is done in identsAndImports + + param := typeParam{ + name: ident.Name, + typ: shortTypeName(pass.TypesInfo.TypeOf(field.Type), imports), + field: field, + } + + useLine := pass.Fset.Position(ident.NamePos).Line + declLine := pass.Fset.Position(field.Pos()).Line + typeParamToDist[param] = useLine - declLine + } + + return varToDist, paramToDist, returnToDist, typeParamToDist } -// identsAndImports returns Idents referencing assign statements, value specifications, parameters, and return values, respectively, -// as well as import declarations. -func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast.Ident, []*ast.Ident, []*ast.Ident, []importDeclaration) { //nolint:gocognit,cyclop // this is complex stuff +// identsAndImports returns Idents referencing assign statements, value specifications, parameters, +// return values, and type parameters, respectively, as well as import declarations, and type switch statements. 
+func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast.Ident, []*ast.Ident, []*ast.Ident, //nolint:gocognit,cyclop // this is complex stuff + []*ast.Ident, []importDeclaration, []*ast.TypeSwitchStmt) { inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) //nolint:forcetypeassert // inspect.Analyzer always returns *inspector.Inspector filter := []ast.Node{ (*ast.ImportSpec)(nil), (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.CompositeLit)(nil), + (*ast.TypeSwitchStmt)(nil), (*ast.Ident)(nil), } - funcs := []*ast.FuncDecl{} - methods := []*ast.FuncDecl{} - - imports := []importDeclaration{} assignIdents := []*ast.Ident{} valueSpecIdents := []*ast.Ident{} paramIdents := []*ast.Ident{} returnIdents := []*ast.Ident{} + typeParamIdents := []*ast.Ident{} + imports := []importDeclaration{} + switches := []*ast.TypeSwitchStmt{} + + funcs := []*ast.FuncDecl{} + methods := []*ast.FuncDecl{} + funcLits := []*ast.FuncLit{} + compositeLits := []*ast.CompositeLit{} inspector.Preorder(filter, func(node ast.Node) { switch node2 := node.(type) { @@ -386,11 +458,24 @@ func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast methods = append(methods, node2) + case *ast.FuncLit: + funcLits = append(funcLits, node2) + + case *ast.CompositeLit: + compositeLits = append(compositeLits, node2) + + case *ast.TypeSwitchStmt: + switches = append(switches, node2) + case *ast.Ident: if node2.Obj == nil { return } + if isCompositeLitKey(node2, compositeLits) { + return + } + switch objDecl := node2.Obj.Decl.(type) { case *ast.AssignStmt: assignIdents = append(assignIdents, node2) @@ -399,21 +484,31 @@ func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast valueSpecIdents = append(valueSpecIdents, node2) case *ast.Field: - if isReceiver(objDecl, methods) && !v.checkReceiver { - return - } + switch { + case isReceiver(objDecl, methods): + if !v.checkReceiver { + return + } + + paramIdents = append(paramIdents, node2) - if isReturn(objDecl, funcs) { + case isReturn(objDecl, funcs, funcLits): if !v.checkReturn { return } returnIdents = append(returnIdents, node2) - return - } + case isTypeParam(objDecl, funcs, funcLits): + if !v.checkTypeParameters { + return + } + + typeParamIdents = append(typeParamIdents, node2) - paramIdents = append(paramIdents, node2) + case isParam(objDecl, funcs, funcLits, methods): + paramIdents = append(paramIdents, node2) + } } } }) @@ -423,7 +518,12 @@ func (v *varNameLen) identsAndImports(pass *analysis.Pass) ([]*ast.Ident, []*ast self: true, }) - return assignIdents, valueSpecIdents, paramIdents, returnIdents, imports + sort.Slice(imports, func(a, b int) bool { + // reversed: longest path first + return len(imports[a].path) > len(imports[b].path) + }) + + return assignIdents, valueSpecIdents, paramIdents, returnIdents, typeParamIdents, imports, switches } func importSpecToDecl(spec *ast.ImportSpec, imports []*types.Package) (importDeclaration, bool) { @@ -555,6 +655,18 @@ func (v variable) isChannelReceiveOk() bool { return true } +// isConventional returns true if v matches a conventional Go variable/parameter name and type, +// such as "ctx context.Context" or "t *testing.T". +func (v variable) isConventional() bool { + for _, decl := range conventionalDecls { + if v.match(decl) { + return true + } + } + + return false +} + // match returns true if v matches decl. 
func (v variable) match(decl declaration) bool { if v.name != decl.name { @@ -599,7 +711,7 @@ func isReceiver(field *ast.Field, methods []*ast.FuncDecl) bool { } // isReturn returns true if field is a return value of any of the given funcs. -func isReturn(field *ast.Field, funcs []*ast.FuncDecl) bool { +func isReturn(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit) bool { //nolint:gocognit // it's not that complicated for _, f := range funcs { if f.Type.Results == nil { continue @@ -612,11 +724,97 @@ func isReturn(field *ast.Field, funcs []*ast.FuncDecl) bool { } } + for _, f := range funcLits { + if f.Type.Results == nil { + continue + } + + for _, r := range f.Type.Results.List { + if r == field { + return true + } + } + } + + return false +} + +// isParam returns true if field is a parameter of any of the given funcs. +func isParam(field *ast.Field, funcs []*ast.FuncDecl, funcLits []*ast.FuncLit, methods []*ast.FuncDecl) bool { //nolint:gocognit,cyclop // it's not that complicated + for _, f := range funcs { + if f.Type.Params == nil { + continue + } + + for _, p := range f.Type.Params.List { + if p == field { + return true + } + } + } + + for _, f := range funcLits { + if f.Type.Params == nil { + continue + } + + for _, p := range f.Type.Params.List { + if p == field { + return true + } + } + } + + for _, m := range methods { + if m.Type.Params == nil { + continue + } + + for _, p := range m.Type.Params.List { + if p == field { + return true + } + } + } + return false } -// isConventional returns true if p is a conventional Go parameter, such as "ctx context.Context" or -// "t *testing.T". +// isCompositeLitKey returns true if ident is a key of any of the given composite literals. +func isCompositeLitKey(ident *ast.Ident, compositeLits []*ast.CompositeLit) bool { + for _, cl := range compositeLits { + if _, ok := cl.Type.(*ast.MapType); ok { + continue + } + + for _, kvExpr := range cl.Elts { + kv, ok := kvExpr.(*ast.KeyValueExpr) + if !ok { + continue + } + + if kv.Key == ident { + return true + } + } + } + + return false +} + +// isTypeSwitchAssign returns true if assign is an assign statement of any of the given type switch statements. +func isTypeSwitchAssign(assign *ast.AssignStmt, switches []*ast.TypeSwitchStmt) bool { + for _, s := range switches { + if s.Assign == assign { + return true + } + } + + return false +} + +// isConventional returns true if v matches a conventional Go variable/parameter name and type, +// such as "ctx context.Context" or "t *testing.T". func (p parameter) isConventional() bool { for _, decl := range conventionalDecls { if p.match(decl) { @@ -636,6 +834,15 @@ func (p parameter) match(decl declaration) bool { return decl.matchType(p.typ) } +// match returns whether p matches decl. +func (p typeParam) match(decl declaration) bool { + if p.name != decl.name { + return false + } + + return decl.matchType(p.typ) +} + // parseDeclaration parses and returns a variable declaration parsed from decl. func parseDeclaration(decl string) declaration { if strings.HasPrefix(decl, "const ") { @@ -658,17 +865,6 @@ func (d declaration) matchType(typ string) bool { return d.typ == typ } -// identAssignExpr returns the expression that is assigned to ident. -// -// TODO: This currently only works for simple one-to-one assignments without the use of multi-values. 
-func identAssignExpr(_ *ast.Ident, assign *ast.AssignStmt) ast.Expr { - if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { - return nil - } - - return assign.Rhs[0] -} - // shortTypeName returns the short name of typ, with respect to imports. // For example, if package github.com/matryer/is is imported with alias "x", // and typ represents []*github.com/matryer/is.I, shortTypeName will return "[]*x.I". @@ -683,12 +879,12 @@ func shortTypeName(typ types.Type, imports []importDeclaration) string { for _, imp := range imports { prefix := imp.path + "." - if imp.self { - typStr = strings.ReplaceAll(typStr, prefix, "") - continue + replace := "" + if !imp.self { + replace = imp.name + "." } - typStr = strings.ReplaceAll(typStr, prefix, imp.name+".") + typStr = strings.ReplaceAll(typStr, prefix, replace) } return typStr diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md index fe2461c1d..197959738 100644 --- a/vendor/github.com/breml/errchkjson/README.md +++ b/vendor/github.com/breml/errchkjson/README.md @@ -2,7 +2,7 @@ [![Test Status](https://github.com/breml/errchkjson/actions/workflows/ci.yml/badge.svg)](https://github.com/breml/errchkjson/actions/workflows/ci.yml) [![Go Report Card](https://goreportcard.com/badge/github.com/breml/errchkjson)](https://goreportcard.com/report/github.com/breml/errchkjson) [![License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE) -Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omited. +Checks types passed to the json encoding functions. Reports unsupported types and reports occurrences where the check for the returned error can be omitted. Consider this [http.Handler](https://pkg.go.dev/net/http#Handler): @@ -114,6 +114,12 @@ Forbidden basic types: Any composed types (struct, map, slice, array) containing a forbidden basic type. Any map using a key with a forbidden type (`bool`, `float32`, `float64`, `struct`). +## Accepted edge case + +For `encoding/json.MarshalIndent`, there is a (pathological) edge case, where this +function could [return an error](https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/encoding/json/scanner.go;drc=refs%2Ftags%2Fgo1.18;l=181) for an otherwise safe argument, if the argument has +a nesting depth larger than [`10000`](https://cs.opensource.google/go/go/+/refs/tags/go1.18:src/encoding/json/scanner.go;drc=refs%2Ftags%2Fgo1.18;l=144) (as of Go 1.18). 
+ ## Bugs found during development During the development of `errcheckjson`, the following issues in package `encoding/json` of the Go standard library have been found and PR have been merged: diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go index 5a5b6d1a5..746709c76 100644 --- a/vendor/github.com/breml/errchkjson/errchkjson.go +++ b/vendor/github.com/breml/errchkjson/errchkjson.go @@ -59,11 +59,11 @@ func (e *errchkjson) run(pass *analysis.Pass) (interface{}, error) { switch fn.FullName() { case "encoding/json.Marshal", "encoding/json.MarshalIndent": - e.handleJSONMarshal(pass, ce, fn.FullName(), true) + e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier, e.omitSafe) case "(*encoding/json.Encoder).Encode": - e.handleJSONMarshal(pass, ce, fn.FullName(), true) + e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier, true) default: - return true + e.inspectArgs(pass, ce.Args) } return false } @@ -85,9 +85,9 @@ func (e *errchkjson) run(pass *analysis.Pass) (interface{}, error) { switch fn.FullName() { case "encoding/json.Marshal", "encoding/json.MarshalIndent": - e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier(as.Lhs[1])) + e.handleJSONMarshal(pass, ce, fn.FullName(), evaluateMarshalErrorTarget(as.Lhs[1]), e.omitSafe) case "(*encoding/json.Encoder).Encode": - e.handleJSONMarshal(pass, ce, fn.FullName(), blankIdentifier(as.Lhs[0])) + e.handleJSONMarshal(pass, ce, fn.FullName(), evaluateMarshalErrorTarget(as.Lhs[0]), true) default: return true } @@ -98,20 +98,28 @@ func (e *errchkjson) run(pass *analysis.Pass) (interface{}, error) { return nil, nil } -func blankIdentifier(n ast.Expr) bool { +func evaluateMarshalErrorTarget(n ast.Expr) marshalErrorTarget { if errIdent, ok := n.(*ast.Ident); ok { if errIdent.Name == "_" { - return true + return blankIdentifier } } - return false + return variableAssignment } -func (e *errchkjson) handleJSONMarshal(pass *analysis.Pass, ce *ast.CallExpr, fnName string, blankIdentifier bool) { +type marshalErrorTarget int + +const ( + blankIdentifier = iota // the returned error from the JSON marshal function is assigned to the blank identifier "_". + variableAssignment // the returned error from the JSON marshal function is assigned to a variable. + functionArgument // the returned error from the JSON marshal function is passed to an other function as argument. +) + +func (e *errchkjson) handleJSONMarshal(pass *analysis.Pass, ce *ast.CallExpr, fnName string, errorTarget marshalErrorTarget, omitSafe bool) { t := pass.TypesInfo.TypeOf(ce.Args[0]) if t == nil { // Not sure, if this is at all possible - if blankIdentifier { + if errorTarget == blankIdentifier { pass.Reportf(ce.Pos(), "Type of argument to `%s` could not be evaluated and error return value is not checked", fnName) } return @@ -131,15 +139,15 @@ func (e *errchkjson) handleJSONMarshal(pass *analysis.Pass, ce *ast.CallExpr, fn pass.Reportf(ce.Pos(), "Error argument passed to `%s` does not contain any exported field", fnName) } // Only care about unsafe types if they are assigned to the blank identifier. 
- if blankIdentifier { + if errorTarget == blankIdentifier { pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked: %v", fnName, err) } } - if err == nil && !blankIdentifier && !e.omitSafe { + if err == nil && errorTarget == variableAssignment && !omitSafe { pass.Reportf(ce.Pos(), "Error return value of `%s` is checked but passed argument is safe", fnName) } // Report an error, if err for json.Marshal is not checked and safe types are omitted - if err == nil && blankIdentifier && e.omitSafe { + if err == nil && errorTarget == blankIdentifier && omitSafe { pass.Reportf(ce.Pos(), "Error return value of `%s` is not checked", fnName) } } @@ -269,6 +277,36 @@ func jsonSafeMapKey(t types.Type) error { } } +func (e *errchkjson) inspectArgs(pass *analysis.Pass, args []ast.Expr) { + for _, a := range args { + ast.Inspect(a, func(n ast.Node) bool { + if n == nil { + return true + } + + ce, ok := n.(*ast.CallExpr) + if !ok { + return false + } + + fn, _ := typeutil.Callee(pass.TypesInfo, ce).(*types.Func) + if fn == nil { + return true + } + + switch fn.FullName() { + case "encoding/json.Marshal", "encoding/json.MarshalIndent": + e.handleJSONMarshal(pass, ce, fn.FullName(), functionArgument, e.omitSafe) + case "(*encoding/json.Encoder).Encode": + e.handleJSONMarshal(pass, ce, fn.FullName(), functionArgument, true) + default: + e.inspectArgs(pass, ce.Args) + } + return false + }) + } +} + // Construct *types.Interface for interface encoding.TextMarshaler // type TextMarshaler interface { // MarshalText() (text []byte, err error) diff --git a/vendor/github.com/firefart/nonamedreturns/LICENSE b/vendor/github.com/firefart/nonamedreturns/LICENSE new file mode 100644 index 000000000..f288702d2 --- /dev/null +++ b/vendor/github.com/firefart/nonamedreturns/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. 
+ + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. 
+ + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + <one line to give the program's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see <https://www.gnu.org/licenses/>. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + <program> Copyright (C) <year> <name of author> + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +<https://www.gnu.org/licenses/>. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +<https://www.gnu.org/licenses/why-not-lgpl.html>. 
diff --git a/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go b/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go new file mode 100644 index 000000000..465cce2d4 --- /dev/null +++ b/vendor/github.com/firefart/nonamedreturns/analyzer/analyzer.go @@ -0,0 +1,60 @@ +package analyzer + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "nonamedreturns", + Doc: "Reports all named returns", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // only filter function defintions + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + var funcResults *ast.FieldList + + switch n := node.(type) { + case *ast.FuncLit: + funcResults = n.Type.Results + case *ast.FuncDecl: + funcResults = n.Type.Results + default: + return + } + + // no return values + if funcResults == nil { + return + } + + resultsList := funcResults.List + + for _, p := range resultsList { + if len(p.Names) == 0 { + // all good, the parameter is not named + continue + } + + for _, n := range p.Names { + pass.Reportf(node.Pos(), "named return %q with type %q found", n.Name, types.ExprString(p.Type)) + } + } + }) + + return nil, nil +} diff --git a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md index efbc9febe..774a61122 100644 --- a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md +++ b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md @@ -4,6 +4,15 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.5.1] - 2022-04-06 +### Fixed +- Don't skip directories `.` and `..` + +## [0.5.0] - 2022-03-22 +### Changed +- Ignore `vendor` and `testdata` directories and directories with names + that begin with `.` or `_` + ## [0.4.0] - 2021-12-19 ### Added - Support method receivers with type parameters introduced in Go 1.18 diff --git a/vendor/github.com/fzipp/gocyclo/analyze.go b/vendor/github.com/fzipp/gocyclo/analyze.go index 1f3479ab5..2d8bcff25 100644 --- a/vendor/github.com/fzipp/gocyclo/analyze.go +++ b/vendor/github.com/fzipp/gocyclo/analyze.go @@ -41,6 +41,9 @@ func Analyze(paths []string, ignore *regexp.Regexp) Stats { func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { filepath.WalkDir(dirname, func(path string, entry fs.DirEntry, err error) error { + if isSkipDir(entry) { + return filepath.SkipDir + } if err == nil && isGoFile(entry) { stats = analyzeFile(path, ignore, stats) } @@ -49,6 +52,17 @@ func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { return stats } +var skipDirs = map[string]bool{ + "testdata": true, + "vendor": true, +} + +func isSkipDir(entry fs.DirEntry) bool { + return entry.IsDir() && (skipDirs[entry.Name()] || + (strings.HasPrefix(entry.Name(), ".") && entry.Name() != "." 
&& entry.Name() != "..") || + strings.HasPrefix(entry.Name(), "_")) +} + func isGoFile(entry fs.DirEntry) bool { return !entry.IsDir() && strings.HasSuffix(entry.Name(), ".go") } diff --git a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go index 8e931cc4b..b4000a8ce 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go @@ -1,7 +1,6 @@ package checkers import ( - "fmt" "go/ast" "go/token" "strconv" @@ -295,7 +294,7 @@ func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { if match(&comb) { lhs.Op = token.EQL v := c1 + comb.resDelta - lhs.Y.(*ast.BasicLit).Value = fmt.Sprint(v) + lhs.Y.(*ast.BasicLit).Value = strconv.FormatInt(v, 10) cur.Replace(lhs) return true } @@ -317,7 +316,7 @@ func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { if match(&comb) { lhs.Op = token.NEQ v := c1 + comb.resDelta - lhs.Y.(*ast.BasicLit).Value = fmt.Sprint(v) + lhs.Y.(*ast.BasicLit).Value = strconv.FormatInt(v, 10) cur.Replace(lhs) return true } diff --git a/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go b/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go index e37fdf14a..d9ede0bcc 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go +++ b/vendor/github.com/go-critic/go-critic/checkers/embedded_rules.go @@ -7,9 +7,10 @@ import ( "go/token" "os" + "github.com/quasilyte/go-ruleguard/ruleguard" + "github.com/go-critic/go-critic/checkers/rulesdata" "github.com/go-critic/go-critic/framework/linter" - "github.com/quasilyte/go-ruleguard/ruleguard" ) //go:generate go run ./rules/precompile.go -rules ./rules/rules.go -o ./rulesdata/rulesdata.go @@ -65,8 +66,8 @@ func init() { collection.AddChecker(info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { parseContext := &ruleguard.LoadContext{ Fset: fset, - GroupFilter: func(name string) bool { - return name == g.Name + GroupFilter: func(gr *ruleguard.GoRuleGroup) bool { + return gr.Name == g.Name }, DebugImports: ruleguardDebug, DebugPrint: func(s string) { @@ -95,9 +96,10 @@ type embeddedRuleguardChecker struct { func (c *embeddedRuleguardChecker) WalkFile(f *ast.File) { runRuleguardEngine(c.ctx, f, c.engine, &ruleguard.RunContext{ - Pkg: c.ctx.Pkg, - Types: c.ctx.TypesInfo, - Sizes: c.ctx.SizesInfo, - Fset: c.ctx.FileSet, + Pkg: c.ctx.Pkg, + Types: c.ctx.TypesInfo, + Sizes: c.ctx.SizesInfo, + Fset: c.ctx.FileSet, + TruncateLen: 100, }) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go index b7dd15948..5b15e05ed 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go @@ -8,9 +8,10 @@ import ( "strings" "unicode/utf8" + "github.com/quasilyte/regex/syntax" + "github.com/go-critic/go-critic/checkers/internal/astwalk" "github.com/go-critic/go-critic/framework/linter" - "github.com/quasilyte/regex/syntax" ) func init() { @@ -497,9 +498,9 @@ func (c *regexpSimplifyChecker) simplifyCharRange(rng syntax.Expr) string { case 0: return lo case 1: - return fmt.Sprintf("%s%s", lo, hi) + 
return lo + hi case 2: - return fmt.Sprintf("%s%s%s", lo, string(lo[0]+1), hi) + return lo + string(lo[0]+1) + hi } } diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go index 41d8754ae..35c6a6449 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -135,38 +135,77 @@ func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) ( enabledGroups := make(map[string]bool) disabledGroups := make(map[string]bool) + enabledTags := make(map[string]bool) + disabledTags := make(map[string]bool) for _, g := range strings.Split(info.Params.String("disable"), ",") { g = strings.TrimSpace(g) + if strings.HasPrefix(g, "#") { + disabledTags[strings.TrimPrefix(g, "#")] = true + continue + } + disabledGroups[g] = true } flagEnable := info.Params.String("enable") if flagEnable != "" { for _, g := range strings.Split(flagEnable, ",") { g = strings.TrimSpace(g) + if strings.HasPrefix(g, "#") { + enabledTags[strings.TrimPrefix(g, "#")] = true + continue + } + enabledGroups[g] = true } } + + if !enabledTags["experimental"] { + disabledTags["experimental"] = true + } ruleguardDebug := os.Getenv("GOCRITIC_RULEGUARD_DEBUG") != "" + inEnabledTags := func(g *ruleguard.GoRuleGroup) bool { + for _, t := range g.DocTags { + if enabledTags[t] { + return true + } + } + return false + } + inDisabledTags := func(g *ruleguard.GoRuleGroup) string { + for _, t := range g.DocTags { + if disabledTags[t] { + return t + } + } + return "" + } + loadContext := &ruleguard.LoadContext{ Fset: fset, DebugImports: ruleguardDebug, DebugPrint: debugPrint, - GroupFilter: func(g string) bool { + GroupFilter: func(g *ruleguard.GoRuleGroup) bool { + enabled := flagEnable == "" || enabledGroups[g.Name] || inEnabledTags(g) whyDisabled := "" - enabled := flagEnable == "" || enabledGroups[g] + switch { case !enabled: - whyDisabled = "not enabled by -enabled flag" - case disabledGroups[g]: - whyDisabled = "disabled by -disable flag" + whyDisabled = "not enabled by name or tag (-enable flag)" + case disabledGroups[g.Name]: + whyDisabled = "disabled by name (-disable flag)" + default: + if tag := inDisabledTags(g); tag != "" { + whyDisabled = fmt.Sprintf("disabled by %s tag (-disable flag)", tag) + } } + if ruleguardDebug { if whyDisabled != "" { - debugPrint(fmt.Sprintf("(-) %s is %s", g, whyDisabled)) + debugPrint(fmt.Sprintf("(-) %s is %s", g.Name, whyDisabled)) } else { - debugPrint(fmt.Sprintf("(+) %s is enabled", g)) + debugPrint(fmt.Sprintf("(+) %s is enabled", g.Name)) } } return whyDisabled == "" @@ -225,10 +264,11 @@ func (c *ruleguardChecker) WalkFile(f *ast.File) { DebugPrint: func(s string) { fmt.Fprintln(os.Stderr, s) }, - Pkg: c.ctx.Pkg, - Types: c.ctx.TypesInfo, - Sizes: c.ctx.SizesInfo, - Fset: c.ctx.FileSet, + Pkg: c.ctx.Pkg, + Types: c.ctx.TypesInfo, + Sizes: c.ctx.SizesInfo, + Fset: c.ctx.FileSet, + TruncateLen: 100, }) } diff --git a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go index 8a952b7c2..e145ba4fe 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go +++ b/vendor/github.com/go-critic/go-critic/checkers/rulesdata/rulesdata.go @@ -9,24 +9,21 @@ var PrecompiledRules = &ir.File{ CustomDecls: []string{}, BundleImports: 
[]ir.BundleImport{}, RuleGroups: []ir.RuleGroup{ - ir.RuleGroup{ + { Line: 11, Name: "redundantSprint", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects redundant fmt.Sprint calls", - DocBefore: "fmt.Sprint(x)", - DocAfter: "x.String()", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects redundant fmt.Sprint calls", + DocBefore: "fmt.Sprint(x)", + DocAfter: "x.String()", Rules: []ir.Rule{ - ir.Rule{ + { Line: 12, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 12, Value: "fmt.Sprint($x)"}, - ir.PatternString{Line: 12, Value: "fmt.Sprintf(\"%s\", $x)"}, - ir.PatternString{Line: 12, Value: "fmt.Sprintf(\"%v\", $x)"}, + {Line: 12, Value: "fmt.Sprint($x)"}, + {Line: 12, Value: "fmt.Sprintf(\"%s\", $x)"}, + {Line: 12, Value: "fmt.Sprintf(\"%v\", $x)"}, }, ReportTemplate: "use $x.String() instead", SuggestTemplate: "$x.String()", @@ -35,17 +32,15 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"x\"].Type.Implements(`fmt.Stringer`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 13, Op: ir.FilterStringOp, Src: "`fmt.Stringer`", Value: "fmt.Stringer"}, - }, + Args: []ir.FilterExpr{{Line: 13, Op: ir.FilterStringOp, Src: "`fmt.Stringer`", Value: "fmt.Stringer"}}, }, }, - ir.Rule{ + { Line: 17, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 17, Value: "fmt.Sprint($x)"}, - ir.PatternString{Line: 17, Value: "fmt.Sprintf(\"%s\", $x)"}, - ir.PatternString{Line: 17, Value: "fmt.Sprintf(\"%v\", $x)"}, + {Line: 17, Value: "fmt.Sprint($x)"}, + {Line: 17, Value: "fmt.Sprintf(\"%s\", $x)"}, + {Line: 17, Value: "fmt.Sprintf(\"%v\", $x)"}, }, ReportTemplate: "use $x.Error() instead", SuggestTemplate: "$x.Error()", @@ -54,17 +49,15 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"x\"].Type.Implements(`error`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 18, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}, - }, + Args: []ir.FilterExpr{{Line: 18, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}}, }, }, - ir.Rule{ + { Line: 22, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 22, Value: "fmt.Sprint($x)"}, - ir.PatternString{Line: 22, Value: "fmt.Sprintf(\"%s\", $x)"}, - ir.PatternString{Line: 22, Value: "fmt.Sprintf(\"%v\", $x)"}, + {Line: 22, Value: "fmt.Sprint($x)"}, + {Line: 22, Value: "fmt.Sprintf(\"%s\", $x)"}, + {Line: 22, Value: "fmt.Sprintf(\"%v\", $x)"}, }, ReportTemplate: "$x is already string", SuggestTemplate: "$x", @@ -73,78 +66,69 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`string`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 23, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 23, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, - ir.RuleGroup{ + { Line: 32, Name: "deferUnlambda", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects deferred function literals that can be simplified", - DocBefore: "defer func() { f() }()", - DocAfter: "defer f()", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects deferred function literals that can be simplified", + DocBefore: "defer func() { f() }()", + DocAfter: "defer f()", Rules: []ir.Rule{ - ir.Rule{ - Line: 33, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 33, Value: "defer func() { $f($*args) }()"}, - }, + { + Line: 33, + SyntaxPatterns: 
[]ir.PatternString{{Line: 33, Value: "defer func() { $f($*args) }()"}}, ReportTemplate: "can rewrite as `defer $f($args)`", WhereExpr: ir.FilterExpr{ Line: 34, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\" && m[\"args\"].Const", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 34, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\" && m[\"f\"].Text != \"recover\"", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 34, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"f\"].Text != \"panic\"", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 34, Op: ir.FilterVarNodeIsOp, Src: "m[\"f\"].Node.Is(`Ident`)", Value: "f", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 34, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}, - }, + Args: []ir.FilterExpr{{Line: 34, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, }, - ir.FilterExpr{ + { Line: 34, Op: ir.FilterNeqOp, Src: "m[\"f\"].Text != \"panic\"", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - ir.FilterExpr{Line: 34, Op: ir.FilterStringOp, Src: "\"panic\"", Value: "panic"}, + {Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, + {Line: 34, Op: ir.FilterStringOp, Src: "\"panic\"", Value: "panic"}, }, }, }, }, - ir.FilterExpr{ + { Line: 34, Op: ir.FilterNeqOp, Src: "m[\"f\"].Text != \"recover\"", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, - ir.FilterExpr{Line: 34, Op: ir.FilterStringOp, Src: "\"recover\"", Value: "recover"}, + {Line: 34, Op: ir.FilterVarTextOp, Src: "m[\"f\"].Text", Value: "f"}, + {Line: 34, Op: ir.FilterStringOp, Src: "\"recover\"", Value: "recover"}, }, }, }, }, - ir.FilterExpr{ + { Line: 34, Op: ir.FilterVarConstOp, Src: "m[\"args\"].Const", @@ -153,32 +137,28 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.Rule{ - Line: 37, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 37, Value: "defer func() { $pkg.$f($*args) }()"}, - }, + { + Line: 37, + SyntaxPatterns: []ir.PatternString{{Line: 37, Value: "defer func() { $pkg.$f($*args) }()"}}, ReportTemplate: "can rewrite as `defer $pkg.$f($args)`", WhereExpr: ir.FilterExpr{ Line: 38, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const && m[\"pkg\"].Object.Is(`PkgName`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 38, Op: ir.FilterAndOp, Src: "m[\"f\"].Node.Is(`Ident`) && m[\"args\"].Const", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 38, Op: ir.FilterVarNodeIsOp, Src: "m[\"f\"].Node.Is(`Ident`)", Value: "f", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 38, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}, - }, + Args: []ir.FilterExpr{{Line: 38, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, }, - ir.FilterExpr{ + { Line: 38, Op: ir.FilterVarConstOp, Src: "m[\"args\"].Const", @@ -186,37 +166,30 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.FilterExpr{ + { Line: 38, Op: ir.FilterVarObjectIsOp, Src: "m[\"pkg\"].Object.Is(`PkgName`)", Value: "pkg", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 38, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}, - }, + Args: []ir.FilterExpr{{Line: 38, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, }, }, }, }, }, }, - ir.RuleGroup{ + { Line: 46, Name: "ioutilDeprecated", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects deprecated io/ioutil package 
usages", - DocBefore: "ioutil.ReadAll(r)", - DocAfter: "io.ReadAll(r)", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects deprecated io/ioutil package usages", + DocBefore: "ioutil.ReadAll(r)", + DocAfter: "io.ReadAll(r)", Rules: []ir.Rule{ - ir.Rule{ - Line: 47, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 47, Value: "ioutil.ReadAll($_)"}, - }, + { + Line: 47, + SyntaxPatterns: []ir.PatternString{{Line: 47, Value: "ioutil.ReadAll($_)"}}, ReportTemplate: "ioutil.ReadAll is deprecated, use io.ReadAll instead", WhereExpr: ir.FilterExpr{ Line: 48, @@ -225,11 +198,9 @@ var PrecompiledRules = &ir.File{ Value: "1.16", }, }, - ir.Rule{ - Line: 51, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 51, Value: "ioutil.ReadFile($_)"}, - }, + { + Line: 51, + SyntaxPatterns: []ir.PatternString{{Line: 51, Value: "ioutil.ReadFile($_)"}}, ReportTemplate: "ioutil.ReadFile is deprecated, use os.ReadFile instead", WhereExpr: ir.FilterExpr{ Line: 52, @@ -238,11 +209,9 @@ var PrecompiledRules = &ir.File{ Value: "1.16", }, }, - ir.Rule{ - Line: 55, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 55, Value: "ioutil.WriteFile($_, $_, $_)"}, - }, + { + Line: 55, + SyntaxPatterns: []ir.PatternString{{Line: 55, Value: "ioutil.WriteFile($_, $_, $_)"}}, ReportTemplate: "ioutil.WriteFile is deprecated, use os.WriteFile instead", WhereExpr: ir.FilterExpr{ Line: 56, @@ -251,11 +220,9 @@ var PrecompiledRules = &ir.File{ Value: "1.16", }, }, - ir.Rule{ - Line: 59, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 59, Value: "ioutil.ReadDir($_)"}, - }, + { + Line: 59, + SyntaxPatterns: []ir.PatternString{{Line: 59, Value: "ioutil.ReadDir($_)"}}, ReportTemplate: "ioutil.ReadDir is deprecated, use os.ReadDir instead", WhereExpr: ir.FilterExpr{ Line: 60, @@ -264,11 +231,9 @@ var PrecompiledRules = &ir.File{ Value: "1.16", }, }, - ir.Rule{ - Line: 63, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 63, Value: "ioutil.NopCloser($_)"}, - }, + { + Line: 63, + SyntaxPatterns: []ir.PatternString{{Line: 63, Value: "ioutil.NopCloser($_)"}}, ReportTemplate: "ioutil.NopCloser is deprecated, use io.NopCloser instead", WhereExpr: ir.FilterExpr{ Line: 64, @@ -277,11 +242,9 @@ var PrecompiledRules = &ir.File{ Value: "1.16", }, }, - ir.Rule{ - Line: 67, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 67, Value: "ioutil.Discard"}, - }, + { + Line: 67, + SyntaxPatterns: []ir.PatternString{{Line: 67, Value: "ioutil.Discard"}}, ReportTemplate: "ioutil.Discard is deprecated, use io.Discard instead", WhereExpr: ir.FilterExpr{ Line: 68, @@ -292,139 +255,119 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.RuleGroup{ + { Line: 76, Name: "badLock", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects suspicious mutex lock/unlock operations", - DocBefore: "mu.Lock(); mu.Unlock()", - DocAfter: "mu.Lock(); defer mu.Unlock()", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects suspicious mutex lock/unlock operations", + DocBefore: "mu.Lock(); mu.Unlock()", + DocAfter: "mu.Lock(); defer mu.Unlock()", Rules: []ir.Rule{ - ir.Rule{ - Line: 80, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 80, Value: "$mu1.Lock(); $mu2.Unlock()"}, - }, + { + Line: 80, + SyntaxPatterns: []ir.PatternString{{Line: 80, Value: "$mu1.Lock(); $mu2.Unlock()"}}, ReportTemplate: "defer is missing, mutex is unlocked immediately", WhereExpr: ir.FilterExpr{ Line: 81, Op: ir.FilterEqOp, Src: 
"m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 81, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, - ir.Rule{ - Line: 85, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 85, Value: "$mu1.RLock(); $mu2.RUnlock()"}, - }, + { + Line: 85, + SyntaxPatterns: []ir.PatternString{{Line: 85, Value: "$mu1.RLock(); $mu2.RUnlock()"}}, ReportTemplate: "defer is missing, mutex is unlocked immediately", WhereExpr: ir.FilterExpr{ Line: 86, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 86, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, - ir.Rule{ - Line: 91, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 91, Value: "$mu1.Lock(); defer $mu2.RUnlock()"}, - }, + { + Line: 91, + SyntaxPatterns: []ir.PatternString{{Line: 91, Value: "$mu1.Lock(); defer $mu2.RUnlock()"}}, ReportTemplate: "suspicious unlock, maybe Unlock was intended?", WhereExpr: ir.FilterExpr{ Line: 92, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 92, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, - ir.Rule{ - Line: 96, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 96, Value: "$mu1.RLock(); defer $mu2.Unlock()"}, - }, + { + Line: 96, + SyntaxPatterns: []ir.PatternString{{Line: 96, Value: "$mu1.RLock(); defer $mu2.Unlock()"}}, ReportTemplate: "suspicious unlock, maybe RUnlock was intended?", WhereExpr: ir.FilterExpr{ Line: 97, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 97, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, - ir.Rule{ - Line: 102, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 102, Value: "$mu1.Lock(); defer $mu2.Lock()"}, - }, + { + Line: 102, + SyntaxPatterns: []ir.PatternString{{Line: 102, Value: "$mu1.Lock(); defer $mu2.Lock()"}}, ReportTemplate: "maybe defer $mu1.Unlock() was intended?", WhereExpr: ir.FilterExpr{ Line: 103, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 103, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: 
"mu2"}, }, }, LocationVar: "mu2", }, - ir.Rule{ - Line: 107, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 107, Value: "$mu1.RLock(); defer $mu2.RLock()"}, - }, + { + Line: 107, + SyntaxPatterns: []ir.PatternString{{Line: 107, Value: "$mu1.RLock(); defer $mu2.RLock()"}}, ReportTemplate: "maybe defer $mu1.RUnlock() was intended?", WhereExpr: ir.FilterExpr{ Line: 108, Op: ir.FilterEqOp, Src: "m[\"mu1\"].Text == m[\"mu2\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, - ir.FilterExpr{Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, + {Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu1\"].Text", Value: "mu1"}, + {Line: 108, Op: ir.FilterVarTextOp, Src: "m[\"mu2\"].Text", Value: "mu2"}, }, }, LocationVar: "mu2", }, }, }, - ir.RuleGroup{ + { Line: 117, Name: "httpNoBody", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects nil usages in http.NewRequest calls, suggesting http.NoBody as an alternative", - DocBefore: "http.NewRequest(\"GET\", url, nil)", - DocAfter: "http.NewRequest(\"GET\", url, http.NoBody)", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects nil usages in http.NewRequest calls, suggesting http.NoBody as an alternative", + DocBefore: "http.NewRequest(\"GET\", url, nil)", + DocAfter: "http.NewRequest(\"GET\", url, http.NoBody)", Rules: []ir.Rule{ - ir.Rule{ - Line: 118, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 118, Value: "http.NewRequest($method, $url, $nil)"}, - }, + { + Line: 118, + SyntaxPatterns: []ir.PatternString{{Line: 118, Value: "http.NewRequest($method, $url, $nil)"}}, ReportTemplate: "http.NoBody should be preferred to the nil request body", SuggestTemplate: "http.NewRequest($method, $url, http.NoBody)", WhereExpr: ir.FilterExpr{ @@ -432,17 +375,15 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterEqOp, Src: "m[\"nil\"].Text == \"nil\"", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 119, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - ir.FilterExpr{Line: 119, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + {Line: 119, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + {Line: 119, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, LocationVar: "nil", }, - ir.Rule{ - Line: 124, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 124, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}, - }, + { + Line: 124, + SyntaxPatterns: []ir.PatternString{{Line: 124, Value: "http.NewRequestWithContext($ctx, $method, $url, $nil)"}}, ReportTemplate: "http.NoBody should be preferred to the nil request body", SuggestTemplate: "http.NewRequestWithContext($ctx, $method, $url, http.NoBody)", WhereExpr: ir.FilterExpr{ @@ -450,264 +391,200 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterEqOp, Src: "m[\"nil\"].Text == \"nil\"", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 125, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, - ir.FilterExpr{Line: 125, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, + {Line: 125, Op: ir.FilterVarTextOp, Src: "m[\"nil\"].Text", Value: "nil"}, + {Line: 125, Op: ir.FilterStringOp, Src: "\"nil\"", Value: "nil"}, }, }, LocationVar: "nil", }, }, }, - ir.RuleGroup{ + { Line: 136, Name: "preferDecodeRune", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - }, - DocSummary: "Detects expressions like []rune(s)[0] that may cause unwanted rune slice allocation", - 
DocBefore: "r := []rune(s)[0]", - DocAfter: "r, _ := utf8.DecodeRuneInString(s)", - DocNote: "See Go issue for details: https://github.com/golang/go/issues/45260", - Rules: []ir.Rule{ - ir.Rule{ - Line: 137, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 137, Value: "[]rune($s)[0]"}, - }, - ReportTemplate: "consider replacing $$ with utf8.DecodeRuneInString($s)", - WhereExpr: ir.FilterExpr{ - Line: 138, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 138, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, - }, - }, - }, + DocTags: []string{"performance", "experimental"}, + DocSummary: "Detects expressions like []rune(s)[0] that may cause unwanted rune slice allocation", + DocBefore: "r := []rune(s)[0]", + DocAfter: "r, _ := utf8.DecodeRuneInString(s)", + DocNote: "See Go issue for details: https://github.com/golang/go/issues/45260", + Rules: []ir.Rule{{ + Line: 137, + SyntaxPatterns: []ir.PatternString{{Line: 137, Value: "[]rune($s)[0]"}}, + ReportTemplate: "consider replacing $$ with utf8.DecodeRuneInString($s)", + WhereExpr: ir.FilterExpr{ + Line: 138, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"s\"].Type.Is(`string`)", + Value: "s", + Args: []ir.FilterExpr{{Line: 138, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + }, + }}, }, - ir.RuleGroup{ + { Line: 146, Name: "sloppyLen", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects usage of `len` when result is obvious or doesn't make sense", - DocBefore: "len(arr) <= 0", - DocAfter: "len(arr) == 0", + DocTags: []string{"style"}, + DocSummary: "Detects usage of `len` when result is obvious or doesn't make sense", + DocBefore: "len(arr) <= 0", + DocAfter: "len(arr) == 0", Rules: []ir.Rule{ - ir.Rule{ - Line: 147, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 147, Value: "len($_) >= 0"}, - }, + { + Line: 147, + SyntaxPatterns: []ir.PatternString{{Line: 147, Value: "len($_) >= 0"}}, ReportTemplate: "$$ is always true", }, - ir.Rule{ - Line: 148, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 148, Value: "len($_) < 0"}, - }, + { + Line: 148, + SyntaxPatterns: []ir.PatternString{{Line: 148, Value: "len($_) < 0"}}, ReportTemplate: "$$ is always false", }, - ir.Rule{ - Line: 149, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 149, Value: "len($x) <= 0"}, - }, + { + Line: 149, + SyntaxPatterns: []ir.PatternString{{Line: 149, Value: "len($x) <= 0"}}, ReportTemplate: "$$ can be len($x) == 0", }, }, }, - ir.RuleGroup{ + { Line: 156, Name: "valSwap", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects value swapping code that are not using parallel assignment", - DocBefore: "*tmp = *x; *x = *y; *y = *tmp", - DocAfter: "*x, *y = *y, *x", - Rules: []ir.Rule{ - ir.Rule{ - Line: 157, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 157, Value: "$tmp := $y; $y = $x; $x = $tmp"}, - }, - ReportTemplate: "can re-write as `$y, $x = $x, $y`", - }, - }, + DocTags: []string{"style"}, + DocSummary: "Detects value swapping code that are not using parallel assignment", + DocBefore: "*tmp = *x; *x = *y; *y = *tmp", + DocAfter: "*x, *y = *y, *x", + Rules: []ir.Rule{{ + Line: 157, + SyntaxPatterns: []ir.PatternString{{Line: 157, Value: "$tmp := $y; $y = $x; $x = $tmp"}}, + ReportTemplate: "can re-write as `$y, $x = $x, $y`", + }}, }, - ir.RuleGroup{ + { Line: 165, Name: "switchTrue", MatcherName: "m", - DocTags: []string{ - 
"style", - }, - DocSummary: "Detects switch-over-bool statements that use explicit `true` tag value", - DocBefore: "switch true {...}", - DocAfter: "switch {...}", + DocTags: []string{"style"}, + DocSummary: "Detects switch-over-bool statements that use explicit `true` tag value", + DocBefore: "switch true {...}", + DocAfter: "switch {...}", Rules: []ir.Rule{ - ir.Rule{ - Line: 166, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 166, Value: "switch true { $*_ }"}, - }, + { + Line: 166, + SyntaxPatterns: []ir.PatternString{{Line: 166, Value: "switch true { $*_ }"}}, ReportTemplate: "replace 'switch true {}' with 'switch {}'", }, - ir.Rule{ - Line: 168, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 168, Value: "switch $x; true { $*_ }"}, - }, + { + Line: 168, + SyntaxPatterns: []ir.PatternString{{Line: 168, Value: "switch $x; true { $*_ }"}}, ReportTemplate: "replace 'switch $x; true {}' with 'switch $x; {}'", }, }, }, - ir.RuleGroup{ + { Line: 176, Name: "flagDeref", MatcherName: "m", - DocTags: []string{ - "diagnostic", - }, - DocSummary: "Detects immediate dereferencing of `flag` package pointers", - DocBefore: "b := *flag.Bool(\"b\", false, \"b docs\")", - DocAfter: "var b bool; flag.BoolVar(&b, \"b\", false, \"b docs\")", + DocTags: []string{"diagnostic"}, + DocSummary: "Detects immediate dereferencing of `flag` package pointers", + DocBefore: "b := *flag.Bool(\"b\", false, \"b docs\")", + DocAfter: "var b bool; flag.BoolVar(&b, \"b\", false, \"b docs\")", Rules: []ir.Rule{ - ir.Rule{ - Line: 177, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 177, Value: "*flag.Bool($*_)"}, - }, + { + Line: 177, + SyntaxPatterns: []ir.PatternString{{Line: 177, Value: "*flag.Bool($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.BoolVar", }, - ir.Rule{ - Line: 178, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 178, Value: "*flag.Duration($*_)"}, - }, + { + Line: 178, + SyntaxPatterns: []ir.PatternString{{Line: 178, Value: "*flag.Duration($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.DurationVar", }, - ir.Rule{ - Line: 179, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 179, Value: "*flag.Float64($*_)"}, - }, + { + Line: 179, + SyntaxPatterns: []ir.PatternString{{Line: 179, Value: "*flag.Float64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Float64Var", }, - ir.Rule{ - Line: 180, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 180, Value: "*flag.Int($*_)"}, - }, + { + Line: 180, + SyntaxPatterns: []ir.PatternString{{Line: 180, Value: "*flag.Int($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.IntVar", }, - ir.Rule{ - Line: 181, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 181, Value: "*flag.Int64($*_)"}, - }, + { + Line: 181, + SyntaxPatterns: []ir.PatternString{{Line: 181, Value: "*flag.Int64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Int64Var", }, - ir.Rule{ - Line: 182, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 182, Value: "*flag.String($*_)"}, - }, + { + Line: 182, + SyntaxPatterns: []ir.PatternString{{Line: 182, Value: "*flag.String($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.StringVar", }, - ir.Rule{ - Line: 183, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 183, Value: 
"*flag.Uint($*_)"}, - }, + { + Line: 183, + SyntaxPatterns: []ir.PatternString{{Line: 183, Value: "*flag.Uint($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.UintVar", }, - ir.Rule{ - Line: 184, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 184, Value: "*flag.Uint64($*_)"}, - }, + { + Line: 184, + SyntaxPatterns: []ir.PatternString{{Line: 184, Value: "*flag.Uint64($*_)"}}, ReportTemplate: "immediate deref in $$ is most likely an error; consider using flag.Uint64Var", }, }, }, - ir.RuleGroup{ + { Line: 191, Name: "emptyStringTest", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects empty string checks that can be written more idiomatically", - DocBefore: "len(s) == 0", - DocAfter: "s == \"\"", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects empty string checks that can be written more idiomatically", + DocBefore: "len(s) == 0", + DocAfter: "s == \"\"", Rules: []ir.Rule{ - ir.Rule{ - Line: 192, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 192, Value: "len($s) != 0"}, - }, + { + Line: 192, + SyntaxPatterns: []ir.PatternString{{Line: 192, Value: "len($s) != 0"}}, ReportTemplate: "replace `$$` with `$s != \"\"`", WhereExpr: ir.FilterExpr{ Line: 193, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 193, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 193, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, - ir.Rule{ - Line: 196, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 196, Value: "len($s) == 0"}, - }, + { + Line: 196, + SyntaxPatterns: []ir.PatternString{{Line: 196, Value: "len($s) == 0"}}, ReportTemplate: "replace `$$` with `$s == \"\"`", WhereExpr: ir.FilterExpr{ Line: 197, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 197, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 197, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, - ir.RuleGroup{ + { Line: 205, Name: "stringXbytes", MatcherName: "m", - DocTags: []string{ - "performance", - }, - DocSummary: "Detects redundant conversions between string and []byte", - DocBefore: "copy(b, []byte(s))", - DocAfter: "copy(b, s)", + DocTags: []string{"performance"}, + DocSummary: "Detects redundant conversions between string and []byte", + DocBefore: "copy(b, []byte(s))", + DocAfter: "copy(b, s)", Rules: []ir.Rule{ - ir.Rule{ - Line: 206, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 206, Value: "copy($_, []byte($s))"}, - }, + { + Line: 206, + SyntaxPatterns: []ir.PatternString{{Line: 206, Value: "copy($_, []byte($s))"}}, ReportTemplate: "can simplify `[]byte($s)` to `$s`", }, - ir.Rule{ - Line: 208, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 208, Value: "string($b) == \"\""}, - }, + { + Line: 208, + SyntaxPatterns: []ir.PatternString{{Line: 208, Value: "string($b) == \"\""}}, ReportTemplate: "suggestion: len($b) == 0", SuggestTemplate: "len($b) == 0", WhereExpr: ir.FilterExpr{ @@ -715,16 +592,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 208, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 208, Op: ir.FilterStringOp, Src: 
"`[]byte`", Value: "[]byte"}}, }, }, - ir.Rule{ - Line: 209, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 209, Value: "string($b) != \"\""}, - }, + { + Line: 209, + SyntaxPatterns: []ir.PatternString{{Line: 209, Value: "string($b) != \"\""}}, ReportTemplate: "suggestion: len($b) != 0", SuggestTemplate: "len($b) != 0", WhereExpr: ir.FilterExpr{ @@ -732,16 +605,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 209, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 209, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, - ir.Rule{ - Line: 211, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 211, Value: "len(string($b))"}, - }, + { + Line: 211, + SyntaxPatterns: []ir.PatternString{{Line: 211, Value: "len(string($b))"}}, ReportTemplate: "suggestion: len($b)", SuggestTemplate: "len($b)", WhereExpr: ir.FilterExpr{ @@ -749,16 +618,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"b\"].Type.Is(`[]byte`)", Value: "b", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 211, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 211, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, - ir.Rule{ - Line: 213, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 213, Value: "string($x) == string($y)"}, - }, + { + Line: 213, + SyntaxPatterns: []ir.PatternString{{Line: 213, Value: "string($x) == string($y)"}}, ReportTemplate: "suggestion: bytes.Equal($x, $y)", SuggestTemplate: "bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ @@ -766,32 +631,26 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 214, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, - ir.FilterExpr{ + { Line: 214, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 214, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, }, }, - ir.Rule{ - Line: 217, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 217, Value: "string($x) != string($y)"}, - }, + { + Line: 217, + SyntaxPatterns: []ir.PatternString{{Line: 217, Value: "string($x) != string($y)"}}, ReportTemplate: "suggestion: !bytes.Equal($x, $y)", SuggestTemplate: "!bytes.Equal($x, $y)", WhereExpr: ir.FilterExpr{ @@ -799,32 +658,26 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Type.Is(`[]byte`) && m[\"y\"].Type.Is(`[]byte`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 218, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]byte`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 218, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 218, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, - ir.FilterExpr{ + { Line: 218, Op: ir.FilterVarTypeIsOp, Src: "m[\"y\"].Type.Is(`[]byte`)", Value: "y", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 218, Op: ir.FilterStringOp, Src: 
"`[]byte`", Value: "[]byte"}, - }, + Args: []ir.FilterExpr{{Line: 218, Op: ir.FilterStringOp, Src: "`[]byte`", Value: "[]byte"}}, }, }, }, }, - ir.Rule{ - Line: 221, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 221, Value: "$re.Match([]byte($s))"}, - }, + { + Line: 221, + SyntaxPatterns: []ir.PatternString{{Line: 221, Value: "$re.Match([]byte($s))"}}, ReportTemplate: "suggestion: $re.MatchString($s)", SuggestTemplate: "$re.MatchString($s)", WhereExpr: ir.FilterExpr{ @@ -832,32 +685,26 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 222, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 222, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, - }, + Args: []ir.FilterExpr{{Line: 222, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, - ir.FilterExpr{ + { Line: 222, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 222, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 222, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, - ir.Rule{ - Line: 225, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 225, Value: "$re.FindIndex([]byte($s))"}, - }, + { + Line: 225, + SyntaxPatterns: []ir.PatternString{{Line: 225, Value: "$re.FindIndex([]byte($s))"}}, ReportTemplate: "suggestion: $re.FindStringIndex($s)", SuggestTemplate: "$re.FindStringIndex($s)", WhereExpr: ir.FilterExpr{ @@ -865,32 +712,26 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 226, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 226, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, - }, + Args: []ir.FilterExpr{{Line: 226, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, - ir.FilterExpr{ + { Line: 226, Op: ir.FilterVarTypeIsOp, Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 226, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 226, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, - ir.Rule{ - Line: 229, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 229, Value: "$re.FindAllIndex([]byte($s), $n)"}, - }, + { + Line: 229, + SyntaxPatterns: []ir.PatternString{{Line: 229, Value: "$re.FindAllIndex([]byte($s), $n)"}}, ReportTemplate: "suggestion: $re.FindAllStringIndex($s, $n)", SuggestTemplate: "$re.FindAllStringIndex($s, $n)", WhereExpr: ir.FilterExpr{ @@ -898,190 +739,146 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`) && m[\"s\"].Type.Is(`string`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 230, Op: ir.FilterVarTypeIsOp, Src: "m[\"re\"].Type.Is(`*regexp.Regexp`)", Value: "re", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 230, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}, - }, + Args: []ir.FilterExpr{{Line: 230, Op: ir.FilterStringOp, Src: "`*regexp.Regexp`", Value: "*regexp.Regexp"}}, }, - ir.FilterExpr{ + { Line: 230, Op: ir.FilterVarTypeIsOp, 
Src: "m[\"s\"].Type.Is(`string`)", Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 230, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, + Args: []ir.FilterExpr{{Line: 230, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, }, }, }, }, }, }, - ir.RuleGroup{ + { Line: 239, Name: "indexAlloc", MatcherName: "m", - DocTags: []string{ - "performance", - }, - DocSummary: "Detects strings.Index calls that may cause unwanted allocs", - DocBefore: "strings.Index(string(x), y)", - DocAfter: "bytes.Index(x, []byte(y))", - DocNote: "See Go issue for details: https://github.com/golang/go/issues/25864", - Rules: []ir.Rule{ - ir.Rule{ - Line: 240, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 240, Value: "strings.Index(string($x), $y)"}, - }, - ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))", - WhereExpr: ir.FilterExpr{ - Line: 241, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Pure && m[\"y\"].Pure", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, - }, - }, - }, - }, + DocTags: []string{"performance"}, + DocSummary: "Detects strings.Index calls that may cause unwanted allocs", + DocBefore: "strings.Index(string(x), y)", + DocAfter: "bytes.Index(x, []byte(y))", + DocNote: "See Go issue for details: https://github.com/golang/go/issues/25864", + Rules: []ir.Rule{{ + Line: 240, + SyntaxPatterns: []ir.PatternString{{Line: 240, Value: "strings.Index(string($x), $y)"}}, + ReportTemplate: "consider replacing $$ with bytes.Index($x, []byte($y))", + WhereExpr: ir.FilterExpr{ + Line: 241, + Op: ir.FilterAndOp, + Src: "m[\"x\"].Pure && m[\"y\"].Pure", + Args: []ir.FilterExpr{ + {Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 241, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 249, Name: "wrapperFunc", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects function calls that can be replaced with convenience wrappers", - DocBefore: "wg.Add(-1)", - DocAfter: "wg.Done()", + DocTags: []string{"style"}, + DocSummary: "Detects function calls that can be replaced with convenience wrappers", + DocBefore: "wg.Add(-1)", + DocAfter: "wg.Done()", Rules: []ir.Rule{ - ir.Rule{ - Line: 250, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 250, Value: "$wg.Add(-1)"}, - }, + { + Line: 250, + SyntaxPatterns: []ir.PatternString{{Line: 250, Value: "$wg.Add(-1)"}}, ReportTemplate: "use WaitGroup.Done method in `$$`", WhereExpr: ir.FilterExpr{ Line: 251, Op: ir.FilterVarTypeIsOp, Src: "m[\"wg\"].Type.Is(`sync.WaitGroup`)", Value: "wg", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 251, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}, - }, + Args: []ir.FilterExpr{{Line: 251, Op: ir.FilterStringOp, Src: "`sync.WaitGroup`", Value: "sync.WaitGroup"}}, }, }, - ir.Rule{ - Line: 254, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 254, Value: "$buf.Truncate(0)"}, - }, + { + Line: 254, + SyntaxPatterns: []ir.PatternString{{Line: 254, Value: "$buf.Truncate(0)"}}, ReportTemplate: "use Buffer.Reset method in `$$`", WhereExpr: ir.FilterExpr{ Line: 255, Op: ir.FilterVarTypeIsOp, Src: "m[\"buf\"].Type.Is(`bytes.Buffer`)", Value: "buf", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 255, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}, - 
}, + Args: []ir.FilterExpr{{Line: 255, Op: ir.FilterStringOp, Src: "`bytes.Buffer`", Value: "bytes.Buffer"}}, }, }, - ir.Rule{ - Line: 258, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 258, Value: "http.HandlerFunc(http.NotFound)"}, - }, + { + Line: 258, + SyntaxPatterns: []ir.PatternString{{Line: 258, Value: "http.HandlerFunc(http.NotFound)"}}, ReportTemplate: "use http.NotFoundHandler method in `$$`", }, - ir.Rule{ - Line: 260, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 260, Value: "strings.SplitN($_, $_, -1)"}, - }, + { + Line: 260, + SyntaxPatterns: []ir.PatternString{{Line: 260, Value: "strings.SplitN($_, $_, -1)"}}, ReportTemplate: "use strings.Split method in `$$`", }, - ir.Rule{ - Line: 261, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 261, Value: "strings.Replace($_, $_, $_, -1)"}, - }, + { + Line: 261, + SyntaxPatterns: []ir.PatternString{{Line: 261, Value: "strings.Replace($_, $_, $_, -1)"}}, ReportTemplate: "use strings.ReplaceAll method in `$$`", }, - ir.Rule{ - Line: 262, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 262, Value: "strings.Map(unicode.ToTitle, $_)"}, - }, + { + Line: 262, + SyntaxPatterns: []ir.PatternString{{Line: 262, Value: "strings.Map(unicode.ToTitle, $_)"}}, ReportTemplate: "use strings.ToTitle method in `$$`", }, - ir.Rule{ - Line: 264, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 264, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}, - }, + { + Line: 264, + SyntaxPatterns: []ir.PatternString{{Line: 264, Value: "bytes.SplitN(b, []byte(\".\"), -1)"}}, ReportTemplate: "use bytes.Split method in `$$`", }, - ir.Rule{ - Line: 265, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 265, Value: "bytes.Replace($_, $_, $_, -1)"}, - }, + { + Line: 265, + SyntaxPatterns: []ir.PatternString{{Line: 265, Value: "bytes.Replace($_, $_, $_, -1)"}}, ReportTemplate: "use bytes.ReplaceAll method in `$$`", }, - ir.Rule{ - Line: 266, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 266, Value: "bytes.Map(unicode.ToUpper, $_)"}, - }, + { + Line: 266, + SyntaxPatterns: []ir.PatternString{{Line: 266, Value: "bytes.Map(unicode.ToUpper, $_)"}}, ReportTemplate: "use bytes.ToUpper method in `$$`", }, - ir.Rule{ - Line: 267, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 267, Value: "bytes.Map(unicode.ToLower, $_)"}, - }, + { + Line: 267, + SyntaxPatterns: []ir.PatternString{{Line: 267, Value: "bytes.Map(unicode.ToLower, $_)"}}, ReportTemplate: "use bytes.ToLower method in `$$`", }, - ir.Rule{ - Line: 268, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 268, Value: "bytes.Map(unicode.ToTitle, $_)"}, - }, + { + Line: 268, + SyntaxPatterns: []ir.PatternString{{Line: 268, Value: "bytes.Map(unicode.ToTitle, $_)"}}, ReportTemplate: "use bytes.ToTitle method in `$$`", }, - ir.Rule{ - Line: 270, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 270, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}, - }, + { + Line: 270, + SyntaxPatterns: []ir.PatternString{{Line: 270, Value: "draw.DrawMask($_, $_, $_, $_, nil, image.Point{}, $_)"}}, ReportTemplate: "use draw.Draw method in `$$`", }, }, }, - ir.RuleGroup{ + { Line: 278, Name: "regexpMust", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`", - DocBefore: "re, _ := regexp.Compile(\"const pattern\")", - DocAfter: "re := regexp.MustCompile(\"const pattern\")", + DocTags: 
[]string{"style"}, + DocSummary: "Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`", + DocBefore: "re, _ := regexp.Compile(\"const pattern\")", + DocAfter: "re := regexp.MustCompile(\"const pattern\")", Rules: []ir.Rule{ - ir.Rule{ - Line: 279, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 279, Value: "regexp.Compile($pat)"}, - }, + { + Line: 279, + SyntaxPatterns: []ir.PatternString{{Line: 279, Value: "regexp.Compile($pat)"}}, ReportTemplate: "for const patterns like $pat, use regexp.MustCompile", WhereExpr: ir.FilterExpr{ Line: 280, @@ -1090,11 +887,9 @@ var PrecompiledRules = &ir.File{ Value: "pat", }, }, - ir.Rule{ - Line: 283, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 283, Value: "regexp.CompilePOSIX($pat)"}, - }, + { + Line: 283, + SyntaxPatterns: []ir.PatternString{{Line: 283, Value: "regexp.CompilePOSIX($pat)"}}, ReportTemplate: "for const patterns like $pat, use regexp.MustCompilePOSIX", WhereExpr: ir.FilterExpr{ Line: 284, @@ -1105,35 +900,31 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.RuleGroup{ + { Line: 292, Name: "badCall", MatcherName: "m", - DocTags: []string{ - "diagnostic", - }, - DocSummary: "Detects suspicious function calls", - DocBefore: "strings.Replace(s, from, to, 0)", - DocAfter: "strings.Replace(s, from, to, -1)", + DocTags: []string{"diagnostic"}, + DocSummary: "Detects suspicious function calls", + DocBefore: "strings.Replace(s, from, to, 0)", + DocAfter: "strings.Replace(s, from, to, -1)", Rules: []ir.Rule{ - ir.Rule{ - Line: 293, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 293, Value: "strings.Replace($_, $_, $_, $zero)"}, - }, + { + Line: 293, + SyntaxPatterns: []ir.PatternString{{Line: 293, Value: "strings.Replace($_, $_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ Line: 294, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 294, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, - ir.FilterExpr{ + { Line: 294, Op: ir.FilterIntOp, Src: "0", @@ -1143,24 +934,22 @@ var PrecompiledRules = &ir.File{ }, LocationVar: "zero", }, - ir.Rule{ - Line: 296, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 296, Value: "bytes.Replace($_, $_, $_, $zero)"}, - }, + { + Line: 296, + SyntaxPatterns: []ir.PatternString{{Line: 296, Value: "bytes.Replace($_, $_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ Line: 297, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 297, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, - ir.FilterExpr{ + { Line: 297, Op: ir.FilterIntOp, Src: "0", @@ -1170,24 +959,22 @@ var PrecompiledRules = &ir.File{ }, LocationVar: "zero", }, - ir.Rule{ - Line: 300, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 300, Value: "strings.SplitN($_, $_, $zero)"}, - }, + { + Line: 300, + SyntaxPatterns: []ir.PatternString{{Line: 300, Value: "strings.SplitN($_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ Line: 301, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 301, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, - ir.FilterExpr{ + { Line: 301, Op: ir.FilterIntOp, Src: "0", @@ -1197,24 +984,22 @@ var PrecompiledRules = &ir.File{ }, LocationVar: 
"zero", }, - ir.Rule{ - Line: 303, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 303, Value: "bytes.SplitN($_, $_, $zero)"}, - }, + { + Line: 303, + SyntaxPatterns: []ir.PatternString{{Line: 303, Value: "bytes.SplitN($_, $_, $zero)"}}, ReportTemplate: "suspicious arg 0, probably meant -1", WhereExpr: ir.FilterExpr{ Line: 304, Op: ir.FilterEqOp, Src: "m[\"zero\"].Value.Int() == 0", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 304, Op: ir.FilterVarValueIntOp, Src: "m[\"zero\"].Value.Int()", Value: "zero", }, - ir.FilterExpr{ + { Line: 304, Op: ir.FilterIntOp, Src: "0", @@ -1224,200 +1009,158 @@ var PrecompiledRules = &ir.File{ }, LocationVar: "zero", }, - ir.Rule{ - Line: 307, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 307, Value: "append($_)"}, - }, + { + Line: 307, + SyntaxPatterns: []ir.PatternString{{Line: 307, Value: "append($_)"}}, ReportTemplate: "no-op append call, probably missing arguments", }, - ir.Rule{ - Line: 309, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 309, Value: "filepath.Join($_)"}, - }, + { + Line: 309, + SyntaxPatterns: []ir.PatternString{{Line: 309, Value: "filepath.Join($_)"}}, ReportTemplate: "suspicious Join on 1 argument", }, }, }, - ir.RuleGroup{ + { Line: 316, Name: "assignOp", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects assignments that can be simplified by using assignment operators", - DocBefore: "x = x * 2", - DocAfter: "x *= 2", + DocTags: []string{"style"}, + DocSummary: "Detects assignments that can be simplified by using assignment operators", + DocBefore: "x = x * 2", + DocAfter: "x *= 2", Rules: []ir.Rule{ - ir.Rule{ - Line: 317, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 317, Value: "$x = $x + 1"}, - }, + { + Line: 317, + SyntaxPatterns: []ir.PatternString{{Line: 317, Value: "$x = $x + 1"}}, ReportTemplate: "replace `$$` with `$x++`", WhereExpr: ir.FilterExpr{Line: 317, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 318, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 318, Value: "$x = $x - 1"}, - }, + { + Line: 318, + SyntaxPatterns: []ir.PatternString{{Line: 318, Value: "$x = $x - 1"}}, ReportTemplate: "replace `$$` with `$x--`", WhereExpr: ir.FilterExpr{Line: 318, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 320, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 320, Value: "$x = $x + $y"}, - }, + { + Line: 320, + SyntaxPatterns: []ir.PatternString{{Line: 320, Value: "$x = $x + $y"}}, ReportTemplate: "replace `$$` with `$x += $y`", WhereExpr: ir.FilterExpr{Line: 320, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 321, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 321, Value: "$x = $x - $y"}, - }, + { + Line: 321, + SyntaxPatterns: []ir.PatternString{{Line: 321, Value: "$x = $x - $y"}}, ReportTemplate: "replace `$$` with `$x -= $y`", WhereExpr: ir.FilterExpr{Line: 321, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 323, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 323, Value: "$x = $x * $y"}, - }, + { + Line: 323, + SyntaxPatterns: []ir.PatternString{{Line: 323, Value: "$x = $x * $y"}}, ReportTemplate: "replace `$$` with `$x *= $y`", WhereExpr: ir.FilterExpr{Line: 323, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 324, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 324, Value: "$x = $x / 
$y"}, - }, + { + Line: 324, + SyntaxPatterns: []ir.PatternString{{Line: 324, Value: "$x = $x / $y"}}, ReportTemplate: "replace `$$` with `$x /= $y`", WhereExpr: ir.FilterExpr{Line: 324, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 325, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 325, Value: "$x = $x % $y"}, - }, + { + Line: 325, + SyntaxPatterns: []ir.PatternString{{Line: 325, Value: "$x = $x % $y"}}, ReportTemplate: "replace `$$` with `$x %= $y`", WhereExpr: ir.FilterExpr{Line: 325, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 326, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 326, Value: "$x = $x & $y"}, - }, + { + Line: 326, + SyntaxPatterns: []ir.PatternString{{Line: 326, Value: "$x = $x & $y"}}, ReportTemplate: "replace `$$` with `$x &= $y`", WhereExpr: ir.FilterExpr{Line: 326, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 327, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 327, Value: "$x = $x | $y"}, - }, + { + Line: 327, + SyntaxPatterns: []ir.PatternString{{Line: 327, Value: "$x = $x | $y"}}, ReportTemplate: "replace `$$` with `$x |= $y`", WhereExpr: ir.FilterExpr{Line: 327, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 328, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 328, Value: "$x = $x ^ $y"}, - }, + { + Line: 328, + SyntaxPatterns: []ir.PatternString{{Line: 328, Value: "$x = $x ^ $y"}}, ReportTemplate: "replace `$$` with `$x ^= $y`", WhereExpr: ir.FilterExpr{Line: 328, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 329, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 329, Value: "$x = $x << $y"}, - }, + { + Line: 329, + SyntaxPatterns: []ir.PatternString{{Line: 329, Value: "$x = $x << $y"}}, ReportTemplate: "replace `$$` with `$x <<= $y`", WhereExpr: ir.FilterExpr{Line: 329, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 330, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 330, Value: "$x = $x >> $y"}, - }, + { + Line: 330, + SyntaxPatterns: []ir.PatternString{{Line: 330, Value: "$x = $x >> $y"}}, ReportTemplate: "replace `$$` with `$x >>= $y`", WhereExpr: ir.FilterExpr{Line: 330, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ - Line: 331, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 331, Value: "$x = $x &^ $y"}, - }, + { + Line: 331, + SyntaxPatterns: []ir.PatternString{{Line: 331, Value: "$x = $x &^ $y"}}, ReportTemplate: "replace `$$` with `$x &^= $y`", WhereExpr: ir.FilterExpr{Line: 331, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, - ir.RuleGroup{ + { Line: 338, Name: "preferWriteByte", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - "opinionated", - }, - DocSummary: "Detects WriteRune calls with rune literal argument that is single byte and reports to use WriteByte instead", - DocBefore: "w.WriteRune('\\n')", - DocAfter: "w.WriteByte('\\n')", - Rules: []ir.Rule{ - ir.Rule{ - Line: 342, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 342, Value: "$w.WriteRune($c)"}, - }, - ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)", - WhereExpr: ir.FilterExpr{ - Line: 343, - Op: ir.FilterAndOp, - Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ 
- Line: 343, - Op: ir.FilterVarTypeImplementsOp, - Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\")", - Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 343, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}, + DocTags: []string{"performance", "experimental", "opinionated"}, + DocSummary: "Detects WriteRune calls with rune literal argument that is single byte and reports to use WriteByte instead", + DocBefore: "w.WriteRune('\\n')", + DocAfter: "w.WriteByte('\\n')", + Rules: []ir.Rule{{ + Line: 342, + SyntaxPatterns: []ir.PatternString{{Line: 342, Value: "$w.WriteRune($c)"}}, + ReportTemplate: "consider writing single byte rune $c with $w.WriteByte($c)", + WhereExpr: ir.FilterExpr{ + Line: 343, + Op: ir.FilterAndOp, + Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\") && (m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", + Args: []ir.FilterExpr{ + { + Line: 343, + Op: ir.FilterVarTypeImplementsOp, + Src: "m[\"w\"].Type.Implements(\"io.ByteWriter\")", + Value: "w", + Args: []ir.FilterExpr{{Line: 343, Op: ir.FilterStringOp, Src: "\"io.ByteWriter\"", Value: "io.ByteWriter"}}, + }, + { + Line: 343, + Op: ir.FilterAndOp, + Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", + Args: []ir.FilterExpr{ + { + Line: 343, + Op: ir.FilterVarConstOp, + Src: "m[\"c\"].Const", + Value: "c", }, - }, - ir.FilterExpr{ - Line: 343, - Op: ir.FilterAndOp, - Src: "(m[\"c\"].Const && m[\"c\"].Value.Int() < runeSelf)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 343, - Op: ir.FilterVarConstOp, - Src: "m[\"c\"].Const", - Value: "c", - }, - ir.FilterExpr{ - Line: 343, - Op: ir.FilterLtOp, - Src: "m[\"c\"].Value.Int() < runeSelf", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 343, - Op: ir.FilterVarValueIntOp, - Src: "m[\"c\"].Value.Int()", - Value: "c", - }, - ir.FilterExpr{ - Line: 343, - Op: ir.FilterIntOp, - Src: "runeSelf", - Value: int64(128), - }, + { + Line: 343, + Op: ir.FilterLtOp, + Src: "m[\"c\"].Value.Int() < runeSelf", + Args: []ir.FilterExpr{ + { + Line: 343, + Op: ir.FilterVarValueIntOp, + Src: "m[\"c\"].Value.Int()", + Value: "c", + }, + { + Line: 343, + Op: ir.FilterIntOp, + Src: "runeSelf", + Value: int64(128), }, }, }, @@ -1425,25 +1168,20 @@ var PrecompiledRules = &ir.File{ }, }, }, - }, + }}, }, - ir.RuleGroup{ + { Line: 351, Name: "preferFprint", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - }, - DocSummary: "Detects fmt.Sprint(f/ln) calls which can be replaced with fmt.Fprint(f/ln)", - DocBefore: "w.Write([]byte(fmt.Sprintf(\"%x\", 10)))", - DocAfter: "fmt.Fprintf(w, \"%x\", 10)", + DocTags: []string{"performance", "experimental"}, + DocSummary: "Detects fmt.Sprint(f/ln) calls which can be replaced with fmt.Fprint(f/ln)", + DocBefore: "w.Write([]byte(fmt.Sprintf(\"%x\", 10)))", + DocAfter: "fmt.Fprintf(w, \"%x\", 10)", Rules: []ir.Rule{ - ir.Rule{ - Line: 352, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 352, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}, - }, + { + Line: 352, + SyntaxPatterns: []ir.PatternString{{Line: 352, Value: "$w.Write([]byte(fmt.Sprint($*args)))"}}, ReportTemplate: "fmt.Fprint($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprint($w, $args)", WhereExpr: ir.FilterExpr{ @@ -1451,16 +1189,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 353, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}, - }, + Args: 
[]ir.FilterExpr{{Line: 353, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, - ir.Rule{ - Line: 357, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 357, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}, - }, + { + Line: 357, + SyntaxPatterns: []ir.PatternString{{Line: 357, Value: "$w.Write([]byte(fmt.Sprintf($*args)))"}}, ReportTemplate: "fmt.Fprintf($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprintf($w, $args)", WhereExpr: ir.FilterExpr{ @@ -1468,16 +1202,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 358, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}, - }, + Args: []ir.FilterExpr{{Line: 358, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, - ir.Rule{ - Line: 362, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 362, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}, - }, + { + Line: 362, + SyntaxPatterns: []ir.PatternString{{Line: 362, Value: "$w.Write([]byte(fmt.Sprintln($*args)))"}}, ReportTemplate: "fmt.Fprintln($w, $args) should be preferred to the $$", SuggestTemplate: "fmt.Fprintln($w, $args)", WhereExpr: ir.FilterExpr{ @@ -1485,189 +1215,156 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.Writer\")", Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 363, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}, - }, + Args: []ir.FilterExpr{{Line: 363, Op: ir.FilterStringOp, Src: "\"io.Writer\"", Value: "io.Writer"}}, }, }, - ir.Rule{ - Line: 367, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 367, Value: "io.WriteString($w, fmt.Sprint($*args))"}, - }, + { + Line: 367, + SyntaxPatterns: []ir.PatternString{{Line: 367, Value: "io.WriteString($w, fmt.Sprint($*args))"}}, ReportTemplate: "suggestion: fmt.Fprint($w, $args)", SuggestTemplate: "fmt.Fprint($w, $args)", }, - ir.Rule{ - Line: 368, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 368, Value: "io.WriteString($w, fmt.Sprintf($*args))"}, - }, + { + Line: 368, + SyntaxPatterns: []ir.PatternString{{Line: 368, Value: "io.WriteString($w, fmt.Sprintf($*args))"}}, ReportTemplate: "suggestion: fmt.Fprintf($w, $args)", SuggestTemplate: "fmt.Fprintf($w, $args)", }, - ir.Rule{ - Line: 369, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 369, Value: "io.WriteString($w, fmt.Sprintln($*args))"}, - }, + { + Line: 369, + SyntaxPatterns: []ir.PatternString{{Line: 369, Value: "io.WriteString($w, fmt.Sprintln($*args))"}}, ReportTemplate: "suggestion: fmt.Fprintln($w, $args)", SuggestTemplate: "fmt.Fprintln($w, $args)", }, }, }, - ir.RuleGroup{ + { Line: 376, Name: "dupArg", MatcherName: "m", - DocTags: []string{ - "diagnostic", - }, - DocSummary: "Detects suspicious duplicated arguments", - DocBefore: "copy(dst, dst)", - DocAfter: "copy(dst, src)", + DocTags: []string{"diagnostic"}, + DocSummary: "Detects suspicious duplicated arguments", + DocBefore: "copy(dst, dst)", + DocAfter: "copy(dst, src)", Rules: []ir.Rule{ - ir.Rule{ + { Line: 377, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 377, Value: "$x.Equal($x)"}, - ir.PatternString{Line: 377, Value: "$x.Equals($x)"}, - ir.PatternString{Line: 377, Value: "$x.Compare($x)"}, - ir.PatternString{Line: 377, Value: "$x.Cmp($x)"}, + {Line: 377, Value: "$x.Equal($x)"}, + {Line: 377, Value: "$x.Equals($x)"}, + {Line: 377, 
Value: "$x.Compare($x)"}, + {Line: 377, Value: "$x.Cmp($x)"}, }, ReportTemplate: "suspicious method call with the same argument and receiver", WhereExpr: ir.FilterExpr{Line: 378, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, - ir.Rule{ + { Line: 381, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 381, Value: "copy($x, $x)"}, - ir.PatternString{Line: 382, Value: "math.Max($x, $x)"}, - ir.PatternString{Line: 383, Value: "math.Min($x, $x)"}, - ir.PatternString{Line: 384, Value: "reflect.Copy($x, $x)"}, - ir.PatternString{Line: 385, Value: "reflect.DeepEqual($x, $x)"}, - ir.PatternString{Line: 386, Value: "strings.Contains($x, $x)"}, - ir.PatternString{Line: 387, Value: "strings.Compare($x, $x)"}, - ir.PatternString{Line: 388, Value: "strings.EqualFold($x, $x)"}, - ir.PatternString{Line: 389, Value: "strings.HasPrefix($x, $x)"}, - ir.PatternString{Line: 390, Value: "strings.HasSuffix($x, $x)"}, - ir.PatternString{Line: 391, Value: "strings.Index($x, $x)"}, - ir.PatternString{Line: 392, Value: "strings.LastIndex($x, $x)"}, - ir.PatternString{Line: 393, Value: "strings.Split($x, $x)"}, - ir.PatternString{Line: 394, Value: "strings.SplitAfter($x, $x)"}, - ir.PatternString{Line: 395, Value: "strings.SplitAfterN($x, $x, $_)"}, - ir.PatternString{Line: 396, Value: "strings.SplitN($x, $x, $_)"}, - ir.PatternString{Line: 397, Value: "strings.Replace($_, $x, $x, $_)"}, - ir.PatternString{Line: 398, Value: "strings.ReplaceAll($_, $x, $x)"}, - ir.PatternString{Line: 399, Value: "bytes.Contains($x, $x)"}, - ir.PatternString{Line: 400, Value: "bytes.Compare($x, $x)"}, - ir.PatternString{Line: 401, Value: "bytes.Equal($x, $x)"}, - ir.PatternString{Line: 402, Value: "bytes.EqualFold($x, $x)"}, - ir.PatternString{Line: 403, Value: "bytes.HasPrefix($x, $x)"}, - ir.PatternString{Line: 404, Value: "bytes.HasSuffix($x, $x)"}, - ir.PatternString{Line: 405, Value: "bytes.Index($x, $x)"}, - ir.PatternString{Line: 406, Value: "bytes.LastIndex($x, $x)"}, - ir.PatternString{Line: 407, Value: "bytes.Split($x, $x)"}, - ir.PatternString{Line: 408, Value: "bytes.SplitAfter($x, $x)"}, - ir.PatternString{Line: 409, Value: "bytes.SplitAfterN($x, $x, $_)"}, - ir.PatternString{Line: 410, Value: "bytes.SplitN($x, $x, $_)"}, - ir.PatternString{Line: 411, Value: "bytes.Replace($_, $x, $x, $_)"}, - ir.PatternString{Line: 412, Value: "bytes.ReplaceAll($_, $x, $x)"}, - ir.PatternString{Line: 413, Value: "types.Identical($x, $x)"}, - ir.PatternString{Line: 414, Value: "types.IdenticalIgnoreTags($x, $x)"}, - ir.PatternString{Line: 415, Value: "draw.Draw($x, $_, $x, $_, $_)"}, + {Line: 381, Value: "copy($x, $x)"}, + {Line: 382, Value: "math.Max($x, $x)"}, + {Line: 383, Value: "math.Min($x, $x)"}, + {Line: 384, Value: "reflect.Copy($x, $x)"}, + {Line: 385, Value: "reflect.DeepEqual($x, $x)"}, + {Line: 386, Value: "strings.Contains($x, $x)"}, + {Line: 387, Value: "strings.Compare($x, $x)"}, + {Line: 388, Value: "strings.EqualFold($x, $x)"}, + {Line: 389, Value: "strings.HasPrefix($x, $x)"}, + {Line: 390, Value: "strings.HasSuffix($x, $x)"}, + {Line: 391, Value: "strings.Index($x, $x)"}, + {Line: 392, Value: "strings.LastIndex($x, $x)"}, + {Line: 393, Value: "strings.Split($x, $x)"}, + {Line: 394, Value: "strings.SplitAfter($x, $x)"}, + {Line: 395, Value: "strings.SplitAfterN($x, $x, $_)"}, + {Line: 396, Value: "strings.SplitN($x, $x, $_)"}, + {Line: 397, Value: "strings.Replace($_, $x, $x, $_)"}, + {Line: 398, Value: "strings.ReplaceAll($_, $x, $x)"}, + {Line: 399, Value: "bytes.Contains($x, $x)"}, + {Line: 
400, Value: "bytes.Compare($x, $x)"}, + {Line: 401, Value: "bytes.Equal($x, $x)"}, + {Line: 402, Value: "bytes.EqualFold($x, $x)"}, + {Line: 403, Value: "bytes.HasPrefix($x, $x)"}, + {Line: 404, Value: "bytes.HasSuffix($x, $x)"}, + {Line: 405, Value: "bytes.Index($x, $x)"}, + {Line: 406, Value: "bytes.LastIndex($x, $x)"}, + {Line: 407, Value: "bytes.Split($x, $x)"}, + {Line: 408, Value: "bytes.SplitAfter($x, $x)"}, + {Line: 409, Value: "bytes.SplitAfterN($x, $x, $_)"}, + {Line: 410, Value: "bytes.SplitN($x, $x, $_)"}, + {Line: 411, Value: "bytes.Replace($_, $x, $x, $_)"}, + {Line: 412, Value: "bytes.ReplaceAll($_, $x, $x)"}, + {Line: 413, Value: "types.Identical($x, $x)"}, + {Line: 414, Value: "types.IdenticalIgnoreTags($x, $x)"}, + {Line: 415, Value: "draw.Draw($x, $_, $x, $_, $_)"}, }, ReportTemplate: "suspicious duplicated args in $$", WhereExpr: ir.FilterExpr{Line: 416, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, }, }, }, - ir.RuleGroup{ + { Line: 424, Name: "returnAfterHttpError", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects suspicious http.Error call without following return", - DocBefore: "if err != nil { http.Error(...); }", - DocAfter: "if err != nil { http.Error(...); return; }", - Rules: []ir.Rule{ - ir.Rule{ - Line: 425, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 425, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}, - }, - ReportTemplate: "Possibly return is missed after the http.Error call", - LocationVar: "w", - }, - }, + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects suspicious http.Error call without following return", + DocBefore: "if err != nil { http.Error(...); }", + DocAfter: "if err != nil { http.Error(...); return; }", + Rules: []ir.Rule{{ + Line: 425, + SyntaxPatterns: []ir.PatternString{{Line: 425, Value: "if $_ { $*_; http.Error($w, $err, $code) }"}}, + ReportTemplate: "Possibly return is missed after the http.Error call", + LocationVar: "w", + }}, }, - ir.RuleGroup{ + { Line: 434, Name: "preferFilepathJoin", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects concatenation with os.PathSeparator which can be replaced with filepath.Join", - DocBefore: "x + string(os.PathSeparator) + y", - DocAfter: "filepath.Join(x, y)", - Rules: []ir.Rule{ - ir.Rule{ - Line: 435, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 435, Value: "$x + string(os.PathSeparator) + $y"}, - }, - ReportTemplate: "filepath.Join($x, $y) should be preferred to the $$", - SuggestTemplate: "filepath.Join($x, $y)", - WhereExpr: ir.FilterExpr{ - Line: 436, - Op: ir.FilterAndOp, - Src: "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 436, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"x\"].Type.Is(`string`)", - Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, - }, - ir.FilterExpr{ - Line: 436, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"y\"].Type.Is(`string`)", - Value: "y", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, - }, - }, - }, - }, - }, + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects concatenation with os.PathSeparator which can be replaced with filepath.Join", + DocBefore: "x + string(os.PathSeparator) + y", + DocAfter: "filepath.Join(x, y)", + Rules: []ir.Rule{{ + Line: 435, + SyntaxPatterns: 
[]ir.PatternString{{Line: 435, Value: "$x + string(os.PathSeparator) + $y"}}, + ReportTemplate: "filepath.Join($x, $y) should be preferred to the $$", + SuggestTemplate: "filepath.Join($x, $y)", + WhereExpr: ir.FilterExpr{ + Line: 436, + Op: ir.FilterAndOp, + Src: "m[\"x\"].Type.Is(`string`) && m[\"y\"].Type.Is(`string`)", + Args: []ir.FilterExpr{ + { + Line: 436, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"x\"].Type.Is(`string`)", + Value: "x", + Args: []ir.FilterExpr{{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + }, + { + Line: 436, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"y\"].Type.Is(`string`)", + Value: "y", + Args: []ir.FilterExpr{{Line: 436, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 445, Name: "preferStringWriter", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - }, - DocSummary: "Detects w.Write or io.WriteString calls which can be replaced with w.WriteString", - DocBefore: "w.Write([]byte(\"foo\"))", - DocAfter: "w.WriteString(\"foo\")", + DocTags: []string{"performance", "experimental"}, + DocSummary: "Detects w.Write or io.WriteString calls which can be replaced with w.WriteString", + DocBefore: "w.Write([]byte(\"foo\"))", + DocAfter: "w.WriteString(\"foo\")", Rules: []ir.Rule{ - ir.Rule{ - Line: 446, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 446, Value: "$w.Write([]byte($s))"}, - }, + { + Line: 446, + SyntaxPatterns: []ir.PatternString{{Line: 446, Value: "$w.Write([]byte($s))"}}, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ @@ -1675,16 +1372,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 447, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, - }, + Args: []ir.FilterExpr{{Line: 447, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, }, }, - ir.Rule{ - Line: 451, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 451, Value: "io.WriteString($w, $s)"}, - }, + { + Line: 451, + SyntaxPatterns: []ir.PatternString{{Line: 451, Value: "io.WriteString($w, $s)"}}, ReportTemplate: "$w.WriteString($s) should be preferred to the $$", SuggestTemplate: "$w.WriteString($s)", WhereExpr: ir.FilterExpr{ @@ -1692,158 +1385,125 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeImplementsOp, Src: "m[\"w\"].Type.Implements(\"io.StringWriter\")", Value: "w", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 452, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}, - }, + Args: []ir.FilterExpr{{Line: 452, Op: ir.FilterStringOp, Src: "\"io.StringWriter\"", Value: "io.StringWriter"}}, }, }, }, }, - ir.RuleGroup{ + { Line: 461, Name: "sliceClear", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - }, - DocSummary: "Detects slice clear loops, suggests an idiom that is recognized by the Go compiler", - DocBefore: "for i := 0; i < len(buf); i++ { buf[i] = 0 }", - DocAfter: "for i := range buf { buf[i] = 0 }", - Rules: []ir.Rule{ - ir.Rule{ - Line: 462, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 462, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}, - }, - ReportTemplate: "rewrite as for-range so compiler can recognize this pattern", - WhereExpr: ir.FilterExpr{ - Line: 463, - Op: ir.FilterEqOp, - Src: 
"m[\"zero\"].Value.Int() == 0", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 463, - Op: ir.FilterVarValueIntOp, - Src: "m[\"zero\"].Value.Int()", - Value: "zero", - }, - ir.FilterExpr{ - Line: 463, - Op: ir.FilterIntOp, - Src: "0", - Value: int64(0), - }, - }, - }, - }, - }, + DocTags: []string{"performance", "experimental"}, + DocSummary: "Detects slice clear loops, suggests an idiom that is recognized by the Go compiler", + DocBefore: "for i := 0; i < len(buf); i++ { buf[i] = 0 }", + DocAfter: "for i := range buf { buf[i] = 0 }", + Rules: []ir.Rule{{ + Line: 462, + SyntaxPatterns: []ir.PatternString{{Line: 462, Value: "for $i := 0; $i < len($xs); $i++ { $xs[$i] = $zero }"}}, + ReportTemplate: "rewrite as for-range so compiler can recognize this pattern", + WhereExpr: ir.FilterExpr{ + Line: 463, + Op: ir.FilterEqOp, + Src: "m[\"zero\"].Value.Int() == 0", + Args: []ir.FilterExpr{ + { + Line: 463, + Op: ir.FilterVarValueIntOp, + Src: "m[\"zero\"].Value.Int()", + Value: "zero", + }, + { + Line: 463, + Op: ir.FilterIntOp, + Src: "0", + Value: int64(0), + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 471, Name: "syncMapLoadAndDelete", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects sync.Map load+delete operations that can be replaced with LoadAndDelete", - DocBefore: "v, ok := m.Load(k); if ok { m.Delete($k); f(v); }", - DocAfter: "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }", - Rules: []ir.Rule{ - ir.Rule{ - Line: 472, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 472, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}, - }, - ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically", - WhereExpr: ir.FilterExpr{ - Line: 473, - Op: ir.FilterAndOp, - Src: "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 473, - Op: ir.FilterGoVersionGreaterEqThanOp, - Src: "m.GoVersion().GreaterEqThan(\"1.15\")", - Value: "1.15", - }, - ir.FilterExpr{ - Line: 474, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"m\"].Type.Is(`*sync.Map`)", - Value: "m", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 474, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}, - }, - }, - }, - }, - }, - }, + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects sync.Map load+delete operations that can be replaced with LoadAndDelete", + DocBefore: "v, ok := m.Load(k); if ok { m.Delete($k); f(v); }", + DocAfter: "v, deleted := m.LoadAndDelete(k); if deleted { f(v) }", + Rules: []ir.Rule{{ + Line: 472, + SyntaxPatterns: []ir.PatternString{{Line: 472, Value: "$_, $ok := $m.Load($k); if $ok { $m.Delete($k); $*_ }"}}, + ReportTemplate: "use $m.LoadAndDelete to perform load+delete operations atomically", + WhereExpr: ir.FilterExpr{ + Line: 473, + Op: ir.FilterAndOp, + Src: "m.GoVersion().GreaterEqThan(\"1.15\") &&\n\tm[\"m\"].Type.Is(`*sync.Map`)", + Args: []ir.FilterExpr{ + { + Line: 473, + Op: ir.FilterGoVersionGreaterEqThanOp, + Src: "m.GoVersion().GreaterEqThan(\"1.15\")", + Value: "1.15", + }, + { + Line: 474, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"m\"].Type.Is(`*sync.Map`)", + Value: "m", + Args: []ir.FilterExpr{{Line: 474, Op: ir.FilterStringOp, Src: "`*sync.Map`", Value: "*sync.Map"}}, + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 482, Name: "sprintfQuotedString", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects \"%s\" formatting directives that can be 
replaced with %q", - DocBefore: "fmt.Sprintf(`\"%s\"`, s)", - DocAfter: "fmt.Sprintf(`%q`, s)", - Rules: []ir.Rule{ - ir.Rule{ - Line: 483, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 483, Value: "fmt.Sprintf($s, $*_)"}, - }, - ReportTemplate: "use %q instead of \"%s\" for quoted strings", - WhereExpr: ir.FilterExpr{ - Line: 484, - Op: ir.FilterOrOp, - Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 484, - Op: ir.FilterVarTextMatchesOp, - Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")", - Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 484, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}, - }, - }, - ir.FilterExpr{ - Line: 485, - Op: ir.FilterVarTextMatchesOp, - Src: "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", - Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 485, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}, - }, - }, - }, - }, - }, - }, + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects \"%s\" formatting directives that can be replaced with %q", + DocBefore: "fmt.Sprintf(`\"%s\"`, s)", + DocAfter: "fmt.Sprintf(`%q`, s)", + Rules: []ir.Rule{{ + Line: 483, + SyntaxPatterns: []ir.PatternString{{Line: 483, Value: "fmt.Sprintf($s, $*_)"}}, + ReportTemplate: "use %q instead of \"%s\" for quoted strings", + WhereExpr: ir.FilterExpr{ + Line: 484, + Op: ir.FilterOrOp, + Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\") ||\n\tm[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", + Args: []ir.FilterExpr{ + { + Line: 484, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"s\"].Text.Matches(\"^`.*\\\"%s\\\".*`$\")", + Value: "s", + Args: []ir.FilterExpr{{Line: 484, Op: ir.FilterStringOp, Src: "\"^`.*\\\"%s\\\".*`$\"", Value: "^`.*\"%s\".*`$"}}, + }, + { + Line: 485, + Op: ir.FilterVarTextMatchesOp, + Src: "m[\"s\"].Text.Matches(`^\".*\\\\\"%s\\\\\".*\"$`)", + Value: "s", + Args: []ir.FilterExpr{{Line: 485, Op: ir.FilterStringOp, Src: "`^\".*\\\\\"%s\\\\\".*\"$`", Value: "^\".*\\\\\"%s\\\\\".*\"$"}}, + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 493, Name: "offBy1", MatcherName: "m", - DocTags: []string{ - "diagnostic", - }, - DocSummary: "Detects various off-by-one kind of errors", - DocBefore: "xs[len(xs)]", - DocAfter: "xs[len(xs)-1]", + DocTags: []string{"diagnostic"}, + DocSummary: "Detects various off-by-one kind of errors", + DocBefore: "xs[len(xs)]", + DocAfter: "xs[len(xs)-1]", Rules: []ir.Rule{ - ir.Rule{ - Line: 494, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 494, Value: "$x[len($x)]"}, - }, + { + Line: 494, + SyntaxPatterns: []ir.PatternString{{Line: 494, Value: "$x[len($x)]"}}, ReportTemplate: "index expr always panics; maybe you wanted $x[len($x)-1]?", SuggestTemplate: "$x[len($x)-1]", WhereExpr: ir.FilterExpr{ @@ -1851,26 +1511,24 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"x\"].Type.Is(`[]$_`)", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 495, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{ + {Line: 495, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + { Line: 495, Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]$_`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 495, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, - }, + Args: []ir.FilterExpr{{Line: 495, Op: ir.FilterStringOp, Src: "`[]$_`", Value: 
"[]$_"}}, }, }, }, }, - ir.Rule{ + { Line: 502, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 503, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, - ir.PatternString{Line: 504, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, - ir.PatternString{Line: 505, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, - ir.PatternString{Line: 506, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, + {Line: 503, Value: "$i := strings.Index($s, $_); $_ := $slicing[$i:]"}, + {Line: 504, Value: "$i := strings.Index($s, $_); $_ = $slicing[$i:]"}, + {Line: 505, Value: "$i := bytes.Index($s, $_); $_ := $slicing[$i:]"}, + {Line: 506, Value: "$i := bytes.Index($s, $_); $_ = $slicing[$i:]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do $s[$i+1:]", WhereExpr: ir.FilterExpr{ @@ -1878,19 +1536,19 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterEqOp, Src: "m[\"s\"].Text == m[\"slicing\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 507, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - ir.FilterExpr{Line: 507, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, + {Line: 507, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, + {Line: 507, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, }, }, LocationVar: "slicing", }, - ir.Rule{ + { Line: 511, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 512, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, - ir.PatternString{Line: 513, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, - ir.PatternString{Line: 514, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, - ir.PatternString{Line: 515, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, + {Line: 512, Value: "$i := strings.Index($s, $_); $_ := $slicing[:$i]"}, + {Line: 513, Value: "$i := strings.Index($s, $_); $_ = $slicing[:$i]"}, + {Line: 514, Value: "$i := bytes.Index($s, $_); $_ := $slicing[:$i]"}, + {Line: 515, Value: "$i := bytes.Index($s, $_); $_ = $slicing[:$i]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do $s[:$i+1]", WhereExpr: ir.FilterExpr{ @@ -1898,229 +1556,185 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterEqOp, Src: "m[\"s\"].Text == m[\"slicing\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, - ir.FilterExpr{Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, + {Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"s\"].Text", Value: "s"}, + {Line: 516, Op: ir.FilterVarTextOp, Src: "m[\"slicing\"].Text", Value: "slicing"}, }, }, LocationVar: "slicing", }, - ir.Rule{ + { Line: 520, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 521, Value: "$s[strings.Index($s, $_):]"}, - ir.PatternString{Line: 522, Value: "$s[:strings.Index($s, $_)]"}, - ir.PatternString{Line: 523, Value: "$s[bytes.Index($s, $_):]"}, - ir.PatternString{Line: 524, Value: "$s[:bytes.Index($s, $_)]"}, + {Line: 521, Value: "$s[strings.Index($s, $_):]"}, + {Line: 522, Value: "$s[:strings.Index($s, $_)]"}, + {Line: 523, Value: "$s[bytes.Index($s, $_):]"}, + {Line: 524, Value: "$s[:bytes.Index($s, $_)]"}, }, ReportTemplate: "Index() can return -1; maybe you wanted to do Index()+1", }, }, }, - ir.RuleGroup{ + { Line: 532, Name: "unslice", MatcherName: "m", - DocTags: []string{ - "style", - }, - DocSummary: "Detects slice expressions that can be simplified to sliced expression itself", - DocBefore: "copy(b[:], values...)", - DocAfter: 
"copy(b, values...)", - Rules: []ir.Rule{ - ir.Rule{ - Line: 533, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 533, Value: "$s[:]"}, - }, - ReportTemplate: "could simplify $$ to $s", - SuggestTemplate: "$s", - WhereExpr: ir.FilterExpr{ - Line: 534, - Op: ir.FilterOrOp, - Src: "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 534, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`string`)", - Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 534, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}, - }, - }, - ir.FilterExpr{ - Line: 534, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"s\"].Type.Is(`[]$_`)", - Value: "s", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 534, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}, - }, - }, - }, - }, - }, - }, + DocTags: []string{"style"}, + DocSummary: "Detects slice expressions that can be simplified to sliced expression itself", + DocBefore: "copy(b[:], values...)", + DocAfter: "copy(b, values...)", + Rules: []ir.Rule{{ + Line: 533, + SyntaxPatterns: []ir.PatternString{{Line: 533, Value: "$s[:]"}}, + ReportTemplate: "could simplify $$ to $s", + SuggestTemplate: "$s", + WhereExpr: ir.FilterExpr{ + Line: 534, + Op: ir.FilterOrOp, + Src: "m[\"s\"].Type.Is(`string`) || m[\"s\"].Type.Is(`[]$_`)", + Args: []ir.FilterExpr{ + { + Line: 534, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"s\"].Type.Is(`string`)", + Value: "s", + Args: []ir.FilterExpr{{Line: 534, Op: ir.FilterStringOp, Src: "`string`", Value: "string"}}, + }, + { + Line: 534, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"s\"].Type.Is(`[]$_`)", + Value: "s", + Args: []ir.FilterExpr{{Line: 534, Op: ir.FilterStringOp, Src: "`[]$_`", Value: "[]$_"}}, + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 543, Name: "yodaStyleExpr", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects Yoda style expressions and suggests to replace them", - DocBefore: "return nil != ptr", - DocAfter: "return ptr != nil", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects Yoda style expressions and suggests to replace them", + DocBefore: "return nil != ptr", + DocAfter: "return ptr != nil", Rules: []ir.Rule{ - ir.Rule{ - Line: 544, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 544, Value: "$constval != $x"}, - }, + { + Line: 544, + SyntaxPatterns: []ir.PatternString{{Line: 544, Value: "$constval != $x"}}, ReportTemplate: "consider to change order in expression to $x != $constval", WhereExpr: ir.FilterExpr{ Line: 544, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 544, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, + Args: []ir.FilterExpr{{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }, - ir.FilterExpr{ + { Line: 544, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 544, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, - }, - }, + Args: []ir.FilterExpr{{ + Line: 544, + Op: ir.FilterVarNodeIsOp, + Src: "m[\"x\"].Node.Is(`BasicLit`)", + Value: "x", + Args: []ir.FilterExpr{{Line: 
544, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + }}, }, }, }, }, - ir.Rule{ - Line: 546, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 546, Value: "$constval == $x"}, - }, + { + Line: 546, + SyntaxPatterns: []ir.PatternString{{Line: 546, Value: "$constval == $x"}}, ReportTemplate: "consider to change order in expression to $x == $constval", WhereExpr: ir.FilterExpr{ Line: 546, Op: ir.FilterAndOp, Src: "m[\"constval\"].Node.Is(`BasicLit`) && !m[\"x\"].Node.Is(`BasicLit`)", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 546, Op: ir.FilterVarNodeIsOp, Src: "m[\"constval\"].Node.Is(`BasicLit`)", Value: "constval", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, + Args: []ir.FilterExpr{{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, }, - ir.FilterExpr{ + { Line: 546, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 546, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, - }, - }, + Args: []ir.FilterExpr{{ + Line: 546, + Op: ir.FilterVarNodeIsOp, + Src: "m[\"x\"].Node.Is(`BasicLit`)", + Value: "x", + Args: []ir.FilterExpr{{Line: 546, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + }}, }, }, }, }, - ir.Rule{ - Line: 549, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 549, Value: "nil != $x"}, - }, + { + Line: 549, + SyntaxPatterns: []ir.PatternString{{Line: 549, Value: "nil != $x"}}, ReportTemplate: "consider to change order in expression to $x != nil", WhereExpr: ir.FilterExpr{ Line: 549, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 549, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, - }, - }, - }, - }, - ir.Rule{ - Line: 551, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 551, Value: "nil == $x"}, - }, + Args: []ir.FilterExpr{{ + Line: 549, + Op: ir.FilterVarNodeIsOp, + Src: "m[\"x\"].Node.Is(`BasicLit`)", + Value: "x", + Args: []ir.FilterExpr{{Line: 549, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + }}, + }, + }, + { + Line: 551, + SyntaxPatterns: []ir.PatternString{{Line: 551, Value: "nil == $x"}}, ReportTemplate: "consider to change order in expression to $x == nil", WhereExpr: ir.FilterExpr{ Line: 551, Op: ir.FilterNotOp, Src: "!m[\"x\"].Node.Is(`BasicLit`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 551, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"x\"].Node.Is(`BasicLit`)", - Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 551, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}, - }, - }, - }, + Args: []ir.FilterExpr{{ + Line: 551, + Op: ir.FilterVarNodeIsOp, + Src: "m[\"x\"].Node.Is(`BasicLit`)", + Value: "x", + Args: []ir.FilterExpr{{Line: 551, Op: ir.FilterStringOp, Src: "`BasicLit`", Value: "BasicLit"}}, + }}, }, }, }, }, - ir.RuleGroup{ + { Line: 559, Name: "equalFold", MatcherName: "m", - DocTags: []string{ - "performance", - "experimental", - }, - DocSummary: "Detects unoptimal strings/bytes case-insensitive comparison", - DocBefore: "strings.ToLower(x) == strings.ToLower(y)", - DocAfter: "strings.EqualFold(x, y)", + DocTags: 
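A short sketch of the unslice and yodaStyleExpr suggestions above (names illustrative):

package example

// process applies both rewrites: b[:] simplifies to b when b is already a
// slice, and the nil comparison reads value-first rather than Yoda style.
func process(b []byte, ptr *int) int {
	copy(b, []byte("hdr")) // unslice: copy(b[:], ...) -> copy(b, ...)
	if ptr != nil {        // yodaStyleExpr: nil != ptr -> ptr != nil
		return *ptr + len(b)
	}
	return len(b)
}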
[]string{"performance", "experimental"}, + DocSummary: "Detects unoptimal strings/bytes case-insensitive comparison", + DocBefore: "strings.ToLower(x) == strings.ToLower(y)", + DocAfter: "strings.EqualFold(x, y)", Rules: []ir.Rule{ - ir.Rule{ + { Line: 568, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 569, Value: "strings.ToLower($x) == $y"}, - ir.PatternString{Line: 570, Value: "strings.ToLower($x) == strings.ToLower($y)"}, - ir.PatternString{Line: 571, Value: "$x == strings.ToLower($y)"}, - ir.PatternString{Line: 572, Value: "strings.ToUpper($x) == $y"}, - ir.PatternString{Line: 573, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, - ir.PatternString{Line: 574, Value: "$x == strings.ToUpper($y)"}, + {Line: 569, Value: "strings.ToLower($x) == $y"}, + {Line: 570, Value: "strings.ToLower($x) == strings.ToLower($y)"}, + {Line: 571, Value: "$x == strings.ToLower($y)"}, + {Line: 572, Value: "strings.ToUpper($x) == $y"}, + {Line: 573, Value: "strings.ToUpper($x) == strings.ToUpper($y)"}, + {Line: 574, Value: "$x == strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with strings.EqualFold($x, $y)", SuggestTemplate: "strings.EqualFold($x, $y)", @@ -2129,36 +1743,36 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 575, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 575, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, - ir.FilterExpr{ + { Line: 575, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 575, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, - ir.Rule{ + { Line: 580, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 581, Value: "strings.ToLower($x) != $y"}, - ir.PatternString{Line: 582, Value: "strings.ToLower($x) != strings.ToLower($y)"}, - ir.PatternString{Line: 583, Value: "$x != strings.ToLower($y)"}, - ir.PatternString{Line: 584, Value: "strings.ToUpper($x) != $y"}, - ir.PatternString{Line: 585, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, - ir.PatternString{Line: 586, Value: "$x != strings.ToUpper($y)"}, + {Line: 581, Value: "strings.ToLower($x) != $y"}, + {Line: 582, Value: "strings.ToLower($x) != strings.ToLower($y)"}, + {Line: 583, Value: "$x != strings.ToLower($y)"}, + {Line: 584, Value: "strings.ToUpper($x) != $y"}, + {Line: 585, Value: "strings.ToUpper($x) != strings.ToUpper($y)"}, + {Line: 586, Value: "$x != strings.ToUpper($y)"}, }, ReportTemplate: "consider replacing with !strings.EqualFold($x, $y)", SuggestTemplate: "!strings.EqualFold($x, $y)", @@ -2167,36 +1781,36 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 587, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 587, Op: ir.FilterVarPureOp, Src: 
"m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 587, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 587, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 587, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, - ir.FilterExpr{ + { Line: 587, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 587, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, }, }, - ir.Rule{ + { Line: 592, SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 593, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, - ir.PatternString{Line: 594, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, - ir.PatternString{Line: 595, Value: "bytes.Equal($x, bytes.ToLower($y))"}, - ir.PatternString{Line: 596, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, - ir.PatternString{Line: 597, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, - ir.PatternString{Line: 598, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, + {Line: 593, Value: "bytes.Equal(bytes.ToLower($x), $y)"}, + {Line: 594, Value: "bytes.Equal(bytes.ToLower($x), bytes.ToLower($y))"}, + {Line: 595, Value: "bytes.Equal($x, bytes.ToLower($y))"}, + {Line: 596, Value: "bytes.Equal(bytes.ToUpper($x), $y)"}, + {Line: 597, Value: "bytes.Equal(bytes.ToUpper($x), bytes.ToUpper($y))"}, + {Line: 598, Value: "bytes.Equal($x, bytes.ToUpper($y))"}, }, ReportTemplate: "consider replacing with bytes.EqualFold($x, $y)", SuggestTemplate: "bytes.EqualFold($x, $y)", @@ -2205,22 +1819,22 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure && m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 599, Op: ir.FilterAndOp, Src: "m[\"x\"].Pure && m[\"y\"].Pure", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, - ir.FilterExpr{Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, + {Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"x\"].Pure", Value: "x"}, + {Line: 599, Op: ir.FilterVarPureOp, Src: "m[\"y\"].Pure", Value: "y"}, }, }, - ir.FilterExpr{ + { Line: 599, Op: ir.FilterNeqOp, Src: "m[\"x\"].Text != m[\"y\"].Text", Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, - ir.FilterExpr{Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, + {Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"x\"].Text", Value: "x"}, + {Line: 599, Op: ir.FilterVarTextOp, Src: "m[\"y\"].Text", Value: "y"}, }, }, }, @@ -2228,167 +1842,143 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.RuleGroup{ + { Line: 608, Name: "argOrder", MatcherName: "m", - DocTags: []string{ - "diagnostic", - }, - DocSummary: "Detects suspicious arguments order", - DocBefore: "strings.HasPrefix(\"#\", userpass)", - DocAfter: "strings.HasPrefix(userpass, \"#\")", - Rules: []ir.Rule{ - ir.Rule{ - Line: 609, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 610, Value: "strings.HasPrefix($lit, $s)"}, - ir.PatternString{Line: 611, Value: "bytes.HasPrefix($lit, $s)"}, - ir.PatternString{Line: 612, Value: "strings.HasSuffix($lit, $s)"}, - ir.PatternString{Line: 613, Value: "bytes.HasSuffix($lit, $s)"}, - ir.PatternString{Line: 614, Value: 
"strings.Contains($lit, $s)"}, - ir.PatternString{Line: 615, Value: "bytes.Contains($lit, $s)"}, - ir.PatternString{Line: 616, Value: "strings.TrimPrefix($lit, $s)"}, - ir.PatternString{Line: 617, Value: "bytes.TrimPrefix($lit, $s)"}, - ir.PatternString{Line: 618, Value: "strings.TrimSuffix($lit, $s)"}, - ir.PatternString{Line: 619, Value: "bytes.TrimSuffix($lit, $s)"}, - ir.PatternString{Line: 620, Value: "strings.Split($lit, $s)"}, - ir.PatternString{Line: 621, Value: "bytes.Split($lit, $s)"}, - }, - ReportTemplate: "$lit and $s arguments order looks reversed", - WhereExpr: ir.FilterExpr{ - Line: 622, - Op: ir.FilterAndOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 622, - Op: ir.FilterAndOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 622, + DocTags: []string{"diagnostic"}, + DocSummary: "Detects suspicious arguments order", + DocBefore: "strings.HasPrefix(\"#\", userpass)", + DocAfter: "strings.HasPrefix(userpass, \"#\")", + Rules: []ir.Rule{{ + Line: 609, + SyntaxPatterns: []ir.PatternString{ + {Line: 610, Value: "strings.HasPrefix($lit, $s)"}, + {Line: 611, Value: "bytes.HasPrefix($lit, $s)"}, + {Line: 612, Value: "strings.HasSuffix($lit, $s)"}, + {Line: 613, Value: "bytes.HasSuffix($lit, $s)"}, + {Line: 614, Value: "strings.Contains($lit, $s)"}, + {Line: 615, Value: "bytes.Contains($lit, $s)"}, + {Line: 616, Value: "strings.TrimPrefix($lit, $s)"}, + {Line: 617, Value: "bytes.TrimPrefix($lit, $s)"}, + {Line: 618, Value: "strings.TrimSuffix($lit, $s)"}, + {Line: 619, Value: "bytes.TrimSuffix($lit, $s)"}, + {Line: 620, Value: "strings.Split($lit, $s)"}, + {Line: 621, Value: "bytes.Split($lit, $s)"}, + }, + ReportTemplate: "$lit and $s arguments order looks reversed", + WhereExpr: ir.FilterExpr{ + Line: 622, + Op: ir.FilterAndOp, + Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice) &&\n\t!m[\"lit\"].Node.Is(`Ident`)", + Args: []ir.FilterExpr{ + { + Line: 622, + Op: ir.FilterAndOp, + Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice) &&\n\t!(m[\"s\"].Const || m[\"s\"].ConstSlice)", + Args: []ir.FilterExpr{ + { + Line: 622, + Op: ir.FilterOrOp, + Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)", + Args: []ir.FilterExpr{ + { + Line: 622, + Op: ir.FilterVarConstOp, + Src: "m[\"lit\"].Const", + Value: "lit", + }, + { + Line: 622, + Op: ir.FilterVarConstSliceOp, + Src: "m[\"lit\"].ConstSlice", + Value: "lit", + }, + }, + }, + { + Line: 623, + Op: ir.FilterNotOp, + Src: "!(m[\"s\"].Const || m[\"s\"].ConstSlice)", + Args: []ir.FilterExpr{{ + Line: 623, Op: ir.FilterOrOp, - Src: "(m[\"lit\"].Const || m[\"lit\"].ConstSlice)", + Src: "(m[\"s\"].Const || m[\"s\"].ConstSlice)", Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 622, + { + Line: 623, Op: ir.FilterVarConstOp, - Src: "m[\"lit\"].Const", - Value: "lit", + Src: "m[\"s\"].Const", + Value: "s", }, - ir.FilterExpr{ - Line: 622, + { + Line: 623, Op: ir.FilterVarConstSliceOp, - Src: "m[\"lit\"].ConstSlice", - Value: "lit", + Src: "m[\"s\"].ConstSlice", + Value: "s", }, }, - }, - ir.FilterExpr{ - Line: 623, - Op: ir.FilterNotOp, - Src: "!(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 623, - Op: ir.FilterOrOp, - Src: "(m[\"s\"].Const || m[\"s\"].ConstSlice)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 623, 
- Op: ir.FilterVarConstOp, - Src: "m[\"s\"].Const", - Value: "s", - }, - ir.FilterExpr{ - Line: 623, - Op: ir.FilterVarConstSliceOp, - Src: "m[\"s\"].ConstSlice", - Value: "s", - }, - }, - }, - }, - }, - }, - }, - ir.FilterExpr{ - Line: 624, - Op: ir.FilterNotOp, - Src: "!m[\"lit\"].Node.Is(`Ident`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 624, - Op: ir.FilterVarNodeIsOp, - Src: "m[\"lit\"].Node.Is(`Ident`)", - Value: "lit", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 624, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}, - }, - }, + }}, }, }, }, + { + Line: 624, + Op: ir.FilterNotOp, + Src: "!m[\"lit\"].Node.Is(`Ident`)", + Args: []ir.FilterExpr{{ + Line: 624, + Op: ir.FilterVarNodeIsOp, + Src: "m[\"lit\"].Node.Is(`Ident`)", + Value: "lit", + Args: []ir.FilterExpr{{Line: 624, Op: ir.FilterStringOp, Src: "`Ident`", Value: "Ident"}}, + }}, + }, }, }, - }, + }}, }, - ir.RuleGroup{ + { Line: 632, Name: "stringConcatSimplify", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects string concat operations that can be simplified", - DocBefore: "strings.Join([]string{x, y}, \"_\")", - DocAfter: "x + \"_\" + y", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects string concat operations that can be simplified", + DocBefore: "strings.Join([]string{x, y}, \"_\")", + DocAfter: "x + \"_\" + y", Rules: []ir.Rule{ - ir.Rule{ - Line: 633, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 633, Value: "strings.Join([]string{$x, $y}, \"\")"}, - }, + { + Line: 633, + SyntaxPatterns: []ir.PatternString{{Line: 633, Value: "strings.Join([]string{$x, $y}, \"\")"}}, ReportTemplate: "suggestion: $x + $y", SuggestTemplate: "$x + $y", }, - ir.Rule{ - Line: 634, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 634, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}, - }, + { + Line: 634, + SyntaxPatterns: []ir.PatternString{{Line: 634, Value: "strings.Join([]string{$x, $y, $z}, \"\")"}}, ReportTemplate: "suggestion: $x + $y + $z", SuggestTemplate: "$x + $y + $z", }, - ir.Rule{ - Line: 635, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 635, Value: "strings.Join([]string{$x, $y}, $glue)"}, - }, + { + Line: 635, + SyntaxPatterns: []ir.PatternString{{Line: 635, Value: "strings.Join([]string{$x, $y}, $glue)"}}, ReportTemplate: "suggestion: $x + $glue + $y", SuggestTemplate: "$x + $glue + $y", }, }, }, - ir.RuleGroup{ + { Line: 642, Name: "timeExprSimplify", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects manual conversion to milli- or microseconds", - DocBefore: "t.Unix() / 1000", - DocAfter: "t.UnixMilli()", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects manual conversion to milli- or microseconds", + DocBefore: "t.Unix() / 1000", + DocAfter: "t.UnixMilli()", Rules: []ir.Rule{ - ir.Rule{ - Line: 647, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 647, Value: "$t.Unix() / 1000"}, - }, + { + Line: 647, + SyntaxPatterns: []ir.PatternString{{Line: 647, Value: "$t.Unix() / 1000"}}, ReportTemplate: "use $t.UnixMilli() instead of $$", SuggestTemplate: "$t.UnixMilli()", WhereExpr: ir.FilterExpr{ @@ -2396,45 +1986,39 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 648, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, - ir.FilterExpr{ + { Line: 648, 
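Sketches of the argOrder and stringConcatSimplify suggestions above (names illustrative):

package example

import "strings"

// hasComment passes the value first and the literal second, the order argOrder
// expects for HasPrefix and friends.
func hasComment(userpass string) bool {
	return strings.HasPrefix(userpass, "#") // was: strings.HasPrefix("#", userpass)
}

// joinKey uses plain concatenation, as stringConcatSimplify suggests for a
// two-element strings.Join.
func joinKey(x, y string) string {
	return x + "_" + y // was: strings.Join([]string{x, y}, "_")
}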
Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 648, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, - }, + Args: []ir.FilterExpr{{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, - ir.FilterExpr{ + { Line: 648, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, - }, + Args: []ir.FilterExpr{{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, }, }, }, - ir.Rule{ - Line: 652, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 652, Value: "$t.UnixNano() * 1000"}, - }, + { + Line: 652, + SyntaxPatterns: []ir.PatternString{{Line: 652, Value: "$t.UnixNano() * 1000"}}, ReportTemplate: "use $t.UnixMicro() instead of $$", SuggestTemplate: "$t.UnixMicro()", WhereExpr: ir.FilterExpr{ @@ -2442,34 +2026,30 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterAndOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\") && isTime(m[\"t\"])", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 653, Op: ir.FilterGoVersionGreaterEqThanOp, Src: "m.GoVersion().GreaterEqThan(\"1.17\")", Value: "1.17", }, - ir.FilterExpr{ + { Line: 653, Op: ir.FilterOrOp, Src: "isTime(m[\"t\"])", Args: []ir.FilterExpr{ - ir.FilterExpr{ + { Line: 653, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`time.Time`)", Value: "t", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}, - }, + Args: []ir.FilterExpr{{Line: 644, Op: ir.FilterStringOp, Src: "`time.Time`", Value: "time.Time"}}, }, - ir.FilterExpr{ + { Line: 653, Op: ir.FilterVarTypeIsOp, Src: "m[\"t\"].Type.Is(`*time.Time`)", Value: "t", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}, - }, + Args: []ir.FilterExpr{{Line: 644, Op: ir.FilterStringOp, Src: "`*time.Time`", Value: "*time.Time"}}, }, }, }, @@ -2478,101 +2058,77 @@ var PrecompiledRules = &ir.File{ }, }, }, - ir.RuleGroup{ + { Line: 662, Name: "exposedSyncMutex", MatcherName: "m", - DocTags: []string{ - "style", - "experimental", - }, - DocSummary: "Detects exposed methods from sync.Mutex and sync.RWMutex", - DocBefore: "type Foo struct{ ...; sync.Mutex; ... }", - DocAfter: "type Foo struct{ ...; mu sync.Mutex; ... }", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects exposed methods from sync.Mutex and sync.RWMutex", + DocBefore: "type Foo struct{ ...; sync.Mutex; ... }", + DocAfter: "type Foo struct{ ...; mu sync.Mutex; ... 
}", Rules: []ir.Rule{ - ir.Rule{ - Line: 667, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 667, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}, - }, + { + Line: 667, + SyntaxPatterns: []ir.PatternString{{Line: 667, Value: "type $x struct { $*_; sync.Mutex; $*_ }"}}, ReportTemplate: "don't embed sync.Mutex", WhereExpr: ir.FilterExpr{ Line: 668, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, - }, + Args: []ir.FilterExpr{{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, - ir.Rule{ - Line: 671, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 671, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}, - }, + { + Line: 671, + SyntaxPatterns: []ir.PatternString{{Line: 671, Value: "type $x struct { $*_; *sync.Mutex; $*_ }"}}, ReportTemplate: "don't embed *sync.Mutex", WhereExpr: ir.FilterExpr{ Line: 672, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, - }, + Args: []ir.FilterExpr{{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, - ir.Rule{ - Line: 675, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 675, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}, - }, + { + Line: 675, + SyntaxPatterns: []ir.PatternString{{Line: 675, Value: "type $x struct { $*_; sync.RWMutex; $*_ }"}}, ReportTemplate: "don't embed sync.RWMutex", WhereExpr: ir.FilterExpr{ Line: 676, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, - }, + Args: []ir.FilterExpr{{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, - ir.Rule{ - Line: 679, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 679, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}, - }, + { + Line: 679, + SyntaxPatterns: []ir.PatternString{{Line: 679, Value: "type $x struct { $*_; *sync.RWMutex; $*_ }"}}, ReportTemplate: "don't embed *sync.RWMutex", WhereExpr: ir.FilterExpr{ Line: 680, Op: ir.FilterVarTextMatchesOp, Src: "isExported(m[\"x\"])", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}, - }, + Args: []ir.FilterExpr{{Line: 664, Op: ir.FilterStringOp, Src: "`^\\p{Lu}`", Value: "^\\p{Lu}"}}, }, }, }, }, - ir.RuleGroup{ + { Line: 688, Name: "badSorting", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects bad usage of sort package", - DocBefore: "xs = sort.StringSlice(xs)", - DocAfter: "sort.Strings(xs)", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects bad usage of sort package", + DocBefore: "xs = sort.StringSlice(xs)", + DocAfter: "sort.Strings(xs)", Rules: []ir.Rule{ - ir.Rule{ - Line: 689, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 689, Value: "$x = sort.IntSlice($x)"}, - }, + { + Line: 689, + SyntaxPatterns: []ir.PatternString{{Line: 689, Value: "$x = sort.IntSlice($x)"}}, ReportTemplate: "suspicious sort.IntSlice usage, maybe sort.Ints was intended?", SuggestTemplate: "sort.Ints($x)", WhereExpr: ir.FilterExpr{ @@ -2580,16 +2136,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]int`)", Value: 
"x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 690, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}, - }, + Args: []ir.FilterExpr{{Line: 690, Op: ir.FilterStringOp, Src: "`[]int`", Value: "[]int"}}, }, }, - ir.Rule{ - Line: 694, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 694, Value: "$x = sort.Float64Slice($x)"}, - }, + { + Line: 694, + SyntaxPatterns: []ir.PatternString{{Line: 694, Value: "$x = sort.Float64Slice($x)"}}, ReportTemplate: "suspicious sort.Float64s usage, maybe sort.Float64s was intended?", SuggestTemplate: "sort.Float64s($x)", WhereExpr: ir.FilterExpr{ @@ -2597,16 +2149,12 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]float64`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 695, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}, - }, + Args: []ir.FilterExpr{{Line: 695, Op: ir.FilterStringOp, Src: "`[]float64`", Value: "[]float64"}}, }, }, - ir.Rule{ - Line: 699, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 699, Value: "$x = sort.StringSlice($x)"}, - }, + { + Line: 699, + SyntaxPatterns: []ir.PatternString{{Line: 699, Value: "$x = sort.StringSlice($x)"}}, ReportTemplate: "suspicious sort.StringSlice usage, maybe sort.Strings was intended?", SuggestTemplate: "sort.Strings($x)", WhereExpr: ir.FilterExpr{ @@ -2614,137 +2162,141 @@ var PrecompiledRules = &ir.File{ Op: ir.FilterVarTypeIsOp, Src: "m[\"x\"].Type.Is(`[]string`)", Value: "x", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 700, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}, - }, + Args: []ir.FilterExpr{{Line: 700, Op: ir.FilterStringOp, Src: "`[]string`", Value: "[]string"}}, }, }, }, }, - ir.RuleGroup{ + { Line: 709, Name: "externalErrorReassign", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects suspicious reassigment of error from another package", - DocBefore: "io.EOF = nil", - DocAfter: "/* don't do it */", - Rules: []ir.Rule{ - ir.Rule{ - Line: 710, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 710, Value: "$pkg.$err = $x"}, - }, - ReportTemplate: "suspicious reassigment of error from another package", - WhereExpr: ir.FilterExpr{ - Line: 711, - Op: ir.FilterAndOp, - Src: "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 711, - Op: ir.FilterVarTypeIsOp, - Src: "m[\"err\"].Type.Is(`error`)", - Value: "err", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 711, Op: ir.FilterStringOp, Src: "`error`", Value: "error"}, - }, - }, - ir.FilterExpr{ - Line: 711, - Op: ir.FilterVarObjectIsOp, - Src: "m[\"pkg\"].Object.Is(`PkgName`)", - Value: "pkg", - Args: []ir.FilterExpr{ - ir.FilterExpr{Line: 711, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}, - }, - }, - }, - }, - }, - }, + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects suspicious reassigment of error from another package", + DocBefore: "io.EOF = nil", + DocAfter: "/* don't do it */", + Rules: []ir.Rule{{ + Line: 710, + SyntaxPatterns: []ir.PatternString{{Line: 710, Value: "$pkg.$err = $x"}}, + ReportTemplate: "suspicious reassigment of error from another package", + WhereExpr: ir.FilterExpr{ + Line: 711, + Op: ir.FilterAndOp, + Src: "m[\"err\"].Type.Is(`error`) && m[\"pkg\"].Object.Is(`PkgName`)", + Args: []ir.FilterExpr{ + { + Line: 711, + Op: ir.FilterVarTypeIsOp, + Src: "m[\"err\"].Type.Is(`error`)", + Value: "err", + Args: []ir.FilterExpr{{Line: 711, Op: 
ir.FilterStringOp, Src: "`error`", Value: "error"}}, + }, + { + Line: 711, + Op: ir.FilterVarObjectIsOp, + Src: "m[\"pkg\"].Object.Is(`PkgName`)", + Value: "pkg", + Args: []ir.FilterExpr{{Line: 711, Op: ir.FilterStringOp, Src: "`PkgName`", Value: "PkgName"}}, + }, + }, + }, + }}, }, - ir.RuleGroup{ + { Line: 719, Name: "emptyDecl", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects suspicious empty declarations blocks", - DocBefore: "var()", - DocAfter: "/* nothing */", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects suspicious empty declarations blocks", + DocBefore: "var()", + DocAfter: "/* nothing */", Rules: []ir.Rule{ - ir.Rule{ - Line: 720, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 720, Value: "var()"}, - }, + { + Line: 720, + SyntaxPatterns: []ir.PatternString{{Line: 720, Value: "var()"}}, ReportTemplate: "empty var() block", }, - ir.Rule{ - Line: 721, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 721, Value: "const()"}, - }, + { + Line: 721, + SyntaxPatterns: []ir.PatternString{{Line: 721, Value: "const()"}}, ReportTemplate: "empty const() block", }, - ir.Rule{ - Line: 722, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 722, Value: "type()"}, - }, + { + Line: 722, + SyntaxPatterns: []ir.PatternString{{Line: 722, Value: "type()"}}, ReportTemplate: "empty type() block", }, }, }, - ir.RuleGroup{ + { Line: 729, Name: "dynamicFmtString", MatcherName: "m", - DocTags: []string{ - "diagnostic", - "experimental", - }, - DocSummary: "Detects suspicious formatting strings usage", - DocBefore: "fmt.Errorf(msg)", - DocAfter: "fmt.Errorf(\"%s\", msg)", + DocTags: []string{"diagnostic", "experimental"}, + DocSummary: "Detects suspicious formatting strings usage", + DocBefore: "fmt.Errorf(msg)", + DocAfter: "fmt.Errorf(\"%s\", msg)", Rules: []ir.Rule{ - ir.Rule{ - Line: 730, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 730, Value: "fmt.Errorf($f)"}, - }, + { + Line: 730, + SyntaxPatterns: []ir.PatternString{{Line: 730, Value: "fmt.Errorf($f)"}}, ReportTemplate: "use errors.New($f) or fmt.Errorf(\"%s\", $f) instead", SuggestTemplate: "errors.New($f)", WhereExpr: ir.FilterExpr{ Line: 731, Op: ir.FilterNotOp, Src: "!m[\"f\"].Const", - Args: []ir.FilterExpr{ - ir.FilterExpr{ - Line: 731, - Op: ir.FilterVarConstOp, - Src: "m[\"f\"].Const", - Value: "f", - }, - }, + Args: []ir.FilterExpr{{ + Line: 731, + Op: ir.FilterVarConstOp, + Src: "m[\"f\"].Const", + Value: "f", + }}, }, }, - ir.Rule{ - Line: 735, - SyntaxPatterns: []ir.PatternString{ - ir.PatternString{Line: 735, Value: "fmt.Errorf($f($*args))"}, - }, + { + Line: 735, + SyntaxPatterns: []ir.PatternString{{Line: 735, Value: "fmt.Errorf($f($*args))"}}, ReportTemplate: "use errors.New($f($*args)) or fmt.Errorf(\"%s\", $f($*args)) instead", SuggestTemplate: "errors.New($f($*args))", }, }, }, + { + Line: 744, + Name: "stringsCompare", + MatcherName: "m", + DocTags: []string{"style", "experimental"}, + DocSummary: "Detects strings.Compare usage", + DocBefore: "strings.Compare(x, y)", + DocAfter: "x < y", + Rules: []ir.Rule{ + { + Line: 745, + SyntaxPatterns: []ir.PatternString{{Line: 745, Value: "strings.Compare($s1, $s2) == 0"}}, + ReportTemplate: "suggestion: $s1 == $s2", + SuggestTemplate: "$s1 == $s2", + }, + { + Line: 748, + SyntaxPatterns: []ir.PatternString{ + {Line: 748, Value: "strings.Compare($s1, $s2) == -1"}, + {Line: 749, Value: "strings.Compare($s1, $s2) < 0"}, + }, + ReportTemplate: 
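The badSorting and dynamicFmtString groups above (alongside the neighbouring externalErrorReassign and emptyDecl checks) reduce to sketches like these (illustrative names):

package example

import (
	"errors"
	"fmt"
	"sort"
)

// sortNames calls sort.Strings directly, the intended form behind the
// suspicious `x = sort.StringSlice(x)` assignment that badSorting flags.
func sortNames(names []string) {
	sort.Strings(names)
}

// wrapMsg never passes a non-constant string as a format string, which is the
// point of dynamicFmtString.
func wrapMsg(msg string) error {
	if msg == "" {
		return errors.New("empty message")
	}
	return fmt.Errorf("%s", msg) // was: fmt.Errorf(msg)
}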
"suggestion: $s1 < $s2", + SuggestTemplate: "$s1 < $s2", + }, + { + Line: 752, + SyntaxPatterns: []ir.PatternString{ + {Line: 752, Value: "strings.Compare($s1, $s2) == 1"}, + {Line: 753, Value: "strings.Compare($s1, $s2) > 0"}, + }, + ReportTemplate: "suggestion: $s1 > $s2", + SuggestTemplate: "$s1 > $s2", + }, + }, + }, }, } diff --git a/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go b/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go new file mode 100644 index 000000000..5ec2881b4 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/todoCommentWithoutDetail_checker.go @@ -0,0 +1,50 @@ +package checkers + +import ( + "go/ast" + "regexp" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "todoCommentWithoutDetail" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Summary = "Detects TODO comments without detail/assignee" + info.Before = ` +// TODO +fiiWithCtx(nil, a, b) +` + info.After = ` +// TODO(admin): pass context.TODO() instead of nil +fiiWithCtx(nil, a, b) +` + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + visitor := &todoCommentWithoutCodeChecker{ + ctx: ctx, + regex: regexp.MustCompile(`^(//|/\*)?\s*(TODO|FIX|FIXME|BUG)\s*(\*/)?$`), + } + return astwalk.WalkerForComment(visitor), nil + }) +} + +type todoCommentWithoutCodeChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + regex *regexp.Regexp +} + +func (c *todoCommentWithoutCodeChecker) VisitComment(cg *ast.CommentGroup) { + for _, comment := range cg.List { + if c.regex.MatchString(comment.Text) { + c.warn(cg) + break + } + } +} + +func (c *todoCommentWithoutCodeChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "may want to add detail/assignee to this TODO/FIXME/BUG comment") +} diff --git a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go index 2e7cfc962..c154a576b 100644 --- a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go +++ b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go @@ -92,7 +92,10 @@ func (ctx *Context) Process() []types.Object { } func isTestFuncByName(name string) bool { - return strings.HasPrefix(name, "Test") || strings.HasPrefix(name, "Benchmark") || strings.HasPrefix(name, "Example") + return strings.HasPrefix(name, "Test") || + strings.HasPrefix(name, "Benchmark") || + strings.HasPrefix(name, "Fuzz") || + strings.HasPrefix(name, "Example") } func (ctx *Context) doPackage(prog *loader.Program, pkg *loader.PackageInfo) []types.Object { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go index 677018a6c..a6ed4c84f 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go @@ -39,7 +39,7 @@ func (e *Executor) initHelp() { func printLinterConfigs(lcs []*linter.Config) { sort.Slice(lcs, func(i, j int) bool { - return strings.Compare(lcs[i].Name(), lcs[j].Name()) < 0 + return lcs[i].Name() < lcs[j].Name() }) for _, lc := range lcs { altNamesStr := "" diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go 
b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go index 3f984ce37..3d4795178 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go @@ -123,6 +123,7 @@ type LintersSettings struct { ErrorLint ErrorLintSettings Exhaustive ExhaustiveSettings ExhaustiveStruct ExhaustiveStructSettings + Exhaustruct ExhaustructSettings Forbidigo ForbidigoSettings Funlen FunlenSettings Gci GciSettings @@ -255,6 +256,11 @@ type ExhaustiveStructSettings struct { StructPatterns []string `mapstructure:"struct-patterns"` } +type ExhaustructSettings struct { + Include []string `mapstructure:"include"` + Exclude []string `mapstructure:"exclude"` +} + type ForbidigoSettings struct { Forbid []string `mapstructure:"forbid"` ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` @@ -544,21 +550,16 @@ type TestpackageSettings struct { } type ThelperSettings struct { - Test struct { - First bool `mapstructure:"first"` - Name bool `mapstructure:"name"` - Begin bool `mapstructure:"begin"` - } `mapstructure:"test"` - Benchmark struct { - First bool `mapstructure:"first"` - Name bool `mapstructure:"name"` - Begin bool `mapstructure:"begin"` - } `mapstructure:"benchmark"` - TB struct { - First bool `mapstructure:"first"` - Name bool `mapstructure:"name"` - Begin bool `mapstructure:"begin"` - } `mapstructure:"tb"` + Test ThelperOptions `mapstructure:"test"` + Fuzz ThelperOptions `mapstructure:"fuzz"` + Benchmark ThelperOptions `mapstructure:"benchmark"` + TB ThelperOptions `mapstructure:"tb"` +} + +type ThelperOptions struct { + First *bool `mapstructure:"first"` + Name *bool `mapstructure:"name"` + Begin *bool `mapstructure:"begin"` } type TenvSettings struct { @@ -579,6 +580,7 @@ type VarnamelenSettings struct { MinNameLength int `mapstructure:"min-name-length"` CheckReceiver bool `mapstructure:"check-receiver"` CheckReturn bool `mapstructure:"check-return"` + CheckTypeParam bool `mapstructure:"check-type-param"` IgnoreNames []string `mapstructure:"ignore-names"` IgnoreTypeAssertOk bool `mapstructure:"ignore-type-assert-ok"` IgnoreMapIndexOk bool `mapstructure:"ignore-map-index-ok"` @@ -592,9 +594,11 @@ type WhitespaceSettings struct { } type WrapcheckSettings struct { - IgnoreSigs []string `mapstructure:"ignoreSigs"` - IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps"` - IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs"` + // TODO(ldez): v2 the options must be renamed to use hyphen. + IgnoreSigs []string `mapstructure:"ignoreSigs"` + IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps"` + IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs"` + IgnoreInterfaceRegexps []string `mapstructure:"ignoreInterfaceRegexps"` } type WSLSettings struct { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go new file mode 100644 index 000000000..a4c3913f7 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/commons.go @@ -0,0 +1,6 @@ +package golinters + +import "github.com/golangci/golangci-lint/pkg/logutils" + +// linterLogger must be use only when the context logger is not available. 
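The new stringsCompare rule group is the same cleanup the nearby hunks in help.go and enabled_set.go apply by hand; a minimal sketch of that rewrite (function name illustrative):

package example

import "sort"

// sortLinterNames compares strings directly instead of via
// strings.Compare(a, b) < 0, mirroring the suggestion templates above.
func sortLinterNames(names []string) {
	sort.Slice(names, func(i, j int) bool {
		return names[i] < names[j] // was: strings.Compare(names[i], names[j]) < 0
	})
}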
+var linterLogger = logutils.NewStderrLog("linter") diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go index 6dc2b2004..6cc2208a3 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errchkjson.go @@ -25,7 +25,7 @@ func NewErrChkJSONFuncName(cfg *config.ErrChkJSONSettings) *goanalysis.Linter { return goanalysis.NewLinter( "errchkjson", "Checks types passed to the json encoding functions. "+ - "Reports unsupported types and optionally reports occations, "+ + "Reports unsupported types and optionally reports occasions, "+ "where the check for the returned error can be omitted.", []*analysis.Analyzer{a}, cfgMap, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go new file mode 100644 index 000000000..9911d315e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/lufeee/execinquery" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExecInQuery() *goanalysis.Linter { + a := execinquery.Analyzer + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go new file mode 100644 index 000000000..8c843289d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustruct.go @@ -0,0 +1,26 @@ +package golinters + +import ( + "github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExhaustruct(settings *config.ExhaustructSettings) *goanalysis.Linter { + var include, exclude []string + + if settings != nil { + include = settings.Include + exclude = settings.Exclude + } + + a, err := analyzer.NewAnalyzer(include, exclude) + if err != nil { + linterLogger.Fatalf("exhaustruct configuration: %v", err) + } + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). + WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go index bd75de3e6..284215dfc 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go @@ -19,7 +19,7 @@ func MakeFakeLoaderProgram(pass *analysis.Pass) *loader.Program { { Pkg: pass.Pkg, Importable: true, // not used - TransitivelyErrorFree: true, // TODO + TransitivelyErrorFree: true, // TODO ??? 
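Going by the errchkjson description corrected just above ("Checks types passed to the json encoding functions"), a hedged sketch of the kind of code it cares about (type and field names invented for illustration):

package example

import "encoding/json"

// Report only contains types the json package supports; errchkjson reports
// unsupported types such as channels or funcs that would make Marshal fail.
type Report struct {
	Name  string `json:"name"`
	Count int    `json:"count"`
}

func encode(r Report) ([]byte, error) {
	return json.Marshal(r)
}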
Files: pass.Files, Errors: nil, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go index cd5b2a43e..09eb9b5b2 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go @@ -37,7 +37,7 @@ func NewGodot() *goanalysis.Linter { // Convert deprecated setting // todo(butuzov): remove on v2 release - if cfg.CheckAll { // nolint:staticcheck + if cfg.CheckAll { // nolint:staticcheck // Keep for retro-compatibility. settings.Scope = godot.AllScope } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go new file mode 100644 index 000000000..8c943d83e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nonamedreturns.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/firefart/nonamedreturns/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNoNamedReturns() *goanalysis.Linter { + return goanalysis.NewLinter( + "nonamedreturns", + "Reports all named returns", + []*analysis.Analyzer{analyzer.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go new file mode 100644 index 000000000..a63b9bb5f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nosprintfhostport.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/stbenjam/no-sprintf-host-port/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNoSprintfHostPort() *goanalysis.Linter { + a := analyzer.Analyzer + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go index 64fe20d2f..8fd5e9802 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go @@ -66,7 +66,7 @@ func NewRevive(cfg *config.ReviveSettings) *goanalysis.Linter { revive := lint.New(os.ReadFile, cfg.MaxOpenFiles) - lintingRules, err := reviveConfig.GetLintingRules(conf) + lintingRules, err := reviveConfig.GetLintingRules(conf, []lint.Rule{}) if err != nil { return nil, err } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go index 1d92f2fbf..41edbe761 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go @@ -13,43 +13,44 @@ import ( func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { a := analyzer.NewAnalyzer() - cfgMap := map[string]map[string]interface{}{} - if cfg != nil { - var opts []string + opts := map[string]struct{}{ + "t_name": {}, + "t_begin": {}, + "t_first": {}, - if cfg.Test.Name { - opts = append(opts, "t_name") - } - if 
cfg.Test.Begin { - opts = append(opts, "t_begin") - } - if cfg.Test.First { - opts = append(opts, "t_first") - } + "f_name": {}, + "f_begin": {}, + "f_first": {}, - if cfg.Benchmark.Name { - opts = append(opts, "b_name") - } - if cfg.Benchmark.Begin { - opts = append(opts, "b_begin") - } - if cfg.Benchmark.First { - opts = append(opts, "b_first") - } + "b_name": {}, + "b_begin": {}, + "b_first": {}, - if cfg.TB.Name { - opts = append(opts, "tb_name") - } - if cfg.TB.Begin { - opts = append(opts, "tb_begin") - } - if cfg.TB.First { - opts = append(opts, "tb_first") - } + "tb_name": {}, + "tb_begin": {}, + "tb_first": {}, + } - cfgMap[a.Name] = map[string]interface{}{ - "checks": strings.Join(opts, ","), - } + if cfg != nil { + applyTHelperOptions(cfg.Test, "t_", opts) + applyTHelperOptions(cfg.Fuzz, "f_", opts) + applyTHelperOptions(cfg.Benchmark, "b_", opts) + applyTHelperOptions(cfg.TB, "tb_", opts) + } + + if len(opts) == 0 { + linterLogger.Fatalf("thelper: at least one option must be enabled") + } + + var args []string + for k := range opts { + args = append(args, k) + } + + cfgMap := map[string]map[string]interface{}{ + a.Name: { + "checks": strings.Join(args, ","), + }, } return goanalysis.NewLinter( @@ -59,3 +60,23 @@ func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { cfgMap, ).WithLoadMode(goanalysis.LoadModeTypesInfo) } + +func applyTHelperOptions(o config.ThelperOptions, prefix string, opts map[string]struct{}) { + if o.Name != nil { + if !*o.Name { + delete(opts, prefix+"name") + } + } + + if o.Begin != nil { + if !*o.Begin { + delete(opts, prefix+"begin") + } + } + + if o.First != nil { + if !*o.First { + delete(opts, prefix+"first") + } + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go index cfdf1f2ca..13e05a508 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go @@ -35,8 +35,27 @@ func NewUnused(settings *config.StaticCheckSettings) *goanalysis.Linter { sr := unused.Serialize(pass, res.(unused.Result), pass.Fset) + used := make(map[string]bool) + for _, obj := range sr.Used { + used[fmt.Sprintf("%s %d %s", obj.Position.Filename, obj.Position.Line, obj.Name)] = true + } + var issues []goanalysis.Issue + // Inspired by https://github.com/dominikh/go-tools/blob/d694aadcb1f50c2d8ac0a1dd06217ebb9f654764/lintcmd/lint.go#L177-L197 for _, object := range sr.Unused { + if object.Kind == "type param" { + continue + } + + if object.InGenerated { + continue + } + + key := fmt.Sprintf("%s %d %s", object.Position.Filename, object.Position.Line, object.Name) + if used[key] { + continue + } + issue := goanalysis.NewIssue(&result.Issue{ FromLinter: name, Text: fmt.Sprintf("%s %s is unused", object.Kind, object.Name), diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go index 6e3176f17..d86c04b20 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varnamelen.go @@ -12,13 +12,14 @@ import ( ) func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter { - a := varnamelen.NewAnalyzer() - + analyzer := varnamelen.NewAnalyzer() cfg := map[string]map[string]interface{}{} + if settings != nil { vnlCfg := 
map[string]interface{}{ "checkReceiver": strconv.FormatBool(settings.CheckReceiver), "checkReturn": strconv.FormatBool(settings.CheckReturn), + "checkTypeParam": strconv.FormatBool(settings.CheckTypeParam), "ignoreNames": strings.Join(settings.IgnoreNames, ","), "ignoreTypeAssertOk": strconv.FormatBool(settings.IgnoreTypeAssertOk), "ignoreMapIndexOk": strconv.FormatBool(settings.IgnoreMapIndexOk), @@ -33,13 +34,13 @@ func NewVarnamelen(settings *config.VarnamelenSettings) *goanalysis.Linter { vnlCfg["minNameLength"] = strconv.Itoa(settings.MinNameLength) } - cfg[a.Name] = vnlCfg + cfg[analyzer.Name] = vnlCfg } return goanalysis.NewLinter( - a.Name, + analyzer.Name, "checks that the length of a variable's name matches its scope", - []*analysis.Analyzer{a}, + []*analysis.Analyzer{analyzer}, cfg, ).WithLoadMode(goanalysis.LoadModeTypesInfo) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go index c52bcb740..098eb87ba 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go @@ -22,6 +22,9 @@ func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter { if len(settings.IgnorePackageGlobs) != 0 { cfg.IgnorePackageGlobs = settings.IgnorePackageGlobs } + if len(settings.IgnoreInterfaceRegexps) != 0 { + cfg.IgnoreInterfaceRegexps = settings.IgnoreInterfaceRegexps + } } a := wrapcheck.NewAnalyzer(cfg) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go index 9814aa857..907c1c4db 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go @@ -3,7 +3,6 @@ package lintersdb import ( "os" "sort" - "strings" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" @@ -123,7 +122,7 @@ func (es EnabledSet) GetOptimizedLinters() ([]*linter.Config, error) { if a.DoesChangeTypes != b.DoesChangeTypes { return b.DoesChangeTypes // move type-changing linters to the end to optimize speed } - return strings.Compare(a.Name(), b.Name()) < 0 + return a.Name() < b.Name() }) return resultLinters, nil @@ -168,7 +167,7 @@ func (es EnabledSet) combineGoAnalysisLinters(linters map[string]*linter.Config) return false } - return strings.Compare(a.Name(), b.Name()) <= 0 + return a.Name() <= b.Name() }) ml := goanalysis.NewMetaLinter(goanalysisLinters) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go index b64ba23e4..9f72e6379 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -107,6 +107,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { var errorlintCfg *config.ErrorLintSettings var exhaustiveCfg *config.ExhaustiveSettings var exhaustiveStructCfg *config.ExhaustiveStructSettings + var exhaustructCfg *config.ExhaustructSettings var gciCfg *config.GciSettings var goModDirectivesCfg *config.GoModDirectivesSettings var goMndCfg *config.GoMndSettings @@ -140,6 +141,7 @@ func (m Manager) 
GetAllSupportedLinterConfigs() []*linter.Config { errorlintCfg = &m.cfg.LintersSettings.ErrorLint exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive exhaustiveStructCfg = &m.cfg.LintersSettings.ExhaustiveStruct + exhaustructCfg = &m.cfg.LintersSettings.Exhaustruct gciCfg = &m.cfg.LintersSettings.Gci goModDirectivesCfg = &m.cfg.LintersSettings.GoModDirectives goMndCfg = &m.cfg.LintersSettings.Gomnd @@ -266,6 +268,12 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithURL("https://github.com/polyfloyd/go-errorlint"), + linter.NewConfig(golinters.NewExecInQuery()). + WithSince("v1.46.0"). + WithPresets(linter.PresetSQL). + WithLoadForGoAnalysis(). + WithURL("https://github.com/lufeee/execinquery"), + linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). WithSince(" v1.28.0"). WithPresets(linter.PresetBugs). @@ -276,7 +284,14 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.32.0"). WithPresets(linter.PresetStyle, linter.PresetTest). WithLoadForGoAnalysis(). - WithURL("https://github.com/mbilski/exhaustivestruct"), + WithURL("https://github.com/mbilski/exhaustivestruct"). + Deprecated("The owner seems to have abandoned the linter.", "v1.46.0", "exhaustruct"), + + linter.NewConfig(golinters.NewExhaustruct(exhaustructCfg)). + WithSince("v1.46.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithLoadForGoAnalysis(). + WithURL("https://github.com/GaijinEntertainment/go-exhaustruct"), linter.NewConfig(golinters.NewExportLoopRef()). WithSince("v1.28.0"). @@ -414,8 +429,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). WithAlternativeNames(megacheckName). - WithURL("https://github.com/dominikh/go-tools/tree/master/simple"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), linter.NewConfig(golinters.NewGovet(govetCfg)). WithSince("v1.0.0"). @@ -522,6 +536,16 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithURL("https://github.com/sonatard/noctx"). WithNoopFallback(m.cfg), + linter.NewConfig(golinters.NewNoNamedReturns()). + WithSince("v1.46.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/firefart/nonamedreturns"), + + linter.NewConfig(golinters.NewNoSprintfHostPort()). + WithSince("v1.46.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/stbenjam/no-sprintf-host-port"), + linter.NewConfig(golinters.NewParallelTest()). WithSince("v1.33.0"). WithPresets(linter.PresetStyle, linter.PresetTest). @@ -573,8 +597,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). WithAlternativeNames(megacheckName). - WithURL("https://staticcheck.io/"). - WithNoopFallback(m.cfg), + WithURL("https://staticcheck.io/"), linter.NewConfig(golinters.NewStructcheck()). WithSince("v1.0.0"). @@ -587,8 +610,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithSince("v1.20.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). - WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). WithSince("v1.40.0"). 
@@ -645,8 +667,7 @@ func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { WithAlternativeNames(megacheckName). ConsiderSlow(). WithChangeTypes(). - WithURL("https://github.com/dominikh/go-tools/tree/master/unused"). - WithNoopFallback(m.cfg), + WithURL("https://github.com/dominikh/go-tools/tree/master/unused"), linter.NewConfig(golinters.NewVarcheck()). WithSince("v1.0.0"). diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go index b4697ee4c..b87060d6c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go @@ -38,7 +38,8 @@ func NewStderrLog(name string) *StderrLog { sl.logger.Out = StdErr formatter := &logrus.TextFormatter{ - DisableTimestamp: true, // `INFO[0007] msg` -> `INFO msg` + DisableTimestamp: true, // `INFO[0007] msg` -> `INFO msg` + EnvironmentOverrideColors: true, } if os.Getenv("LOG_TIMESTAMP") == "1" { formatter.DisableTimestamp = false diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go index fd4bfddb2..a5c5d4a9f 100644 --- a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go @@ -26,6 +26,7 @@ Available checks ` + checkTName + ` - check *testing.T param has t name Also available similar checks for benchmark and TB helpers: ` + + checkFBegin + `, ` + checkFFirst + `, ` + checkFName + `,` + checkBBegin + `, ` + checkBFirst + `, ` + checkBName + `,` + checkTBBegin + `, ` + checkTBFirst + `, ` + checkTBName + ` @@ -60,6 +61,7 @@ func (m enabledChecksValue) Set(s string) error { for _, v := range ss { switch v { case checkTBegin, checkTFirst, checkTName, + checkFBegin, checkFFirst, checkFName, checkBBegin, checkBFirst, checkBName, checkTBBegin, checkTBFirst, checkTBName: m[v] = struct{}{} @@ -74,6 +76,9 @@ const ( checkTBegin = "t_begin" checkTFirst = "t_first" checkTName = "t_name" + checkFBegin = "f_begin" + checkFFirst = "f_first" + checkFName = "f_name" checkBBegin = "b_begin" checkBFirst = "b_first" checkBName = "b_name" @@ -94,6 +99,9 @@ func NewAnalyzer() *analysis.Analyzer { checkTBegin: struct{}{}, checkTFirst: struct{}{}, checkTName: struct{}{}, + checkFBegin: struct{}{}, + checkFFirst: struct{}{}, + checkFName: struct{}{}, checkBBegin: struct{}{}, checkBFirst: struct{}{}, checkBName: struct{}{}, @@ -118,13 +126,17 @@ func NewAnalyzer() *analysis.Analyzer { } func (t thelper) run(pass *analysis.Pass) (interface{}, error) { - tCheckOpts, bCheckOpts, tbCheckOpts, ok := t.buildCheckFuncOpts(pass) + tCheckOpts, fCheckOpts, bCheckOpts, tbCheckOpts, ok := t.buildCheckFuncOpts(pass) + if !ok { + return nil, nil + } + + inspect, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) if !ok { return nil, nil } var reports reports - inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) nodeFilter := []ast.Node{ (*ast.FuncDecl)(nil), (*ast.FuncLit)(nil), @@ -144,13 +156,16 @@ func (t thelper) run(pass *analysis.Pass) (interface{}, error) { fd.Body = n.Body fd.Name = n.Name case *ast.CallExpr: - tbRunSubtestExprs := extractSubtestExp(pass, n, tCheckOpts.tbRun, tCheckOpts.tbTestFuncType) - if len(tbRunSubtestExprs) == 0 { - tbRunSubtestExprs = extractSubtestExp(pass, n, bCheckOpts.tbRun, bCheckOpts.tbTestFuncType) + runSubtestExprs := 
extractSubtestExp(pass, n, tCheckOpts.subRun, tCheckOpts.subTestFuncType) + if len(runSubtestExprs) == 0 { + runSubtestExprs = extractSubtestExp(pass, n, bCheckOpts.subRun, bCheckOpts.subTestFuncType) + } + if len(runSubtestExprs) == 0 { + runSubtestExprs = extractSubtestFuzzExp(pass, n, fCheckOpts.subRun) } - if len(tbRunSubtestExprs) > 0 { - for _, expr := range tbRunSubtestExprs { + if len(runSubtestExprs) > 0 { + for _, expr := range runSubtestExprs { reports.Filter(funcDefPosition(pass, expr)) } } else { @@ -162,6 +177,7 @@ func (t thelper) run(pass *analysis.Pass) (interface{}, error) { } checkFunc(pass, &reports, fd, tCheckOpts) + checkFunc(pass, &reports, fd, fCheckOpts) checkFunc(pass, &reports, fd, bCheckOpts) checkFunc(pass, &reports, fd, tbCheckOpts) }) @@ -172,19 +188,19 @@ func (t thelper) run(pass *analysis.Pass) (interface{}, error) { } type checkFuncOpts struct { - skipPrefix string - varName string - tbHelper types.Object - tbRun types.Object - tbTestFuncType types.Type - tbType types.Type - ctxType types.Type - checkBegin bool - checkFirst bool - checkName bool + skipPrefix string + varName string + fnHelper types.Object + subRun types.Object + subTestFuncType types.Type + hpType types.Type + ctxType types.Type + checkBegin bool + checkFirst bool + checkName bool } -func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFuncOpts, checkFuncOpts, bool) { +func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFuncOpts, checkFuncOpts, checkFuncOpts, bool) { var ctxType types.Type ctxObj := analysisutil.ObjectOf(pass, "context", "Context") if ctxObj != nil { @@ -193,20 +209,25 @@ func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFu tCheckOpts, ok := t.buildTestCheckFuncOpts(pass, ctxType) if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + } + + fCheckOpts, ok := t.buildFuzzCheckFuncOpts(pass, ctxType) + if !ok { + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false } bCheckOpts, ok := t.buildBenchmarkCheckFuncOpts(pass, ctxType) if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false } tbCheckOpts, ok := t.buildTBCheckFuncOpts(pass, ctxType) if !ok { - return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false } - return tCheckOpts, bCheckOpts, tbCheckOpts, true + return tCheckOpts, fCheckOpts, bCheckOpts, tbCheckOpts, true } func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { @@ -225,19 +246,19 @@ func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) return checkFuncOpts{}, false } - tbType := types.NewPointer(tObj.Type()) - tVar := types.NewVar(token.NoPos, nil, "t", tbType) + tType := types.NewPointer(tObj.Type()) + tVar := types.NewVar(token.NoPos, nil, "t", tType) return checkFuncOpts{ - skipPrefix: "Test", - varName: "t", - tbHelper: tHelper, - tbRun: tRun, - tbType: tbType, - tbTestFuncType: types.NewSignature(nil, types.NewTuple(tVar), nil, false), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkTBegin), - checkFirst: t.enabledChecks.Enabled(checkTFirst), - checkName: t.enabledChecks.Enabled(checkTName), + skipPrefix: "Test", + varName: "t", + fnHelper: tHelper, + subRun: tRun, + 
hpType: tType, + subTestFuncType: types.NewSignature(nil, types.NewTuple(tVar), nil, false), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkTBegin), + checkFirst: t.enabledChecks.Enabled(checkTFirst), + checkName: t.enabledChecks.Enabled(checkTName), }, true } @@ -257,19 +278,19 @@ func (t thelper) buildBenchmarkCheckFuncOpts(pass *analysis.Pass, ctxType types. return checkFuncOpts{}, false } - tbType := types.NewPointer(bObj.Type()) - bVar := types.NewVar(token.NoPos, nil, "b", tbType) + bType := types.NewPointer(bObj.Type()) + bVar := types.NewVar(token.NoPos, nil, "b", bType) return checkFuncOpts{ - skipPrefix: "Benchmark", - varName: "b", - tbHelper: bHelper, - tbRun: bRun, - tbType: types.NewPointer(bObj.Type()), - tbTestFuncType: types.NewSignature(nil, types.NewTuple(bVar), nil, false), - ctxType: ctxType, - checkBegin: t.enabledChecks.Enabled(checkBBegin), - checkFirst: t.enabledChecks.Enabled(checkBFirst), - checkName: t.enabledChecks.Enabled(checkBName), + skipPrefix: "Benchmark", + varName: "b", + fnHelper: bHelper, + subRun: bRun, + hpType: types.NewPointer(bObj.Type()), + subTestFuncType: types.NewSignature(nil, types.NewTuple(bVar), nil, false), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkBBegin), + checkFirst: t.enabledChecks.Enabled(checkBFirst), + checkName: t.enabledChecks.Enabled(checkBName), }, true } @@ -287,8 +308,8 @@ func (t thelper) buildTBCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) ( return checkFuncOpts{ skipPrefix: "", varName: "tb", - tbHelper: tbHelper, - tbType: tbObj.Type(), + fnHelper: tbHelper, + hpType: tbObj.Type(), ctxType: ctxType, checkBegin: t.enabledChecks.Enabled(checkTBBegin), checkFirst: t.enabledChecks.Enabled(checkTBFirst), @@ -304,11 +325,15 @@ type funcDecl struct { } func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts checkFuncOpts) { + if !opts.checkFirst && !opts.checkBegin && !opts.checkName { + return + } + if opts.skipPrefix != "" && strings.HasPrefix(funcDecl.Name.Name, opts.skipPrefix) { return } - p, pos, ok := searchFuncParam(pass, funcDecl, opts.tbType) + p, pos, ok := searchFuncParam(pass, funcDecl, opts.hpType) if !ok { return } @@ -322,7 +347,7 @@ func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts ch } if !checkFirstPassed { - reports.Reportf(funcDecl.Pos, "parameter %s should be the first or after context.Context", opts.tbType) + reports.Reportf(funcDecl.Pos, "parameter %s should be the first or after context.Context", opts.hpType) } } } @@ -330,12 +355,12 @@ func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts ch if len(p.Names) > 0 && p.Names[0].Name != "_" { if opts.checkName { if p.Names[0].Name != opts.varName { - reports.Reportf(funcDecl.Pos, "parameter %s should have name %s", opts.tbType, opts.varName) + reports.Reportf(funcDecl.Pos, "parameter %s should have name %s", opts.hpType, opts.varName) } } if opts.checkBegin { - if len(funcDecl.Body.List) == 0 || !isTHelperCall(pass, funcDecl.Body.List[0], opts.tbHelper) { + if len(funcDecl.Body.List) == 0 || !isTHelperCall(pass, funcDecl.Body.List[0], opts.fnHelper) { reports.Reportf(funcDecl.Pos, "test helper function should start from %s.Helper()", opts.varName) } } @@ -398,6 +423,27 @@ func extractSubtestExp( return []ast.Expr{e.Args[1]} } +// extractSubtestFuzzExp analyzes that call expresion 'e' is f.Fuzz +// and returns subtest function. 
+func extractSubtestFuzzExp( + pass *analysis.Pass, e *ast.CallExpr, fuzzRun types.Object, +) []ast.Expr { + selExpr, ok := e.Fun.(*ast.SelectorExpr) + if !ok { + return nil + } + + if !isSelectorCall(pass, selExpr, fuzzRun) { + return nil + } + + if len(e.Args) != 1 { + return nil + } + + return []ast.Expr{e.Args[0]} +} + // unwrapTestingFunctionConstruction checks that expresion is build testing functions // and returns the result of building. func unwrapTestingFunctionBuilding(pass *analysis.Pass, expr ast.Expr, testFuncType types.Type) []ast.Expr { diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go117.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go117.go new file mode 100644 index 000000000..ea728a402 --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go117.go @@ -0,0 +1,14 @@ +//go:build !go1.18 +// +build !go1.18 + +package analyzer + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" +) + +func (t thelper) buildFuzzCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + return checkFuncOpts{}, true +} diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go118.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go118.go new file mode 100644 index 000000000..14ed5a0eb --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer_go118.go @@ -0,0 +1,40 @@ +//go:build go1.18 +// +build go1.18 + +package analyzer + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" +) + +func (t thelper) buildFuzzCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + fObj := analysisutil.ObjectOf(pass, "testing", "F") + if fObj == nil { + return checkFuncOpts{}, false + } + + fHelper, _, _ := types.LookupFieldOrMethod(fObj.Type(), true, fObj.Pkg(), "Helper") + if fHelper == nil { + return checkFuncOpts{}, false + } + + tFuzz, _, _ := types.LookupFieldOrMethod(fObj.Type(), true, fObj.Pkg(), "Fuzz") + if tFuzz == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "Fuzz", + varName: "f", + fnHelper: fHelper, + subRun: tFuzz, + hpType: types.NewPointer(fObj.Type()), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkFBegin), + checkFirst: t.enabledChecks.Enabled(checkFFirst), + checkName: t.enabledChecks.Enabled(checkFName), + }, true +} diff --git a/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/vendor/github.com/ldez/gomoddirectives/.golangci.yml index cc0a1fcae..a2483e95f 100644 --- a/vendor/github.com/ldez/gomoddirectives/.golangci.yml +++ b/vendor/github.com/ldez/gomoddirectives/.golangci.yml @@ -72,6 +72,7 @@ linters: - wrapcheck - exhaustive - exhaustivestruct + - varnamelen issues: exclude-use-default: false diff --git a/vendor/github.com/ldez/gomoddirectives/module.go b/vendor/github.com/ldez/gomoddirectives/module.go index 379ae07fa..907be244f 100644 --- a/vendor/github.com/ldez/gomoddirectives/module.go +++ b/vendor/github.com/ldez/gomoddirectives/module.go @@ -1,10 +1,11 @@ package gomoddirectives import ( + "bytes" "encoding/json" "errors" "fmt" - "io/ioutil" + "os" "os/exec" "golang.org/x/mod/modfile" @@ -29,7 +30,7 @@ func GetModuleFile() (*modfile.File, error) { } var v modInfo - err = json.Unmarshal(raw, &v) + err = json.NewDecoder(bytes.NewBuffer(raw)).Decode(&v) if err != nil { return nil, fmt.Errorf("unmarshaling error: 
%w: %s", err, string(raw)) } @@ -38,7 +39,7 @@ func GetModuleFile() (*modfile.File, error) { return nil, errors.New("working directory is not part of a module") } - raw, err = ioutil.ReadFile(v.GoMod) + raw, err = os.ReadFile(v.GoMod) if err != nil { return nil, fmt.Errorf("reading go.mod file: %w", err) } diff --git a/vendor/github.com/lufeee/execinquery/.gitignore b/vendor/github.com/lufeee/execinquery/.gitignore new file mode 100644 index 000000000..00e1abc31 --- /dev/null +++ b/vendor/github.com/lufeee/execinquery/.gitignore @@ -0,0 +1 @@ +execinquery diff --git a/vendor/github.com/lufeee/execinquery/LICENSE b/vendor/github.com/lufeee/execinquery/LICENSE new file mode 100644 index 000000000..b6ab14aec --- /dev/null +++ b/vendor/github.com/lufeee/execinquery/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 lufe + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/lufeee/execinquery/README.md b/vendor/github.com/lufeee/execinquery/README.md new file mode 100644 index 000000000..38fa7c8b9 --- /dev/null +++ b/vendor/github.com/lufeee/execinquery/README.md @@ -0,0 +1,76 @@ +# execinquery - a simple query string checker in Query function +[![Go Matrix](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml) +[![Go lint](https://github.com/lufeee/execinquery/actions/workflows/lint.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/lint.yml) +[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) +## About + +execinquery is a linter about query string checker in Query function which reads your Go src files and +warnings it finds. + +## Installation + +```sh +go install github.com/lufeee/execinquery/cmd/execinquery +``` + +## Usage +```go +package main + +import ( + "database/sql" + "log" +) + +func main() { + db, err := sql.Open("mysql", "test:test@tcp(test:3306)/test") + if err != nil { + log.Fatal("Database Connect Error: ", err) + } + defer db.Close() + + test := "a" + _, err = db.Query("Update * FROM hoge where id = ?", test) + if err != nil { + log.Fatal("Query Error: ", err) + } + +} +``` + +```console +go vet -vettool=$(which execinquery) ./... 
+ +# command-line-arguments +./a.go:16:11: Use Exec instead of Query to execute `UPDATE` query +``` + +## CI + +### CircleCI + +```yaml +- run: + name: install execinquery + command: go install github.com/lufeee/execinquery + +- run: + name: run execinquery + command: go vet -vettool=`which execinquery` ./... +``` + +### GitHub Actions + +```yaml +- name: install execinquery + run: go install github.com/lufeee/execinquery + +- name: run execinquery + run: go vet -vettool=`which execinquery` ./... +``` + +### License + +MIT license. + +
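The execinquery README above describes how to run the new linter; as a concrete illustration (not part of this diff — the package, driver, and table names below are invented), here is a minimal sketch of the pattern execinquery reports and the suggested fix:

```go
// Hypothetical illustration of the pattern execinquery reports (not taken
// from this diff): running a non-SELECT statement through Query instead of Exec.
package main

import (
	"database/sql"
	"log"

	_ "github.com/go-sql-driver/mysql" // assumed driver, for illustration only
)

func update(db *sql.DB) error {
	// Flagged by execinquery: "Use Exec instead of Query to execute `UPDATE` query".
	// Query returns *sql.Rows that the caller would also have to close.
	//   rows, err := db.Query("UPDATE users SET name = ? WHERE id = ?", "a", 1)

	// Preferred: Exec is meant for statements that do not return rows.
	_, err := db.Exec("UPDATE users SET name = ? WHERE id = ?", "a", 1)
	return err
}

func main() {
	db, err := sql.Open("mysql", "user:pass@tcp(localhost:3306)/testdb")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := update(db); err != nil {
		log.Fatal(err)
	}
}
```

The execinquery.go analyzer added in the next file diff implements this check: for calls on database/sql methods whose names contain "Query", it extracts the query string, strips quotes and SQL comments, and reports any statement whose first keyword is not SELECT.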
diff --git a/vendor/github.com/lufeee/execinquery/execinquery.go b/vendor/github.com/lufeee/execinquery/execinquery.go new file mode 100644 index 000000000..c37dc1701 --- /dev/null +++ b/vendor/github.com/lufeee/execinquery/execinquery.go @@ -0,0 +1,135 @@ +package execinquery + +import ( + "go/ast" + "regexp" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const doc = "execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds" + +// Analyzer is checking database/sql pkg Query's function +var Analyzer = &analysis.Analyzer{ + Name: "execinquery", + Doc: doc, + Run: newLinter().run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, +} + +type linter struct { + commentExp *regexp.Regexp + multilineCommentExp *regexp.Regexp +} + +func newLinter() *linter { + return &linter{ + commentExp: regexp.MustCompile(`--[^\n]*\n`), + multilineCommentExp: regexp.MustCompile(`(?s)/\*.*?\*/`), + } +} + +func (l linter) run(pass *analysis.Pass) (interface{}, error) { + result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + + result.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.CallExpr: + selector, ok := n.Fun.(*ast.SelectorExpr) + if !ok { + return + } + + if pass.TypesInfo == nil || pass.TypesInfo.Uses[selector.Sel] == nil || pass.TypesInfo.Uses[selector.Sel].Pkg() == nil { + return + } + + if "database/sql" != pass.TypesInfo.Uses[selector.Sel].Pkg().Path() { + return + } + + if !strings.Contains(selector.Sel.Name, "Query") { + return + } + + replacement := "Exec" + var i int // the index of the query argument + if strings.Contains(selector.Sel.Name, "Context") { + replacement += "Context" + i = 1 + } + + if len(n.Args) <= i { + return + } + + query := l.getQueryString(n.Args[i]) + if query == "" { + return + } + + query = strings.TrimSpace(l.cleanValue(query)) + parts := strings.SplitN(query, " ", 2) + cmd := strings.ToUpper(parts[0]) + + if strings.HasPrefix(cmd, "SELECT") { + return + } + + pass.Reportf(n.Fun.Pos(), "Use %s instead of %s to execute `%s` query", replacement, selector.Sel.Name, cmd) + } + }) + + return nil, nil +} + +func (l linter) cleanValue(s string) string { + v := strings.NewReplacer(`"`, "", "`", "").Replace(s) + + v = l.multilineCommentExp.ReplaceAllString(v, "") + + return l.commentExp.ReplaceAllString(v, "") +} + +func (l linter) getQueryString(exp interface{}) string { + switch e := exp.(type) { + case *ast.AssignStmt: + var v string + for _, stmt := range e.Rhs { + v += l.cleanValue(l.getQueryString(stmt)) + } + return v + + case *ast.BasicLit: + return e.Value + + case *ast.ValueSpec: + var v string + for _, value := range e.Values { + v += l.cleanValue(l.getQueryString(value)) + } + return v + + case *ast.Ident: + if e.Obj == nil { + return "" + } + return l.getQueryString(e.Obj.Decl) + + case *ast.BinaryExpr: + v := l.cleanValue(l.getQueryString(e.X)) + v += l.cleanValue(l.getQueryString(e.Y)) + return v + } + + return "" +} diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go index 367166d58..e1e9dd7b1 100644 --- a/vendor/github.com/magiconair/properties/lex.go +++ b/vendor/github.com/magiconair/properties/lex.go @@ -128,18 +128,6 @@ func (l *lexer) acceptRun(valid string) { l.backup() } -// 
acceptRunUntil consumes a run of runes up to a terminator. -func (l *lexer) acceptRunUntil(term rune) { - for term != l.next() { - } - l.backup() -} - -// hasText returns true if the current parsed text is not empty. -func (l *lexer) isNotEmpty() bool { - return l.pos > l.start -} - // lineNumber reports which line we're on, based on the position of // the previous item returned by nextItem. Doing it this way // means we don't have to worry about peek double counting. diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go index cdc4a8034..430e4fcd2 100644 --- a/vendor/github.com/magiconair/properties/parser.go +++ b/vendor/github.com/magiconair/properties/parser.go @@ -59,14 +59,6 @@ func (p *parser) errorf(format string, args ...interface{}) { panic(fmt.Errorf(format, args...)) } -func (p *parser) expect(expected itemType) (token item) { - token = p.lex.nextItem() - if token.typ != expected { - p.unexpected(token) - } - return token -} - func (p *parser) expectOneOf(expected ...itemType) (token item) { token = p.lex.nextItem() for _, v := range expected { @@ -91,5 +83,4 @@ func (p *parser) recover(errp *error) { } *errp = e.(error) } - return } diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go index 1529e7223..62ae2d67a 100644 --- a/vendor/github.com/magiconair/properties/properties.go +++ b/vendor/github.com/magiconair/properties/properties.go @@ -786,7 +786,6 @@ func expand(s string, keys []string, prefix, postfix string, values map[string]s } s = s[:start] + new_val + s[end+1:] } - return s, nil } // encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters. diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go index 6c21ea322..388517a30 100644 --- a/vendor/github.com/mgechev/revive/config/config.go +++ b/vendor/github.com/mgechev/revive/config/config.go @@ -84,6 +84,7 @@ var allRules = append([]lint.Rule{ &rule.TimeEqualRule{}, &rule.BannedCharsRule{}, &rule.OptimizeOperandsOrderRule{}, + &rule.UseAnyRule{}, }, defaultRules...) 
var allFormatters = []lint.Formatter{ @@ -107,11 +108,17 @@ func getFormatters() map[string]lint.Formatter { } // GetLintingRules yields the linting rules that must be applied by the linter -func GetLintingRules(config *lint.Config) ([]lint.Rule, error) { +func GetLintingRules(config *lint.Config, extraRules []lint.Rule) ([]lint.Rule, error) { rulesMap := map[string]lint.Rule{} for _, r := range allRules { rulesMap[r.Name()] = r } + for _, r := range extraRules { + if _, ok := rulesMap[r.Name()]; ok { + continue + } + rulesMap[r.Name()] = r + } var lintingRules []lint.Rule for name, ruleConfig := range config.Rules { @@ -180,7 +187,7 @@ const defaultConfidence = 0.8 // GetConfig yields the configuration func GetConfig(configPath string) (*lint.Config, error) { - var config = &lint.Config{} + config := &lint.Config{} switch { case configPath != "": config.Confidence = defaultConfidence diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go index bd20da888..e7a451a5f 100644 --- a/vendor/github.com/mgechev/revive/formatter/checkstyle.go +++ b/vendor/github.com/mgechev/revive/formatter/checkstyle.go @@ -3,8 +3,9 @@ package formatter import ( "bytes" "encoding/xml" - "github.com/mgechev/revive/lint" plainTemplate "text/template" + + "github.com/mgechev/revive/lint" ) // Checkstyle is an implementation of the Formatter interface @@ -29,7 +30,7 @@ type issue struct { // Format formats the failures gotten from the lint. func (f *Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { - var issues = map[string][]issue{} + issues := map[string][]issue{} for failure := range failures { buf := new(bytes.Buffer) xml.Escape(buf, []byte(failure.Failure)) diff --git a/vendor/github.com/mgechev/revive/formatter/stylish.go b/vendor/github.com/mgechev/revive/formatter/stylish.go index cd81fdae7..ddafa5017 100644 --- a/vendor/github.com/mgechev/revive/formatter/stylish.go +++ b/vendor/github.com/mgechev/revive/formatter/stylish.go @@ -34,8 +34,8 @@ func formatFailure(failure lint.Failure, severity lint.Severity) []string { // Format formats the failures gotten from the lint. 
func (f *Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { var result [][]string - var totalErrors = 0 - var total = 0 + totalErrors := 0 + total := 0 for f := range failures { total++ diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go index 0de921aeb..f859da6fd 100644 --- a/vendor/github.com/mgechev/revive/lint/file.go +++ b/vendor/github.com/mgechev/revive/lint/file.go @@ -126,11 +126,13 @@ type enableDisableConfig struct { position int } -const directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$` -const directivePos = 1 -const modifierPos = 2 -const rulesPos = 3 -const reasonPos = 4 +const ( + directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$` + directivePos = 1 + modifierPos = 2 + rulesPos = 3 + reasonPos = 4 +) var re = regexp.MustCompile(directiveRE) diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go index 9a194f088..2743b7d9c 100644 --- a/vendor/github.com/mgechev/revive/lint/linter.go +++ b/vendor/github.com/mgechev/revive/lint/linter.go @@ -159,5 +159,6 @@ func getPositionInvalidFile(filename, s string) FailurePosition { Filename: filename, Line: line, Column: column, - }} + }, + } } diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go index c10b9e86b..ccc66691c 100644 --- a/vendor/github.com/mgechev/revive/lint/rule.go +++ b/vendor/github.com/mgechev/revive/lint/rule.go @@ -23,7 +23,7 @@ type AbstractRule struct { } // ToFailurePosition returns the failure position. 
-func ToFailurePosition(start token.Pos, end token.Pos, file *File) FailurePosition { +func ToFailurePosition(start, end token.Pos, file *File) FailurePosition { return FailurePosition{ Start: file.ToPosition(start), End: file.ToPosition(end), diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go index 69bf92069..530e2e9c1 100644 --- a/vendor/github.com/mgechev/revive/rule/add-constant.go +++ b/vendor/github.com/mgechev/revive/rule/add-constant.go @@ -22,7 +22,7 @@ func newWhiteList() whiteList { return map[string]map[string]bool{kindINT: {}, kindFLOAT: {}, kindSTRING: {}} } -func (wl whiteList) add(kind string, list string) { +func (wl whiteList) add(kind, list string) { elems := strings.Split(list, ",") for _, e := range elems { wl[kind][e] = true @@ -120,7 +120,6 @@ func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor { } return w - } func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) { diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go index 0a4e696c6..34710963e 100644 --- a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go +++ b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go @@ -63,7 +63,7 @@ func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor { return w } -func (w lintBoolLiteral) addFailure(node ast.Node, msg string, cat string) { +func (w lintBoolLiteral) addFailure(node ast.Node, msg, cat string) { w.onFailure(lint.Failure{ Confidence: 1, Node: node, diff --git a/vendor/github.com/mgechev/revive/rule/call-to-gc.go b/vendor/github.com/mgechev/revive/rule/call-to-gc.go index e05fa6924..e3cb3af81 100644 --- a/vendor/github.com/mgechev/revive/rule/call-to-gc.go +++ b/vendor/github.com/mgechev/revive/rule/call-to-gc.go @@ -16,7 +16,7 @@ func (r *CallToGCRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { failures = append(failures, failure) } - var gcTriggeringFunctions = map[string]map[string]bool{ + gcTriggeringFunctions := map[string]map[string]bool{ "runtime": {"GC": true}, } diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing-naming.go index 143bb18c3..9cd3a18a0 100644 --- a/vendor/github.com/mgechev/revive/rule/confusing-naming.go +++ b/vendor/github.com/mgechev/revive/rule/confusing-naming.go @@ -3,7 +3,6 @@ package rule import ( "fmt" "go/ast" - "strings" "sync" @@ -71,7 +70,7 @@ func (r *ConfusingNamingRule) Name() string { return "confusing-naming" } -//checkMethodName checks if a given method/function name is similar (just case differences) to other method/function of the same struct/file. +// checkMethodName checks if a given method/function name is similar (just case differences) to other method/function of the same struct/file. func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) { if id.Name == "init" && holder == defaultStructName { // ignore init functions @@ -128,7 +127,7 @@ type lintConfusingNames struct { const defaultStructName = "_" // used to map functions -//getStructName of a function receiver. Defaults to defaultStructName +// getStructName of a function receiver. 
Defaults to defaultStructName func getStructName(r *ast.FieldList) string { result := defaultStructName diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go index 6a9156111..ba15412e0 100644 --- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go +++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go @@ -1,9 +1,10 @@ package rule import ( - "github.com/mgechev/revive/lint" "go/ast" "go/token" + + "github.com/mgechev/revive/lint" ) // ConstantLogicalExprRule warns on constant logical expressions. @@ -44,11 +45,13 @@ func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor { return w } - if n.Op == token.EQL { + // Handles cases like: a <= a, a == a, a >= a + if w.isEqualityOperator(n.Op) { w.newFailure(n, "expression always evaluates to true") return w } + // Handles cases like: a < a, a > a, a != a if w.isInequalityOperator(n.Op) { w.newFailure(n, "expression always evaluates to false") return w @@ -69,9 +72,18 @@ func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) boo return false } +func (w *lintConstantLogicalExpr) isEqualityOperator(t token.Token) bool { + switch t { + case token.EQL, token.LEQ, token.GEQ: + return true + } + + return false +} + func (w *lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool { switch t { - case token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: + case token.LSS, token.GTR, token.NEQ: return true } diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go index a737cecc2..094262983 100644 --- a/vendor/github.com/mgechev/revive/rule/context-as-argument.go +++ b/vendor/github.com/mgechev/revive/rule/context-as-argument.go @@ -15,7 +15,6 @@ type ContextAsArgumentRule struct { // Apply applies the rule to given file. 
func (r *ContextAsArgumentRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { - if r.allowTypesLUT == nil { r.allowTypesLUT = getAllowTypesFromArguments(args) } diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go index 0cdec005a..cfc8fc570 100644 --- a/vendor/github.com/mgechev/revive/rule/deep-exit.go +++ b/vendor/github.com/mgechev/revive/rule/deep-exit.go @@ -17,7 +17,7 @@ func (r *DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { failures = append(failures, failure) } - var exitFunctions = map[string]map[string]bool{ + exitFunctions := map[string]map[string]bool{ "os": {"Exit": true}, "syscall": {"Exit": true}, "log": { diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go index 101127f76..4b62f34e5 100644 --- a/vendor/github.com/mgechev/revive/rule/defer.go +++ b/vendor/github.com/mgechev/revive/rule/defer.go @@ -122,11 +122,12 @@ func (w lintDeferRule) visitSubtree(n ast.Node, inADefer, inALoop, inAFuncLit bo inADefer: inADefer, inALoop: inALoop, inAFuncLit: inAFuncLit, - allow: w.allow} + allow: w.allow, + } ast.Walk(nw, n) } -func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat string, subcase string) { +func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat, subcase string) { if !w.allow[subcase] { return } diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error-strings.go index f0739d9c7..8d630dc65 100644 --- a/vendor/github.com/mgechev/revive/rule/error-strings.go +++ b/vendor/github.com/mgechev/revive/rule/error-strings.go @@ -17,7 +17,7 @@ type ErrorStringsRule struct{} func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { var failures []lint.Failure - var errorFunctions = map[string]map[string]struct{}{ + errorFunctions := map[string]map[string]struct{}{ "fmt": { "Errorf": {}, }, diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go index d81be3ac5..621fae546 100644 --- a/vendor/github.com/mgechev/revive/rule/exported.go +++ b/vendor/github.com/mgechev/revive/rule/exported.go @@ -23,7 +23,7 @@ type ExportedRule struct { func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { var failures []lint.Failure - if isTest(file) { + if file.IsTest() { return failures } @@ -61,7 +61,7 @@ func (r *ExportedRule) Name() string { return "exported" } -func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers bool, disableStutteringCheck bool, sayRepetitiveInsteadOfStutters bool) { +func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters bool) { // if any, we expect a slice of strings as configuration if len(args) < 1 { return diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag-param.go index 6cb6daea9..0cc2208c4 100644 --- a/vendor/github.com/mgechev/revive/rule/flag-param.go +++ b/vendor/github.com/mgechev/revive/rule/flag-param.go @@ -2,8 +2,9 @@ package rule import ( "fmt" - "github.com/mgechev/revive/lint" "go/ast" + + "github.com/mgechev/revive/lint" ) // FlagParamRule lints 
given else constructs. diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go index 2cdb84c91..b213cc4f7 100644 --- a/vendor/github.com/mgechev/revive/rule/function-length.go +++ b/vendor/github.com/mgechev/revive/rule/function-length.go @@ -43,7 +43,7 @@ func (r *FunctionLength) Name() string { return "function-length" } -func (r *FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt int64, maxLines int64) { +func (r *FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLines int64) { if len(arguments) != 2 { panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments))) } diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go index 5934d0b0b..2a5fa7bc2 100644 --- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go +++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go @@ -2,7 +2,6 @@ package rule import ( "go/ast" - "strings" "github.com/mgechev/revive/lint" diff --git a/vendor/github.com/mgechev/revive/rule/nested-structs.go b/vendor/github.com/mgechev/revive/rule/nested-structs.go index cfe9648b2..deb202ec4 100644 --- a/vendor/github.com/mgechev/revive/rule/nested-structs.go +++ b/vendor/github.com/mgechev/revive/rule/nested-structs.go @@ -47,15 +47,25 @@ func (l *lintNestedStructs) Visit(n ast.Node) ast.Visitor { } return nil case *ast.Field: - if _, ok := v.Type.(*ast.StructType); ok { + filter := func(n ast.Node) bool { + switch n.(type) { + case *ast.StructType: + return true + default: + return false + } + } + structs := pick(v, filter, nil) + for _, s := range structs { l.onFailure(lint.Failure{ Failure: "no nested structs are allowed", Category: "style", - Node: v, + Node: s, Confidence: 1, }) - break } + return nil // no need to visit (again) the field } + return l } diff --git a/vendor/github.com/mgechev/revive/rule/package-comments.go b/vendor/github.com/mgechev/revive/rule/package-comments.go index 00fc5bb91..035e11397 100644 --- a/vendor/github.com/mgechev/revive/rule/package-comments.go +++ b/vendor/github.com/mgechev/revive/rule/package-comments.go @@ -20,7 +20,7 @@ type PackageCommentsRule struct{} func (r *PackageCommentsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { var failures []lint.Failure - if isTest(file) { + if file.IsTest() { return failures } diff --git a/vendor/github.com/mgechev/revive/rule/range-val-address.go b/vendor/github.com/mgechev/revive/rule/range-val-address.go index bad26e5e9..1412955df 100644 --- a/vendor/github.com/mgechev/revive/rule/range-val-address.go +++ b/vendor/github.com/mgechev/revive/rule/range-val-address.go @@ -95,16 +95,34 @@ func (bw rangeBodyVisitor) Visit(node ast.Node) ast.Visitor { case *ast.CallExpr: if fun, ok := e.Fun.(*ast.Ident); ok && fun.Name == "append" { // e.g. ...append(arr, &value) for _, v := range e.Args { - if bw.isAccessingRangeValueAddress(v) { - bw.onFailure(bw.newFailure(e)) + if lit, ok := v.(*ast.CompositeLit); ok { // e.g. ...append(arr, v{id:&value}) + bw.checkCompositeLit(lit) + continue + } + if bw.isAccessingRangeValueAddress(v) { // e.g. ...append(arr, &value) + bw.onFailure(bw.newFailure(v)) } } } + case *ast.CompositeLit: // e.g. 
...v{id:&value} + bw.checkCompositeLit(e) } } return bw } +func (bw rangeBodyVisitor) checkCompositeLit(comp *ast.CompositeLit) { + for _, exp := range comp.Elts { + e, ok := exp.(*ast.KeyValueExpr) + if !ok { + continue + } + if bw.isAccessingRangeValueAddress(e.Value) { + bw.onFailure(bw.newFailure(e.Value)) + } + } +} + func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool { u, ok := exp.(*ast.UnaryExpr) if !ok { diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go index 0547b8d93..d961271e1 100644 --- a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go +++ b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go @@ -35,7 +35,6 @@ type rangeValInClosure struct { } func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor { - // Find the variables updated by the loop statement. var vars []*ast.Ident addVar := func(expr ast.Expr) { diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go index 947b8aac7..62cb7b615 100644 --- a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go +++ b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go @@ -2,11 +2,66 @@ package rule import ( "fmt" - "github.com/mgechev/revive/lint" "go/ast" "go/token" + + "github.com/mgechev/revive/lint" ) +var builtInConstAndVars = map[string]bool{ + "true": true, + "false": true, + "iota": true, + "nil": true, +} + +var builtFunctions = map[string]bool{ + "append": true, + "cap": true, + "close": true, + "complex": true, + "copy": true, + "delete": true, + "imag": true, + "len": true, + "make": true, + "new": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, +} + +var builtInTypes = map[string]bool{ + "ComplexType": true, + "FloatType": true, + "IntegerType": true, + "Type": true, + "Type1": true, + "bool": true, + "byte": true, + "complex128": true, + "complex64": true, + "error": true, + "float32": true, + "float64": true, + "int": true, + "int16": true, + "int32": true, + "int64": true, + "int8": true, + "rune": true, + "string": true, + "uint": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uint8": true, + "uintptr": true, + "any": true, +} + // RedefinesBuiltinIDRule warns when a builtin identifier is shadowed. 
type RedefinesBuiltinIDRule struct{} @@ -14,65 +69,12 @@ type RedefinesBuiltinIDRule struct{} func (r *RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { var failures []lint.Failure - var builtInConstAndVars = map[string]bool{ - "true": true, - "false": true, - "iota": true, - "nil": true, - } - - var builtFunctions = map[string]bool{ - "append": true, - "cap": true, - "close": true, - "complex": true, - "copy": true, - "delete": true, - "imag": true, - "len": true, - "make": true, - "new": true, - "panic": true, - "print": true, - "println": true, - "real": true, - "recover": true, - } - - var builtInTypes = map[string]bool{ - "ComplexType": true, - "FloatType": true, - "IntegerType": true, - "Type": true, - "Type1": true, - "bool": true, - "byte": true, - "complex128": true, - "complex64": true, - "error": true, - "float32": true, - "float64": true, - "int": true, - "int16": true, - "int32": true, - "int64": true, - "int8": true, - "rune": true, - "string": true, - "uint": true, - "uint16": true, - "uint32": true, - "uint64": true, - "uint8": true, - "uintptr": true, - } - onFailure := func(failure lint.Failure) { failures = append(failures, failure) } astFile := file.AST - w := &lintRedefinesBuiltinID{builtInConstAndVars, builtFunctions, builtInTypes, onFailure} + w := &lintRedefinesBuiltinID{onFailure} ast.Walk(w, astFile) return failures @@ -84,34 +86,46 @@ func (r *RedefinesBuiltinIDRule) Name() string { } type lintRedefinesBuiltinID struct { - constsAndVars map[string]bool - funcs map[string]bool - types map[string]bool - onFailure func(lint.Failure) + onFailure func(lint.Failure) } func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { switch n := node.(type) { case *ast.GenDecl: - if n.Tok != token.TYPE { - return nil // skip if not type declaration - } - typeSpec, ok := n.Specs[0].(*ast.TypeSpec) - if !ok { - return nil - } - id := typeSpec.Name.Name - if w.types[id] { - w.addFailure(n, fmt.Sprintf("redefinition of the built-in type %s", id)) + switch n.Tok { + case token.TYPE: + typeSpec, ok := n.Specs[0].(*ast.TypeSpec) + if !ok { + return nil + } + id := typeSpec.Name.Name + if ok, bt := w.isBuiltIn(id); ok { + w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) + } + case token.VAR, token.CONST: + for _, vs := range n.Specs { + valSpec, ok := vs.(*ast.ValueSpec) + if !ok { + continue + } + for _, name := range valSpec.Names { + if ok, bt := w.isBuiltIn(name.Name); ok { + w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, name)) + } + } + } + default: + return nil // skip if not type/var/const declaration } + case *ast.FuncDecl: if n.Recv != nil { return w // skip methods } id := n.Name.Name - if w.funcs[id] { - w.addFailure(n, fmt.Sprintf("redefinition of the built-in function %s", id)) + if ok, bt := w.isBuiltIn(id); ok { + w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) } case *ast.AssignStmt: for _, e := range n.Lhs { @@ -120,13 +134,19 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { continue } - if w.constsAndVars[id.Name] { + if ok, bt := w.isBuiltIn(id.Name); ok { var msg string - if n.Tok == token.DEFINE { - msg = fmt.Sprintf("assignment creates a shadow of built-in identifier %s", id.Name) - } else { - msg = fmt.Sprintf("assignment modifies built-in identifier %s", id.Name) + switch bt { + case "constant or variable": + if n.Tok == token.DEFINE { + msg = fmt.Sprintf("assignment creates a shadow of built-in identifier %s", id.Name) + } 
else { + msg = fmt.Sprintf("assignment modifies built-in identifier %s", id.Name) + } + default: + msg = fmt.Sprintf("redefinition of the built-in %s %s", bt, id) } + w.addFailure(n, msg) } } @@ -143,3 +163,19 @@ func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) { Failure: msg, }) } + +func (w lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) { + if builtFunctions[id] { + return true, "function" + } + + if builtInConstAndVars[id] { + return true, "constant or variable" + } + + if builtInTypes[id] { + return true, "type" + } + + return false, "" +} diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go index 73557c43d..502e92313 100644 --- a/vendor/github.com/mgechev/revive/rule/string-format.go +++ b/vendor/github.com/mgechev/revive/rule/string-format.go @@ -276,7 +276,8 @@ func (rule stringFormatSubrule) lintMessage(s string, node ast.Node) { rule.parent.onFailure(lint.Failure{ Confidence: 1, Failure: failure, - Node: node}) + Node: node, + }) } // #endregion diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go index cb3818e92..f890e9db7 100644 --- a/vendor/github.com/mgechev/revive/rule/struct-tag.go +++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go @@ -53,7 +53,6 @@ func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { } return w - } // checkTaggedField checks the tag of the given field. diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go index b9ce39606..1bce093fe 100644 --- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go +++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go @@ -18,7 +18,7 @@ func (r *SuperfluousElseRule) Apply(file *lint.File, _ lint.Arguments) []lint.Fa failures = append(failures, failure) } - var branchingFunctions = map[string]map[string]bool{ + branchingFunctions := map[string]map[string]bool{ "os": {"Exit": true}, "log": { "Fatal": true, diff --git a/vendor/github.com/mgechev/revive/rule/unreachable-code.go b/vendor/github.com/mgechev/revive/rule/unreachable-code.go index 5472feaa9..9032f3178 100644 --- a/vendor/github.com/mgechev/revive/rule/unreachable-code.go +++ b/vendor/github.com/mgechev/revive/rule/unreachable-code.go @@ -16,7 +16,7 @@ func (r *UnreachableCodeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Fa failures = append(failures, failure) } - var branchingFunctions = map[string]map[string]bool{ + branchingFunctions := map[string]map[string]bool{ "os": {"Exit": true}, "log": { "Fatal": true, diff --git a/vendor/github.com/mgechev/revive/rule/use-any.go b/vendor/github.com/mgechev/revive/rule/use-any.go new file mode 100644 index 000000000..5e75e2f87 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/use-any.go @@ -0,0 +1,54 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// UseAnyRule lints given else constructs. +type UseAnyRule struct{} + +// Apply applies the rule to given file. 
+func (r *UseAnyRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + walker := lintUseAny{ + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + fileAst := file.AST + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *UseAnyRule) Name() string { + return "use-any" +} + +type lintUseAny struct { + onFailure func(lint.Failure) +} + +func (w lintUseAny) Visit(n ast.Node) ast.Visitor { + it, ok := n.(*ast.InterfaceType) + if !ok { + return w + } + + if len(it.Methods.List) != 0 { + return w // it is not and empty interface + } + + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: "since GO 1.18 'interface{}' can be replaced by 'any'", + }) + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go index 8b0f556b9..2de971285 100644 --- a/vendor/github.com/mgechev/revive/rule/utils.go +++ b/vendor/github.com/mgechev/revive/rule/utils.go @@ -19,10 +19,6 @@ const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" // If id == nil, the answer is false. func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } -func isTest(f *lint.File) bool { - return strings.HasSuffix(f.Name, "_test.go") -} - var commonMethods = map[string]bool{ "Error": true, "Read": true, diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md index 38a099162..c75823490 100644 --- a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md +++ b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md @@ -1,3 +1,16 @@ +## 1.5.0 + +* New option `IgnoreUntaggedFields` to ignore decoding to any fields + without `mapstructure` (or the configured tag name) set [GH-277] +* New option `ErrorUnset` which makes it an error if any fields + in a target struct are not set by the decoding process. [GH-225] +* New function `OrComposeDecodeHookFunc` to help compose decode hooks. [GH-240] +* Decoding to slice from array no longer crashes [GH-265] +* Decode nested struct pointers to map [GH-271] +* Fix issue where `,squash` was ignored if `Squash` option was set. [GH-280] +* Fix issue where fields with `,omitempty` would sometimes decode + into a map with an empty string key [GH-281] + ## 1.4.3 * Fix cases where `json.Number` didn't decode properly [GH-261] diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go index 4d4bbc733..3a754ca72 100644 --- a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go +++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go @@ -77,6 +77,28 @@ func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { } } +// OrComposeDecodeHookFunc executes all input hook functions until one of them returns no error. In that case its value is returned. +// If all hooks return an error, OrComposeDecodeHookFunc returns an error concatenating all error messages. 
+func OrComposeDecodeHookFunc(ff ...DecodeHookFunc) DecodeHookFunc { + return func(a, b reflect.Value) (interface{}, error) { + var allErrs string + var out interface{} + var err error + + for _, f := range ff { + out, err = DecodeHookExec(f, a, b) + if err != nil { + allErrs += err.Error() + "\n" + continue + } + + return out, nil + } + + return nil, errors.New(allErrs) + } +} + // StringToSliceHookFunc returns a DecodeHookFunc that converts // string to []string by splitting on the given sep. func StringToSliceHookFunc(sep string) DecodeHookFunc { diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go index 6b81b0067..1efb22ac3 100644 --- a/vendor/github.com/mitchellh/mapstructure/mapstructure.go +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go @@ -122,7 +122,7 @@ // field value is zero and a numeric type, the field is empty, and it won't // be encoded into the destination type. // -// type Source { +// type Source struct { // Age int `mapstructure:",omitempty"` // } // @@ -215,6 +215,12 @@ type DecoderConfig struct { // (extra keys). ErrorUnused bool + // If ErrorUnset is true, then it is an error for there to exist + // fields in the result that were not set in the decoding process + // (extra fields). This only applies to decoding to a struct. This + // will affect all nested structs as well. + ErrorUnset bool + // ZeroFields, if set to true, will zero fields before writing them. // For example, a map will be emptied before decoded values are put in // it. If this is false, a map will be merged. @@ -259,6 +265,10 @@ type DecoderConfig struct { // defaults to "mapstructure" TagName string + // IgnoreUntaggedFields ignores all struct fields without explicit + // TagName, comparable to `mapstructure:"-"` as default behaviour. + IgnoreUntaggedFields bool + // MatchName is the function used to match the map key to the struct // field name or tag. Defaults to `strings.EqualFold`. This can be used // to implement case-sensitive tag values, support snake casing, etc. @@ -284,6 +294,11 @@ type Metadata struct { // Unused is a slice of keys that were found in the raw value but // weren't decoded since there was no matching field in the result interface Unused []string + + // Unset is a slice of field names that were found in the result interface + // but weren't set in the decoding process since there was no matching value + // in the input + Unset []string } // Decode takes an input structure and uses reflection to translate it to @@ -375,6 +390,10 @@ func NewDecoder(config *DecoderConfig) (*Decoder, error) { if config.Metadata.Unused == nil { config.Metadata.Unused = make([]string, 0) } + + if config.Metadata.Unset == nil { + config.Metadata.Unset = make([]string, 0) + } } if config.TagName == "" { @@ -906,9 +925,15 @@ func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val re tagValue := f.Tag.Get(d.config.TagName) keyName := f.Name + if tagValue == "" && d.config.IgnoreUntaggedFields { + continue + } + // If Squash is set in the config, we squash the field down. 
squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous + v = dereferencePtrToStructIfNeeded(v, d.config.TagName) + // Determine the name of the key in the map if index := strings.Index(tagValue, ","); index != -1 { if tagValue[:index] == "-" { @@ -920,7 +945,7 @@ func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val re } // If "squash" is specified in the tag, we squash the field down. - squash = !squash && strings.Index(tagValue[index+1:], "squash") != -1 + squash = squash || strings.Index(tagValue[index+1:], "squash") != -1 if squash { // When squashing, the embedded type can be a pointer to a struct. if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct { @@ -932,7 +957,9 @@ func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val re return fmt.Errorf("cannot squash non-struct type '%s'", v.Type()) } } - keyName = tagValue[:index] + if keyNameTagValue := tagValue[:index]; keyNameTagValue != "" { + keyName = keyNameTagValue + } } else if len(tagValue) > 0 { if tagValue == "-" { continue @@ -1088,7 +1115,7 @@ func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) } // If the input value is nil, then don't allocate since empty != nil - if dataVal.IsNil() { + if dataValKind != reflect.Array && dataVal.IsNil() { return nil } @@ -1250,6 +1277,7 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e dataValKeysUnused[dataValKey.Interface()] = struct{}{} } + targetValKeysUnused := make(map[interface{}]struct{}) errors := make([]string, 0) // This slice will keep track of all the structs we'll be decoding. @@ -1354,7 +1382,8 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e if !rawMapVal.IsValid() { // There was no matching key in the map for the value in - // the struct. Just ignore. + // the struct. Remember it for potential errors and metadata. + targetValKeysUnused[fieldName] = struct{}{} continue } } @@ -1414,6 +1443,17 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e errors = appendErrors(errors, err) } + if d.config.ErrorUnset && len(targetValKeysUnused) > 0 { + keys := make([]string, 0, len(targetValKeysUnused)) + for rawKey := range targetValKeysUnused { + keys = append(keys, rawKey.(string)) + } + sort.Strings(keys) + + err := fmt.Errorf("'%s' has unset fields: %s", name, strings.Join(keys, ", ")) + errors = appendErrors(errors, err) + } + if len(errors) > 0 { return &Error{errors} } @@ -1428,6 +1468,14 @@ func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) e d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) } + for rawKey := range targetValKeysUnused { + key := rawKey.(string) + if name != "" { + key = name + "." 
+ key + } + + d.config.Metadata.Unset = append(d.config.Metadata.Unset, key) + } } return nil @@ -1465,3 +1513,28 @@ func getKind(val reflect.Value) reflect.Kind { return kind } } + +func isStructTypeConvertibleToMap(typ reflect.Type, checkMapstructureTags bool, tagName string) bool { + for i := 0; i < typ.NumField(); i++ { + f := typ.Field(i) + if f.PkgPath == "" && !checkMapstructureTags { // check for unexported fields + return true + } + if checkMapstructureTags && f.Tag.Get(tagName) != "" { // check for mapstructure tags inside + return true + } + } + return false +} + +func dereferencePtrToStructIfNeeded(v reflect.Value, tagName string) reflect.Value { + if v.Kind() != reflect.Ptr || v.Elem().Kind() != reflect.Struct { + return v + } + deref := v.Elem() + derefT := deref.Type() + if isStructTypeConvertibleToMap(derefT, true, tagName) { + return deref + } + return v +} diff --git a/vendor/github.com/pelletier/go-toml/README.md b/vendor/github.com/pelletier/go-toml/README.md index 6c061712b..7399e04bf 100644 --- a/vendor/github.com/pelletier/go-toml/README.md +++ b/vendor/github.com/pelletier/go-toml/README.md @@ -25,9 +25,9 @@ and [much faster][v2-bench]. If you only need reading and writing TOML documents (majority of cases), those features are implemented and the API unlikely to change. -The remaining features (Document structure editing and tooling) will be added -shortly. While pull-requests are welcome on v1, no active development is -expected on it. When v2.0.0 is released, v1 will be deprecated. +The remaining features will be added shortly. While pull-requests are welcome on +v1, no active development is expected on it. When v2.0.0 is released, v1 will be +deprecated. 👉 [go-toml v2][v2] diff --git a/vendor/github.com/pelletier/go-toml/SECURITY.md b/vendor/github.com/pelletier/go-toml/SECURITY.md new file mode 100644 index 000000000..b2f21cfc9 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ---------- | ------------------ | +| Latest 2.x | :white_check_mark: | +| All 1.x | :x: | +| All 0.x | :x: | + +## Reporting a Vulnerability + +Email a vulnerability report to `security@pelletier.codes`. Make sure to include +as many details as possible to reproduce the vulnerability. This is a +side-project: I will try to get back to you as quickly as possible, time +permitting in my personal life. Providing a working patch helps very much! 
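
The mapstructure hunks earlier in this diff add `ErrorUnset`, `IgnoreUntaggedFields`, a `Metadata.Unset` list, and `OrComposeDecodeHookFunc`. Below is a minimal, hypothetical usage sketch of two of those additions: the option and function names come from the vendored changes, while the `Config` struct, the input map, and the hook choices are illustrative only.

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Config is an illustrative target struct; it is not part of the diff.
type Config struct {
	Name string `mapstructure:"name"`
	Port int    `mapstructure:"port"`
}

func main() {
	input := map[string]interface{}{"name": "api"} // note: "port" is absent

	var out Config
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		Result: &out,
		// New in 1.5.0: fields of the result that the input never sets
		// become a decoding error instead of being silently left zero.
		ErrorUnset: true,
		// New in 1.5.0: try each hook in order until one succeeds.
		DecodeHook: mapstructure.OrComposeDecodeHookFunc(
			mapstructure.StringToTimeDurationHookFunc(),
			mapstructure.StringToIPHookFunc(),
		),
	})
	if err != nil {
		panic(err)
	}

	// Expected to fail because "port" was never set by the input.
	fmt.Println(dec.Decode(input))
}
```
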
diff --git a/vendor/github.com/pelletier/go-toml/marshal.go b/vendor/github.com/pelletier/go-toml/marshal.go index 3443c3545..571273049 100644 --- a/vendor/github.com/pelletier/go-toml/marshal.go +++ b/vendor/github.com/pelletier/go-toml/marshal.go @@ -1113,7 +1113,7 @@ func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}, mval1 *ref return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) } - if val.Convert(reflect.TypeOf(int(1))).Int() < 0 { + if val.Type().Kind() != reflect.Uint64 && val.Convert(reflect.TypeOf(int(1))).Int() < 0 { return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String()) } if reflect.Indirect(reflect.New(mtype)).OverflowUint(val.Convert(reflect.TypeOf(uint64(0))).Uint()) { diff --git a/vendor/github.com/pelletier/go-toml/parser.go b/vendor/github.com/pelletier/go-toml/parser.go index f5e1a44fb..b3726d0dd 100644 --- a/vendor/github.com/pelletier/go-toml/parser.go +++ b/vendor/github.com/pelletier/go-toml/parser.go @@ -293,42 +293,41 @@ func (p *tomlParser) parseRvalue() interface{} { return math.NaN() case tokenInteger: cleanedVal := cleanupNumberToken(tok.val) - var err error - var val int64 + base := 10 + s := cleanedVal + checkInvalidUnderscore := numberContainsInvalidUnderscore if len(cleanedVal) >= 3 && cleanedVal[0] == '0' { switch cleanedVal[1] { case 'x': - err = hexNumberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 16, 64) + checkInvalidUnderscore = hexNumberContainsInvalidUnderscore + base = 16 case 'o': - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 8, 64) + base = 8 case 'b': - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal[2:], 2, 64) + base = 2 default: panic("invalid base") // the lexer should catch this first } - } else { - err = numberContainsInvalidUnderscore(tok.val) - if err != nil { - p.raiseError(tok, "%s", err) - } - val, err = strconv.ParseInt(cleanedVal, 10, 64) + s = cleanedVal[2:] } + + err := checkInvalidUnderscore(tok.val) if err != nil { p.raiseError(tok, "%s", err) } - return val + + var val interface{} + val, err = strconv.ParseInt(s, base, 64) + if err == nil { + return val + } + + if s[0] != '-' { + if val, err = strconv.ParseUint(s, base, 64); err == nil { + return val + } + } + p.raiseError(tok, "%s", err) case tokenFloat: err := numberContainsInvalidUnderscore(tok.val) if err != nil { diff --git a/vendor/github.com/pelletier/go-toml/toml.go b/vendor/github.com/pelletier/go-toml/toml.go index 6d82587c4..5541b941f 100644 --- a/vendor/github.com/pelletier/go-toml/toml.go +++ b/vendor/github.com/pelletier/go-toml/toml.go @@ -471,7 +471,7 @@ func LoadBytes(b []byte) (tree *Tree, err error) { if _, ok := r.(runtime.Error); ok { panic(r) } - err = errors.New(r.(string)) + err = fmt.Errorf("%s", r) } }() diff --git a/vendor/github.com/pelletier/go-toml/v2/.dockerignore b/vendor/github.com/pelletier/go-toml/v2/.dockerignore new file mode 100644 index 000000000..7b5883475 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/.dockerignore @@ -0,0 +1,2 @@ +cmd/tomll/tomll +cmd/tomljson/tomljson diff --git 
a/vendor/github.com/pelletier/go-toml/v2/.gitattributes b/vendor/github.com/pelletier/go-toml/v2/.gitattributes new file mode 100644 index 000000000..34a0a21a3 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/.gitattributes @@ -0,0 +1,4 @@ +* text=auto + +benchmark/benchmark.toml text eol=lf +testdata/** text eol=lf diff --git a/vendor/github.com/pelletier/go-toml/v2/.gitignore b/vendor/github.com/pelletier/go-toml/v2/.gitignore new file mode 100644 index 000000000..a69e2b0eb --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/.gitignore @@ -0,0 +1,6 @@ +test_program/test_program_bin +fuzz/ +cmd/tomll/tomll +cmd/tomljson/tomljson +cmd/tomltestgen/tomltestgen +dist \ No newline at end of file diff --git a/vendor/github.com/pelletier/go-toml/v2/.golangci.toml b/vendor/github.com/pelletier/go-toml/v2/.golangci.toml new file mode 100644 index 000000000..067db5517 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/.golangci.toml @@ -0,0 +1,84 @@ +[service] +golangci-lint-version = "1.39.0" + +[linters-settings.wsl] +allow-assign-and-anything = true + +[linters-settings.exhaustive] +default-signifies-exhaustive = true + +[linters] +disable-all = true +enable = [ + "asciicheck", + "bodyclose", + "cyclop", + "deadcode", + "depguard", + "dogsled", + "dupl", + "durationcheck", + "errcheck", + "errorlint", + "exhaustive", + # "exhaustivestruct", + "exportloopref", + "forbidigo", + # "forcetypeassert", + "funlen", + "gci", + # "gochecknoglobals", + "gochecknoinits", + "gocognit", + "goconst", + "gocritic", + "gocyclo", + "godot", + "godox", + # "goerr113", + "gofmt", + "gofumpt", + "goheader", + "goimports", + "golint", + "gomnd", + # "gomoddirectives", + "gomodguard", + "goprintffuncname", + "gosec", + "gosimple", + "govet", + # "ifshort", + "importas", + "ineffassign", + "lll", + "makezero", + "misspell", + "nakedret", + "nestif", + "nilerr", + # "nlreturn", + "noctx", + "nolintlint", + #"paralleltest", + "prealloc", + "predeclared", + "revive", + "rowserrcheck", + "sqlclosecheck", + "staticcheck", + "structcheck", + "stylecheck", + # "testpackage", + "thelper", + "tparallel", + "typecheck", + "unconvert", + "unparam", + "unused", + "varcheck", + "wastedassign", + "whitespace", + # "wrapcheck", + # "wsl" +] diff --git a/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml new file mode 100644 index 000000000..793fb1849 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/.goreleaser.yaml @@ -0,0 +1,111 @@ +before: + hooks: + - go mod tidy + - go fmt ./... + - go test ./... 
+builds: + - id: tomll + main: ./cmd/tomll + binary: tomll + env: + - CGO_ENABLED=0 + flags: + - -trimpath + ldflags: + - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} + mod_timestamp: '{{ .CommitTimestamp }}' + targets: + - linux_amd64 + - windows_amd64 + - darwin_amd64 + - darwin_arm64 + - id: tomljson + main: ./cmd/tomljson + binary: tomljson + env: + - CGO_ENABLED=0 + flags: + - -trimpath + ldflags: + - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} + mod_timestamp: '{{ .CommitTimestamp }}' + targets: + - linux_amd64 + - windows_amd64 + - darwin_amd64 + - darwin_arm64 + - id: jsontoml + main: ./cmd/jsontoml + binary: jsontoml + env: + - CGO_ENABLED=0 + flags: + - -trimpath + ldflags: + - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.CommitDate}} + mod_timestamp: '{{ .CommitTimestamp }}' + targets: + - linux_amd64 + - windows_amd64 + - darwin_amd64 + - darwin_arm64 +universal_binaries: + - id: tomll + replace: true + name_template: tomll + - id: tomljson + replace: true + name_template: tomljson + - id: jsontoml + replace: true + name_template: jsontoml +archives: +- id: jsontoml + format: tar.xz + builds: + - jsontoml + files: + - none* + name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" +- id: tomljson + format: tar.xz + builds: + - tomljson + files: + - none* + name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" +- id: tomll + format: tar.xz + builds: + - tomll + files: + - none* + name_template: "{{ .Binary }}_{{.Version}}_{{ .Os }}_{{ .Arch }}" +dockers: + - id: tools + goos: linux + goarch: amd64 + ids: + - jsontoml + - tomljson + - tomll + image_templates: + - "ghcr.io/pelletier/go-toml:latest" + - "ghcr.io/pelletier/go-toml:{{ .Tag }}" + - "ghcr.io/pelletier/go-toml:v{{ .Major }}" + skip_push: false +checksum: + name_template: 'sha256sums.txt' +snapshot: + name_template: "{{ incpatch .Version }}-next" +release: + github: + owner: pelletier + name: go-toml + draft: true + prerelease: auto + mode: replace +changelog: + use: github-native +announce: + skip: true diff --git a/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md new file mode 100644 index 000000000..04dd12bcb --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/CONTRIBUTING.md @@ -0,0 +1,196 @@ +# Contributing + +Thank you for your interest in go-toml! We appreciate you considering +contributing to go-toml! + +The main goal is the project is to provide an easy-to-use and efficient TOML +implementation for Go that gets the job done and gets out of your way – dealing +with TOML is probably not the central piece of your project. + +As the single maintainer of go-toml, time is scarce. All help, big or small, is +more than welcomed! + +## Ask questions + +Any question you may have, somebody else might have it too. Always feel free to +ask them on the [discussion board][discussions]. We will try to answer them as +clearly and quickly as possible, time permitting. + +Asking questions also helps us identify areas where the documentation needs +improvement, or new features that weren't envisioned before. Sometimes, a +seemingly innocent question leads to the fix of a bug. Don't hesitate and ask +away! + +[discussions]: https://github.com/pelletier/go-toml/discussions + +## Improve the documentation + +The best way to share your knowledge and experience with go-toml is to improve +the documentation. 
Fix a typo, clarify an interface, add an example, anything +goes! + +The documentation is present in the [README][readme] and thorough the source +code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a change +to the documentation, create a pull request with your proposed changes. For +simple changes like that, the easiest way to go is probably the "Fork this +project and edit the file" button on Github, displayed at the top right of the +file. Unless it's a trivial change (for example a typo), provide a little bit of +context in your pull request description or commit message. + +## Report a bug + +Found a bug! Sorry to hear that :(. Help us and other track them down and fix by +reporting it. [File a new bug report][bug-report] on the [issues +tracker][issues-tracker]. The template should provide enough guidance on what to +include. When in doubt: add more details! By reducing ambiguity and providing +more information, it decreases back and forth and saves everyone time. + +## Code changes + +Want to contribute a patch? Very happy to hear that! + +First, some high-level rules: + +- A short proposal with some POC code is better than a lengthy piece of text + with no code. Code speaks louder than words. That being said, bigger changes + should probably start with a [discussion][discussions]. +- No backward-incompatible patch will be accepted unless discussed. Sometimes + it's hard, but we try not to break people's programs unless we absolutely have + to. +- If you are writing a new feature or extending an existing one, make sure to + write some documentation. +- Bug fixes need to be accompanied with regression tests. +- New code needs to be tested. +- Your commit messages need to explain why the change is needed, even if already + included in the PR description. + +It does sound like a lot, but those best practices are here to save time overall +and continuously improve the quality of the project, which is something everyone +benefits from. + +### Get started + +The fairly standard code contribution process looks like that: + +1. [Fork the project][fork]. +2. Make your changes, commit on any branch you like. +3. [Open up a pull request][pull-request] +4. Review, potential ask for changes. +5. Merge. + +Feel free to ask for help! You can create draft pull requests to gather +some early feedback! + +### Run the tests + +You can run tests for go-toml using Go's test tool: `go test -race ./...`. + +During the pull request process, all tests will be ran on Linux, Windows, and +MacOS on the last two versions of Go. + +However, given GitHub's new policy to _not_ run Actions on pull requests until a +maintainer clicks on button, it is highly recommended that you run them locally +as you make changes. + +### Check coverage + +We use `go tool cover` to compute test coverage. Most code editors have a way to +run and display code coverage, but at the end of the day, we do this: + +``` +go test -covermode=atomic -coverprofile=coverage.out +go tool cover -func=coverage.out +``` + +and verify that the overall percentage of tested code does not go down. This is +a requirement. As a rule of thumb, all lines of code touched by your changes +should be covered. On Unix you can use `./ci.sh coverage -d v2` to check if your +code lowers the coverage. + +### Verify performance + +Go-toml aims to stay efficient. We rely on a set of scenarios executed with Go's +builtin benchmark systems. 
Because of their noisy nature, containers provided by +Github Actions cannot be reliably used for benchmarking. As a result, you are +responsible for checking that your changes do not incur a performance penalty. +You can run their following to execute benchmarks: + +``` +go test ./... -bench=. -count=10 +``` + +Benchmark results should be compared against each other with +[benchstat][benchstat]. Typical flow looks like this: + +1. On the `v2` branch, run `go test ./... -bench=. -count 10` and save output to + a file (for example `old.txt`). +2. Make some code changes. +3. Run `go test ....` again, and save the output to an other file (for example + `new.txt`). +4. Run `benchstat old.txt new.txt` to check that time/op does not go up in any + test. + +On Unix you can use `./ci.sh benchmark -d v2` to verify how your code impacts +performance. + +It is highly encouraged to add the benchstat results to your pull request +description. Pull requests that lower performance will receive more scrutiny. + +[benchstat]: https://pkg.go.dev/golang.org/x/perf/cmd/benchstat + +### Style + +Try to look around and follow the same format and structure as the rest of the +code. We enforce using `go fmt` on the whole code base. + +--- + +## Maintainers-only + +### Merge pull request + +Checklist: + +- Passing CI. +- Does not introduce backward-incompatible changes (unless discussed). +- Has relevant doc changes. +- Benchstat does not show performance regression. +- Pull request is [labeled appropriately][pr-labels]. +- Title will be understandable in the changelog. + +1. Merge using "squash and merge". +2. Make sure to edit the commit message to keep all the useful information + nice and clean. +3. Make sure the commit title is clear and contains the PR number (#123). + +### New release + +1. Decide on the next version number. Use semver. +2. Generate release notes using [`gh`][gh]. Example: +``` +$ gh api -X POST \ + -F tag_name='v2.0.0-beta.5' \ + -F target_commitish='v2' \ + -F previous_tag_name='v2.0.0-beta.4' \ + --jq '.body' \ + repos/pelletier/go-toml/releases/generate-notes +``` +3. Look for "Other changes". That would indicate a pull request not labeled + properly. Tweak labels and pull request titles until changelog looks good for + users. +4. [Draft new release][new-release]. +5. Fill tag and target with the same value used to generate the changelog. +6. Set title to the new tag value. +7. Paste the generated changelog. +8. Check "create discussion", in the "Releases" category. +9. Check pre-release if new version is an alpha or beta. 
+ +[issues-tracker]: https://github.com/pelletier/go-toml/issues +[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md +[pkg.go.dev]: https://pkg.go.dev/github.com/pelletier/go-toml +[readme]: ./README.md +[fork]: https://help.github.com/articles/fork-a-repo +[pull-request]: https://help.github.com/en/articles/creating-a-pull-request +[new-release]: https://github.com/pelletier/go-toml/releases/new +[gh]: https://github.com/cli/cli +[pr-labels]: https://github.com/pelletier/go-toml/blob/v2/.github/release.yml diff --git a/vendor/github.com/pelletier/go-toml/v2/Dockerfile b/vendor/github.com/pelletier/go-toml/v2/Dockerfile new file mode 100644 index 000000000..b9e933237 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/Dockerfile @@ -0,0 +1,5 @@ +FROM scratch +ENV PATH "$PATH:/bin" +COPY tomll /bin/tomll +COPY tomljson /bin/tomljson +COPY jsontoml /bin/jsontoml diff --git a/vendor/github.com/pelletier/go-toml/v2/LICENSE b/vendor/github.com/pelletier/go-toml/v2/LICENSE new file mode 100644 index 000000000..6839d51cd --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 - 2022 Thomas Pelletier, Eric Anderton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/pelletier/go-toml/v2/README.md b/vendor/github.com/pelletier/go-toml/v2/README.md new file mode 100644 index 000000000..9d3bb1022 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/README.md @@ -0,0 +1,545 @@ +# go-toml v2 + +Go library for the [TOML](https://toml.io/en/) format. + +This library supports [TOML v1.0.0](https://toml.io/en/v1.0.0). + +[🐞 Bug Reports](https://github.com/pelletier/go-toml/issues) + +[💬 Anything else](https://github.com/pelletier/go-toml/discussions) + +## Documentation + +Full API, examples, and implementation notes are available in the Go +documentation. + +[![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml/v2.svg)](https://pkg.go.dev/github.com/pelletier/go-toml/v2) + +## Import + +```go +import "github.com/pelletier/go-toml/v2" +``` + +See [Modules](#Modules). + +## Features + +### Stdlib behavior + +As much as possible, this library is designed to behave similarly as the +standard library's `encoding/json`. + +### Performance + +While go-toml favors usability, it is written with performance in mind. 
Most +operations should not be shockingly slow. See [benchmarks](#benchmarks). + +### Strict mode + +`Decoder` can be set to "strict mode", which makes it error when some parts of +the TOML document was not prevent in the target structure. This is a great way +to check for typos. [See example in the documentation][strict]. + +[strict]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#example-Decoder.DisallowUnknownFields + +### Contextualized errors + +When most decoding errors occur, go-toml returns [`DecodeError`][decode-err]), +which contains a human readable contextualized version of the error. For +example: + +``` +2| key1 = "value1" +3| key2 = "missing2" + | ~~~~ missing field +4| key3 = "missing3" +5| key4 = "value4" +``` + +[decode-err]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#DecodeError + +### Local date and time support + +TOML supports native [local date/times][ldt]. It allows to represent a given +date, time, or date-time without relation to a timezone or offset. To support +this use-case, go-toml provides [`LocalDate`][tld], [`LocalTime`][tlt], and +[`LocalDateTime`][tldt]. Those types can be transformed to and from `time.Time`, +making them convenient yet unambiguous structures for their respective TOML +representation. + +[ldt]: https://toml.io/en/v1.0.0#local-date-time +[tld]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDate +[tlt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalTime +[tldt]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#LocalDateTime + +## Getting started + +Given the following struct, let's see how to read it and write it as TOML: + +```go +type MyConfig struct { + Version int + Name string + Tags []string +} +``` + +### Unmarshaling + +[`Unmarshal`][unmarshal] reads a TOML document and fills a Go structure with its +content. For example: + +```go +doc := ` +version = 2 +name = "go-toml" +tags = ["go", "toml"] +` + +var cfg MyConfig +err := toml.Unmarshal([]byte(doc), &cfg) +if err != nil { + panic(err) +} +fmt.Println("version:", cfg.Version) +fmt.Println("name:", cfg.Name) +fmt.Println("tags:", cfg.Tags) + +// Output: +// version: 2 +// name: go-toml +// tags: [go toml] +``` + +[unmarshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Unmarshal + +### Marshaling + +[`Marshal`][marshal] is the opposite of Unmarshal: it represents a Go structure +as a TOML document: + +```go +cfg := MyConfig{ + Version: 2, + Name: "go-toml", + Tags: []string{"go", "toml"}, +} + +b, err := toml.Marshal(cfg) +if err != nil { + panic(err) +} +fmt.Println(string(b)) + +// Output: +// Version = 2 +// Name = 'go-toml' +// Tags = ['go', 'toml'] +``` + +[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal + +## Benchmarks + +Execution time speedup compared to other Go TOML libraries: + + + + + + + + + + + + + +
+| Benchmark                        | go-toml v1 | BurntSushi/toml |
+|----------------------------------|------------|-----------------|
+| Marshal/HugoFrontMatter-2        | 1.9x       | 1.9x            |
+| Marshal/ReferenceFile/map-2      | 1.7x       | 1.8x            |
+| Marshal/ReferenceFile/struct-2   | 2.2x       | 2.5x            |
+| Unmarshal/HugoFrontMatter-2      | 2.9x       | 2.9x            |
+| Unmarshal/ReferenceFile/map-2    | 2.6x       | 2.9x            |
+| Unmarshal/ReferenceFile/struct-2 | 4.4x       | 5.3x            |
+
+<details><summary>See more</summary>
+
+The table above has the results of the most common use-cases. The table below
+contains the results of all benchmarks, including unrealistic ones. It is
+provided for completeness.
+
+| Benchmark                         | go-toml v1 | BurntSushi/toml |
+|-----------------------------------|------------|-----------------|
+| Marshal/SimpleDocument/map-2      | 1.8x       | 2.9x            |
+| Marshal/SimpleDocument/struct-2   | 2.7x       | 4.2x            |
+| Unmarshal/SimpleDocument/map-2    | 4.5x       | 3.1x            |
+| Unmarshal/SimpleDocument/struct-2 | 6.2x       | 3.9x            |
+| UnmarshalDataset/example-2        | 3.1x       | 3.5x            |
+| UnmarshalDataset/code-2           | 2.3x       | 3.1x            |
+| UnmarshalDataset/twitter-2        | 2.5x       | 2.6x            |
+| UnmarshalDataset/citm_catalog-2   | 2.1x       | 2.2x            |
+| UnmarshalDataset/canada-2         | 1.6x       | 1.3x            |
+| UnmarshalDataset/config-2         | 4.3x       | 3.2x            |
+| [Geo mean]                        | 2.7x       | 2.8x            |
+
+This table can be generated with `./ci.sh benchmark -a -html`.
+
+</details>
+ +## Modules + +go-toml uses Go's standard modules system. + +Installation instructions: + +- Go ≥ 1.16: Nothing to do. Use the import in your code. The `go` command deals + with it automatically. +- Go ≥ 1.13: `GO111MODULE=on go get github.com/pelletier/go-toml/v2`. + +In case of trouble: [Go Modules FAQ][mod-faq]. + +[mod-faq]: https://github.com/golang/go/wiki/Modules#why-does-installing-a-tool-via-go-get-fail-with-error-cannot-find-main-module + +## Tools + +Go-toml provides three handy command line tools: + + * `tomljson`: Reads a TOML file and outputs its JSON representation. + + ``` + $ go install github.com/pelletier/go-toml/v2/cmd/tomljson@latest + $ tomljson --help + ``` + + * `jsontoml`: Reads a JSON file and outputs a TOML representation. + + ``` + $ go install github.com/pelletier/go-toml/v2/cmd/jsontoml@latest + $ jsontoml --help + ``` + + * `tomll`: Lints and reformats a TOML file. + + ``` + $ go install github.com/pelletier/go-toml/v2/cmd/tomll@latest + $ tomll --help + ``` + +### Docker image + +Those tools are also available as a [Docker image][docker]. For example, to use +`tomljson`: + +``` +docker run -i ghcr.io/pelletier/go-toml:v2 tomljson < example.toml +``` + +Multiple versions are availble on [ghcr.io][docker]. + +[docker]: https://github.com/pelletier/go-toml/pkgs/container/go-toml + +## Migrating from v1 + +This section describes the differences between v1 and v2, with some pointers on +how to get the original behavior when possible. + +### Decoding / Unmarshal + +#### Automatic field name guessing + +When unmarshaling to a struct, if a key in the TOML document does not exactly +match the name of a struct field or any of the `toml`-tagged field, v1 tries +multiple variations of the key ([code][v1-keys]). + +V2 instead does a case-insensitive matching, like `encoding/json`. + +This could impact you if you are relying on casing to differentiate two fields, +and one of them is a not using the `toml` struct tag. The recommended solution +is to be specific about tag names for those fields using the `toml` struct tag. + +[v1-keys]: https://github.com/pelletier/go-toml/blob/a2e52561804c6cd9392ebf0048ca64fe4af67a43/marshal.go#L775-L781 + +#### Ignore preexisting value in interface + +When decoding into a non-nil `interface{}`, go-toml v1 uses the type of the +element in the interface to decode the object. For example: + +```go +type inner struct { + B interface{} +} +type doc struct { + A interface{} +} + +d := doc{ + A: inner{ + B: "Before", + }, +} + +data := ` +[A] +B = "After" +` + +toml.Unmarshal([]byte(data), &d) +fmt.Printf("toml v1: %#v\n", d) + +// toml v1: main.doc{A:main.inner{B:"After"}} +``` + +In this case, field `A` is of type `interface{}`, containing a `inner` struct. +V1 sees that type and uses it when decoding the object. + +When decoding an object into an `interface{}`, V2 instead disregards whatever +value the `interface{}` may contain and replaces it with a +`map[string]interface{}`. With the same data structure as above, here is what +the result looks like: + +```go +toml.Unmarshal([]byte(data), &d) +fmt.Printf("toml v2: %#v\n", d) + +// toml v2: main.doc{A:map[string]interface {}{"B":"After"}} +``` + +This is to match `encoding/json`'s behavior. There is no way to make the v2 +decoder behave like v1. + +#### Values out of array bounds ignored + +When decoding into an array, v1 returns an error when the number of elements +contained in the doc is superior to the capacity of the array. 
For example: + +```go +type doc struct { + A [2]string +} +d := doc{} +err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d) +fmt.Println(err) + +// (1, 1): unmarshal: TOML array length (3) exceeds destination array length (2) +``` + +In the same situation, v2 ignores the last value: + +```go +err := toml.Unmarshal([]byte(`A = ["one", "two", "many"]`), &d) +fmt.Println("err:", err, "d:", d) +// err: d: {[one two]} +``` + +This is to match `encoding/json`'s behavior. There is no way to make the v2 +decoder behave like v1. + +#### Support for `toml.Unmarshaler` has been dropped + +This method was not widely used, poorly defined, and added a lot of complexity. +A similar effect can be achieved by implementing the `encoding.TextUnmarshaler` +interface and use strings. + +#### Support for `default` struct tag has been dropped + +This feature adds complexity and a poorly defined API for an effect that can be +accomplished outside of the library. + +It does not seem like other format parsers in Go support that feature (the +project referenced in the original ticket #202 has not been updated since 2017). +Given that go-toml v2 should not touch values not in the document, the same +effect can be achieved by pre-filling the struct with defaults (libraries like +[go-defaults][go-defaults] can help). Also, string representation is not well +defined for all types: it creates issues like #278. + +The recommended replacement is pre-filling the struct before unmarshaling. + +[go-defaults]: https://github.com/mcuadros/go-defaults + +#### `toml.Tree` replacement + +This structure was the initial attempt at providing a document model for +go-toml. It allows manipulating the structure of any document, encoding and +decoding from their TOML representation. While a more robust feature was +initially planned in go-toml v2, this has been ultimately [removed from +scope][nodoc] of this library, with no plan to add it back at the moment. The +closest equivalent at the moment would be to unmarshal into an `interface{}` and +use type assertions and/or reflection to manipulate the arbitrary +structure. However this would fall short of providing all of the TOML features +such as adding comments and be specific about whitespace. + + +#### `toml.Position` are not retrievable anymore + +The API for retrieving the position (line, column) of a specific TOML element do +not exist anymore. This was done to minimize the amount of concepts introduced +by the library (query path), and avoid the performance hit related to storing +positions in the absence of a document model, for a feature that seemed to have +little use. Errors however have gained more detailed position +information. Position retrieval seems better fitted for a document model, which +has been [removed from the scope][nodoc] of go-toml v2 at the moment. + +### Encoding / Marshal + +#### Default struct fields order + +V1 emits struct fields order alphabetically by default. V2 struct fields are +emitted in order they are defined. For example: + +```go +type S struct { + B string + A string +} + +data := S{ + B: "B", + A: "A", +} + +b, _ := tomlv1.Marshal(data) +fmt.Println("v1:\n" + string(b)) + +b, _ = tomlv2.Marshal(data) +fmt.Println("v2:\n" + string(b)) + +// Output: +// v1: +// A = "A" +// B = "B" + +// v2: +// B = 'B' +// A = 'A' +``` + +There is no way to make v2 encoder behave like v1. A workaround could be to +manually sort the fields alphabetically in the struct definition, or generate +struct types using `reflect.StructOf`. 
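
One possible shape of that `reflect.StructOf` workaround, as a minimal sketch rather than an official recipe: it assumes a flat struct of exported, untagged fields (nested structs, `toml` tags, and unexported fields would need extra handling), and the `marshalSorted` helper name is made up for this example.

```go
package main

import (
	"fmt"
	"reflect"
	"sort"

	toml "github.com/pelletier/go-toml/v2"
)

// marshalSorted copies v into a dynamically built struct whose fields are
// ordered alphabetically, then marshals that copy with the v2 encoder.
func marshalSorted(v interface{}) ([]byte, error) {
	rv := reflect.ValueOf(v)
	rt := rv.Type()

	// Collect the field descriptors and sort them by name.
	fields := make([]reflect.StructField, rt.NumField())
	for i := range fields {
		fields[i] = rt.Field(i)
	}
	sort.Slice(fields, func(i, j int) bool { return fields[i].Name < fields[j].Name })

	// Build a new struct type with the sorted order and copy the values over.
	sorted := reflect.New(reflect.StructOf(fields)).Elem()
	for _, f := range fields {
		sorted.FieldByName(f.Name).Set(rv.FieldByName(f.Name))
	}

	return toml.Marshal(sorted.Interface())
}

func main() {
	type S struct {
		B string
		A string
	}

	b, err := marshalSorted(S{B: "B", A: "A"})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(b)) // A = 'A' is emitted before B = 'B'
}
```

This only changes the emission order; decoding is unaffected.
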
+ +#### No indentation by default + +V1 automatically indents content of tables by default. V2 does not. However the +same behavior can be obtained using [`Encoder.SetIndentTables`][sit]. For example: + +```go +data := map[string]interface{}{ + "table": map[string]string{ + "key": "value", + }, +} + +b, _ := tomlv1.Marshal(data) +fmt.Println("v1:\n" + string(b)) + +b, _ = tomlv2.Marshal(data) +fmt.Println("v2:\n" + string(b)) + +buf := bytes.Buffer{} +enc := tomlv2.NewEncoder(&buf) +enc.SetIndentTables(true) +enc.Encode(data) +fmt.Println("v2 Encoder:\n" + string(buf.Bytes())) + +// Output: +// v1: +// +// [table] +// key = "value" +// +// v2: +// [table] +// key = 'value' +// +// +// v2 Encoder: +// [table] +// key = 'value' +``` + +[sit]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Encoder.SetIndentTables + +#### Keys and strings are single quoted + +V1 always uses double quotes (`"`) around strings and keys that cannot be +represented bare (unquoted). V2 uses single quotes instead by default (`'`), +unless a character cannot be represented, then falls back to double quotes. As a +result of this change, `Encoder.QuoteMapKeys` has been removed, as it is not +useful anymore. + +There is no way to make v2 encoder behave like v1. + +#### `TextMarshaler` emits as a string, not TOML + +Types that implement [`encoding.TextMarshaler`][tm] can emit arbitrary TOML in +v1. The encoder would append the result to the output directly. In v2 the result +is wrapped in a string. As a result, this interface cannot be implemented by the +root object. + +There is no way to make v2 encoder behave like v1. + +[tm]: https://golang.org/pkg/encoding/#TextMarshaler + +#### `Encoder.CompactComments` has been removed + +Emitting compact comments is now the default behavior of go-toml. This option +is not necessary anymore. + +#### Struct tags have been merged + +V1 used to provide multiple struct tags: `comment`, `commented`, `multiline`, +`toml`, and `omitempty`. To behave more like the standard library, v2 has merged +`toml`, `multiline`, and `omitempty`. For example: + +```go +type doc struct { + // v1 + F string `toml:"field" multiline:"true" omitempty:"true"` + // v2 + F string `toml:"field,multiline,omitempty"` +} +``` + +Has a result, the `Encoder.SetTag*` methods have been removed, as there is just +one tag now. + + +#### `commented` tag has been removed + +There is no replacement for the `commented` tag. This feature would be better +suited in a proper document model for go-toml v2, which has been [cut from +scope][nodoc] at the moment. + +#### `Encoder.ArraysWithOneElementPerLine` has been renamed + +The new name is `Encoder.SetArraysMultiline`. The behavior should be the same. + +#### `Encoder.Indentation` has been renamed + +The new name is `Encoder.SetIndentSymbol`. The behavior should be the same. + + +#### Embedded structs behave like stdlib + +V1 defaults to merging embedded struct fields into the embedding struct. This +behavior was unexpected because it does not follow the standard library. To +avoid breaking backward compatibility, the `Encoder.PromoteAnonymous` method was +added to make the encoder behave correctly. Given backward compatibility is not +a problem anymore, v2 does the right thing by default: it follows the behavior +of `encoding/json`. `Encoder.PromoteAnonymous` has been removed. + +[nodoc]: https://github.com/pelletier/go-toml/discussions/506#discussioncomment-1526038 + +### `query` + +go-toml v1 provided the [`go-toml/query`][query] package. 
It allowed to run +JSONPath-style queries on TOML files. This feature is not available in v2. For a +replacement, check out [dasel][dasel]. + +This package has been removed because it was essentially not supported anymore +(last commit May 2020), increased the complexity of the code base, and more +complete solutions exist out there. + +[query]: https://github.com/pelletier/go-toml/tree/f99d6bbca119636aeafcf351ee52b3d202782627/query +[dasel]: https://github.com/TomWright/dasel + +## License + +The MIT License (MIT). Read [LICENSE](LICENSE). diff --git a/vendor/github.com/pelletier/go-toml/v2/SECURITY.md b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md new file mode 100644 index 000000000..b2f21cfc9 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/SECURITY.md @@ -0,0 +1,19 @@ +# Security Policy + +## Supported Versions + +Use this section to tell people about which versions of your project are +currently being supported with security updates. + +| Version | Supported | +| ---------- | ------------------ | +| Latest 2.x | :white_check_mark: | +| All 1.x | :x: | +| All 0.x | :x: | + +## Reporting a Vulnerability + +Email a vulnerability report to `security@pelletier.codes`. Make sure to include +as many details as possible to reproduce the vulnerability. This is a +side-project: I will try to get back to you as quickly as possible, time +permitting in my personal life. Providing a working patch helps very much! diff --git a/vendor/github.com/pelletier/go-toml/v2/ci.sh b/vendor/github.com/pelletier/go-toml/v2/ci.sh new file mode 100644 index 000000000..d916c5f23 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/ci.sh @@ -0,0 +1,279 @@ +#!/usr/bin/env bash + + +stderr() { + echo "$@" 1>&2 +} + +usage() { + b=$(basename "$0") + echo $b: ERROR: "$@" 1>&2 + + cat 1>&2 < coverage.out + go tool cover -func=coverage.out + popd + + if [ "${branch}" != "HEAD" ]; then + git worktree remove --force "$dir" + fi +} + +coverage() { + case "$1" in + -d) + shift + target="${1?Need to provide a target branch argument}" + + output_dir="$(mktemp -d)" + target_out="${output_dir}/target.txt" + head_out="${output_dir}/head.txt" + + cover "${target}" > "${target_out}" + cover "HEAD" > "${head_out}" + + cat "${target_out}" + cat "${head_out}" + + echo "" + + target_pct="$(tail -n2 ${target_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%.*/\1/')" + head_pct="$(tail -n2 ${head_out} | head -n1 | sed -E 's/.*total.*\t([0-9.]+)%/\1/')" + echo "Results: ${target} ${target_pct}% HEAD ${head_pct}%" + + delta_pct=$(echo "$head_pct - $target_pct" | bc -l) + echo "Delta: ${delta_pct}" + + if [[ $delta_pct = \-* ]]; then + echo "Regression!"; + + target_diff="${output_dir}/target.diff.txt" + head_diff="${output_dir}/head.diff.txt" + cat "${target_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${target_diff}" + cat "${head_out}" | grep -E '^github.com/pelletier/go-toml' | tr -s "\t " | cut -f 2,3 | sort > "${head_diff}" + + diff --side-by-side --suppress-common-lines "${target_diff}" "${head_diff}" + return 1 + fi + return 0 + ;; + esac + + cover "${1-HEAD}" +} + +bench() { + branch="${1}" + out="${2}" + replace="${3}" + dir="$(mktemp -d)" + + stderr "Executing benchmark for ${branch} at ${dir}" + + if [ "${branch}" = "HEAD" ]; then + cp -r . 
"${dir}/" + else + git worktree add "$dir" "$branch" + fi + + pushd "$dir" + + if [ "${replace}" != "" ]; then + find ./benchmark/ -iname '*.go' -exec sed -i -E "s|github.com/pelletier/go-toml/v2|${replace}|g" {} \; + go get "${replace}" + fi + + export GOMAXPROCS=2 + nice -n -19 taskset --cpu-list 0,1 go test '-bench=^Benchmark(Un)?[mM]arshal' -count=5 -run=Nothing ./... | tee "${out}" + popd + + if [ "${branch}" != "HEAD" ]; then + git worktree remove --force "$dir" + fi +} + +fmktemp() { + if mktemp --version|grep GNU >/dev/null; then + mktemp --suffix=-$1; + else + mktemp -t $1; + fi +} + +benchstathtml() { +python3 - $1 <<'EOF' +import sys + +lines = [] +stop = False + +with open(sys.argv[1]) as f: + for line in f.readlines(): + line = line.strip() + if line == "": + stop = True + if not stop: + lines.append(line.split(',')) + +results = [] +for line in reversed(lines[1:]): + v2 = float(line[1]) + results.append([ + line[0].replace("-32", ""), + "%.1fx" % (float(line[3])/v2), # v1 + "%.1fx" % (float(line[5])/v2), # bs + ]) +# move geomean to the end +results.append(results[0]) +del results[0] + + +def printtable(data): + print(""" + + + + + """) + + for r in data: + print(" ".format(*r)) + + print(""" +
Benchmarkgo-toml v1BurntSushi/toml
{}{}{}
""") + + +def match(x): + return "ReferenceFile" in x[0] or "HugoFrontMatter" in x[0] + +above = [x for x in results if match(x)] +below = [x for x in results if not match(x)] + +printtable(above) +print("
See more") +print("""

The table above has the results of the most common use-cases. The table below +contains the results of all benchmarks, including unrealistic ones. It is +provided for completeness.

""") +printtable(below) +print('

This table can be generated with ./ci.sh benchmark -a -html.

') +print("
") + +EOF +} + +benchmark() { + case "$1" in + -d) + shift + target="${1?Need to provide a target branch argument}" + + old=`fmktemp ${target}` + bench "${target}" "${old}" + + new=`fmktemp HEAD` + bench HEAD "${new}" + + benchstat "${old}" "${new}" + return 0 + ;; + -a) + shift + + v2stats=`fmktemp go-toml-v2` + bench HEAD "${v2stats}" "github.com/pelletier/go-toml/v2" + v1stats=`fmktemp go-toml-v1` + bench HEAD "${v1stats}" "github.com/pelletier/go-toml" + bsstats=`fmktemp bs-toml` + bench HEAD "${bsstats}" "github.com/BurntSushi/toml" + + cp "${v2stats}" go-toml-v2.txt + cp "${v1stats}" go-toml-v1.txt + cp "${bsstats}" bs-toml.txt + + if [ "$1" = "-html" ]; then + tmpcsv=`fmktemp csv` + benchstat -csv -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt > $tmpcsv + benchstathtml $tmpcsv + else + benchstat -geomean go-toml-v2.txt go-toml-v1.txt bs-toml.txt + fi + + rm -f go-toml-v2.txt go-toml-v1.txt bs-toml.txt + return $? + esac + + bench "${1-HEAD}" `mktemp` +} + +case "$1" in + coverage) shift; coverage $@;; + benchmark) shift; benchmark $@;; + *) usage "bad argument $1";; +esac diff --git a/vendor/github.com/pelletier/go-toml/v2/decode.go b/vendor/github.com/pelletier/go-toml/v2/decode.go new file mode 100644 index 000000000..4af965360 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/decode.go @@ -0,0 +1,544 @@ +package toml + +import ( + "fmt" + "math" + "strconv" + "time" +) + +func parseInteger(b []byte) (int64, error) { + if len(b) > 2 && b[0] == '0' { + switch b[1] { + case 'x': + return parseIntHex(b) + case 'b': + return parseIntBin(b) + case 'o': + return parseIntOct(b) + default: + panic(fmt.Errorf("invalid base '%c', should have been checked by scanIntOrFloat", b[1])) + } + } + + return parseIntDec(b) +} + +func parseLocalDate(b []byte) (LocalDate, error) { + // full-date = date-fullyear "-" date-month "-" date-mday + // date-fullyear = 4DIGIT + // date-month = 2DIGIT ; 01-12 + // date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year + var date LocalDate + + if len(b) != 10 || b[4] != '-' || b[7] != '-' { + return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD") + } + + var err error + + date.Year, err = parseDecimalDigits(b[0:4]) + if err != nil { + return LocalDate{}, err + } + + date.Month, err = parseDecimalDigits(b[5:7]) + if err != nil { + return LocalDate{}, err + } + + date.Day, err = parseDecimalDigits(b[8:10]) + if err != nil { + return LocalDate{}, err + } + + if !isValidDate(date.Year, date.Month, date.Day) { + return LocalDate{}, newDecodeError(b, "impossible date") + } + + return date, nil +} + +func parseDecimalDigits(b []byte) (int, error) { + v := 0 + + for i, c := range b { + if c < '0' || c > '9' { + return 0, newDecodeError(b[i:i+1], "expected digit (0-9)") + } + v *= 10 + v += int(c - '0') + } + + return v, nil +} + +func parseDateTime(b []byte) (time.Time, error) { + // offset-date-time = full-date time-delim full-time + // full-time = partial-time time-offset + // time-offset = "Z" / time-numoffset + // time-numoffset = ( "+" / "-" ) time-hour ":" time-minute + + dt, b, err := parseLocalDateTime(b) + if err != nil { + return time.Time{}, err + } + + var zone *time.Location + + if len(b) == 0 { + // parser should have checked that when assigning the date time node + panic("date time should have a timezone") + } + + if b[0] == 'Z' || b[0] == 'z' { + b = b[1:] + zone = time.UTC + } else { + const dateTimeByteLen = 6 + if len(b) != dateTimeByteLen { + return 
time.Time{}, newDecodeError(b, "invalid date-time timezone") + } + var direction int + switch b[0] { + case '-': + direction = -1 + case '+': + direction = +1 + default: + return time.Time{}, newDecodeError(b[:1], "invalid timezone offset character") + } + + if b[3] != ':' { + return time.Time{}, newDecodeError(b[3:4], "expected a : separator") + } + + hours, err := parseDecimalDigits(b[1:3]) + if err != nil { + return time.Time{}, err + } + if hours > 23 { + return time.Time{}, newDecodeError(b[:1], "invalid timezone offset hours") + } + + minutes, err := parseDecimalDigits(b[4:6]) + if err != nil { + return time.Time{}, err + } + if minutes > 59 { + return time.Time{}, newDecodeError(b[:1], "invalid timezone offset minutes") + } + + seconds := direction * (hours*3600 + minutes*60) + if seconds == 0 { + zone = time.UTC + } else { + zone = time.FixedZone("", seconds) + } + b = b[dateTimeByteLen:] + } + + if len(b) > 0 { + return time.Time{}, newDecodeError(b, "extra bytes at the end of the timezone") + } + + t := time.Date( + dt.Year, + time.Month(dt.Month), + dt.Day, + dt.Hour, + dt.Minute, + dt.Second, + dt.Nanosecond, + zone) + + return t, nil +} + +func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) { + var dt LocalDateTime + + const localDateTimeByteMinLen = 11 + if len(b) < localDateTimeByteMinLen { + return dt, nil, newDecodeError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]") + } + + date, err := parseLocalDate(b[:10]) + if err != nil { + return dt, nil, err + } + dt.LocalDate = date + + sep := b[10] + if sep != 'T' && sep != ' ' && sep != 't' { + return dt, nil, newDecodeError(b[10:11], "datetime separator is expected to be T or a space") + } + + t, rest, err := parseLocalTime(b[11:]) + if err != nil { + return dt, nil, err + } + dt.LocalTime = t + + return dt, rest, nil +} + +// parseLocalTime is a bit different because it also returns the remaining +// []byte that is didn't need. This is to allow parseDateTime to parse those +// remaining bytes as a timezone. +func parseLocalTime(b []byte) (LocalTime, []byte, error) { + var ( + nspow = [10]int{0, 1e8, 1e7, 1e6, 1e5, 1e4, 1e3, 1e2, 1e1, 1e0} + t LocalTime + ) + + // check if b matches to have expected format HH:MM:SS[.NNNNNN] + const localTimeByteLen = 8 + if len(b) < localTimeByteLen { + return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]") + } + + var err error + + t.Hour, err = parseDecimalDigits(b[0:2]) + if err != nil { + return t, nil, err + } + + if t.Hour > 23 { + return t, nil, newDecodeError(b[0:2], "hour cannot be greater 23") + } + if b[2] != ':' { + return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes") + } + + t.Minute, err = parseDecimalDigits(b[3:5]) + if err != nil { + return t, nil, err + } + if t.Minute > 59 { + return t, nil, newDecodeError(b[3:5], "minutes cannot be greater 59") + } + if b[5] != ':' { + return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds") + } + + t.Second, err = parseDecimalDigits(b[6:8]) + if err != nil { + return t, nil, err + } + + if t.Second > 60 { + return t, nil, newDecodeError(b[6:8], "seconds cannot be greater 60") + } + + b = b[8:] + + if len(b) >= 1 && b[0] == '.' 
{ + frac := 0 + precision := 0 + digits := 0 + + for i, c := range b[1:] { + if !isDigit(c) { + if i == 0 { + return t, nil, newDecodeError(b[0:1], "need at least one digit after fraction point") + } + break + } + digits++ + + const maxFracPrecision = 9 + if i >= maxFracPrecision { + // go-toml allows decoding fractional seconds + // beyond the supported precision of 9 + // digits. It truncates the fractional component + // to the supported precision and ignores the + // remaining digits. + // + // https://github.com/pelletier/go-toml/discussions/707 + continue + } + + frac *= 10 + frac += int(c - '0') + precision++ + } + + if precision == 0 { + return t, nil, newDecodeError(b[:1], "nanoseconds need at least one digit") + } + + t.Nanosecond = frac * nspow[precision] + t.Precision = precision + + return t, b[1+digits:], nil + } + return t, b, nil +} + +//nolint:cyclop +func parseFloat(b []byte) (float64, error) { + if len(b) == 4 && (b[0] == '+' || b[0] == '-') && b[1] == 'n' && b[2] == 'a' && b[3] == 'n' { + return math.NaN(), nil + } + + cleaned, err := checkAndRemoveUnderscoresFloats(b) + if err != nil { + return 0, err + } + + if cleaned[0] == '.' { + return 0, newDecodeError(b, "float cannot start with a dot") + } + + if cleaned[len(cleaned)-1] == '.' { + return 0, newDecodeError(b, "float cannot end with a dot") + } + + dotAlreadySeen := false + for i, c := range cleaned { + if c == '.' { + if dotAlreadySeen { + return 0, newDecodeError(b[i:i+1], "float can have at most one decimal point") + } + if !isDigit(cleaned[i-1]) { + return 0, newDecodeError(b[i-1:i+1], "float decimal point must be preceded by a digit") + } + if !isDigit(cleaned[i+1]) { + return 0, newDecodeError(b[i:i+2], "float decimal point must be followed by a digit") + } + dotAlreadySeen = true + } + } + + start := 0 + if cleaned[0] == '+' || cleaned[0] == '-' { + start = 1 + } + if cleaned[start] == '0' && isDigit(cleaned[start+1]) { + return 0, newDecodeError(b, "float integer part cannot have leading zeroes") + } + + f, err := strconv.ParseFloat(string(cleaned), 64) + if err != nil { + return 0, newDecodeError(b, "unable to parse float: %w", err) + } + + return f, nil +} + +func parseIntHex(b []byte) (int64, error) { + cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) + if err != nil { + return 0, err + } + + i, err := strconv.ParseInt(string(cleaned), 16, 64) + if err != nil { + return 0, newDecodeError(b, "couldn't parse hexadecimal number: %w", err) + } + + return i, nil +} + +func parseIntOct(b []byte) (int64, error) { + cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) + if err != nil { + return 0, err + } + + i, err := strconv.ParseInt(string(cleaned), 8, 64) + if err != nil { + return 0, newDecodeError(b, "couldn't parse octal number: %w", err) + } + + return i, nil +} + +func parseIntBin(b []byte) (int64, error) { + cleaned, err := checkAndRemoveUnderscoresIntegers(b[2:]) + if err != nil { + return 0, err + } + + i, err := strconv.ParseInt(string(cleaned), 2, 64) + if err != nil { + return 0, newDecodeError(b, "couldn't parse binary number: %w", err) + } + + return i, nil +} + +func isSign(b byte) bool { + return b == '+' || b == '-' +} + +func parseIntDec(b []byte) (int64, error) { + cleaned, err := checkAndRemoveUnderscoresIntegers(b) + if err != nil { + return 0, err + } + + startIdx := 0 + + if isSign(cleaned[0]) { + startIdx++ + } + + if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' { + return 0, newDecodeError(b, "leading zero not allowed on decimal number") + } + + i, err 
:= strconv.ParseInt(string(cleaned), 10, 64) + if err != nil { + return 0, newDecodeError(b, "couldn't parse decimal number: %w", err) + } + + return i, nil +} + +func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) { + start := 0 + if b[start] == '+' || b[start] == '-' { + start++ + } + + if len(b) == start { + return b, nil + } + + if b[start] == '_' { + return nil, newDecodeError(b[start:start+1], "number cannot start with underscore") + } + + if b[len(b)-1] == '_' { + return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore") + } + + // fast path + i := 0 + for ; i < len(b); i++ { + if b[i] == '_' { + break + } + } + if i == len(b) { + return b, nil + } + + before := false + cleaned := make([]byte, i, len(b)) + copy(cleaned, b) + + for i++; i < len(b); i++ { + c := b[i] + if c == '_' { + if !before { + return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores") + } + before = false + } else { + before = true + cleaned = append(cleaned, c) + } + } + + return cleaned, nil +} + +func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) { + if b[0] == '_' { + return nil, newDecodeError(b[0:1], "number cannot start with underscore") + } + + if b[len(b)-1] == '_' { + return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore") + } + + // fast path + i := 0 + for ; i < len(b); i++ { + if b[i] == '_' { + break + } + } + if i == len(b) { + return b, nil + } + + before := false + cleaned := make([]byte, 0, len(b)) + + for i := 0; i < len(b); i++ { + c := b[i] + + switch c { + case '_': + if !before { + return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores") + } + if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') { + return nil, newDecodeError(b[i+1:i+2], "cannot have underscore before exponent") + } + before = false + case '+', '-': + // signed exponents + cleaned = append(cleaned, c) + before = false + case 'e', 'E': + if i < len(b)-1 && b[i+1] == '_' { + return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after exponent") + } + cleaned = append(cleaned, c) + case '.': + if i < len(b)-1 && b[i+1] == '_' { + return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after decimal point") + } + if i > 0 && b[i-1] == '_' { + return nil, newDecodeError(b[i-1:i], "cannot have underscore before decimal point") + } + cleaned = append(cleaned, c) + default: + before = true + cleaned = append(cleaned, c) + } + } + + return cleaned, nil +} + +// isValidDate checks if a provided date is a date that exists. +func isValidDate(year int, month int, day int) bool { + return month > 0 && month < 13 && day > 0 && day <= daysIn(month, year) +} + +// daysBefore[m] counts the number of days in a non-leap year +// before month m begins. There is an entry for m=12, counting +// the number of days before January of next year (365). 
+var daysBefore = [...]int32{ + 0, + 31, + 31 + 28, + 31 + 28 + 31, + 31 + 28 + 31 + 30, + 31 + 28 + 31 + 30 + 31, + 31 + 28 + 31 + 30 + 31 + 30, + 31 + 28 + 31 + 30 + 31 + 30 + 31, + 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31, + 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30, + 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31, + 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30, + 31 + 28 + 31 + 30 + 31 + 30 + 31 + 31 + 30 + 31 + 30 + 31, +} + +func daysIn(m int, year int) int { + if m == 2 && isLeap(year) { + return 29 + } + return int(daysBefore[m] - daysBefore[m-1]) +} + +func isLeap(year int) bool { + return year%4 == 0 && (year%100 != 0 || year%400 == 0) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/doc.go b/vendor/github.com/pelletier/go-toml/v2/doc.go new file mode 100644 index 000000000..b7bc599bd --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/doc.go @@ -0,0 +1,2 @@ +// Package toml is a library to read and write TOML documents. +package toml diff --git a/vendor/github.com/pelletier/go-toml/v2/errors.go b/vendor/github.com/pelletier/go-toml/v2/errors.go new file mode 100644 index 000000000..5e6635c3e --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/errors.go @@ -0,0 +1,269 @@ +package toml + +import ( + "fmt" + "strconv" + "strings" + + "github.com/pelletier/go-toml/v2/internal/danger" +) + +// DecodeError represents an error encountered during the parsing or decoding +// of a TOML document. +// +// In addition to the error message, it contains the position in the document +// where it happened, as well as a human-readable representation that shows +// where the error occurred in the document. +type DecodeError struct { + message string + line int + column int + key Key + + human string +} + +// StrictMissingError occurs in a TOML document that does not have a +// corresponding field in the target value. It contains all the missing fields +// in Errors. +// +// Emitted by Decoder when DisallowUnknownFields() was called. +type StrictMissingError struct { + // One error per field that could not be found. + Errors []DecodeError +} + +// Error returns the canonical string for this error. +func (s *StrictMissingError) Error() string { + return "strict mode: fields in the document are missing in the target struct" +} + +// String returns a human readable description of all errors. +func (s *StrictMissingError) String() string { + var buf strings.Builder + + for i, e := range s.Errors { + if i > 0 { + buf.WriteString("\n---\n") + } + + buf.WriteString(e.String()) + } + + return buf.String() +} + +type Key []string + +// internal version of DecodeError that is used as the base to create a +// DecodeError with full context. +type decodeError struct { + highlight []byte + message string + key Key // optional +} + +func (de *decodeError) Error() string { + return de.message +} + +func newDecodeError(highlight []byte, format string, args ...interface{}) error { + return &decodeError{ + highlight: highlight, + message: fmt.Errorf(format, args...).Error(), + } +} + +// Error returns the error message contained in the DecodeError. +func (e *DecodeError) Error() string { + return "toml: " + e.message +} + +// String returns the human-readable contextualized error. This string is multi-line. +func (e *DecodeError) String() string { + return e.human +} + +// Position returns the (line, column) pair indicating where the error +// occurred in the document. Positions are 1-indexed. 
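+// Editor's note (illustrative sketch, not part of the upstream sources):
+// callers can recover the rich error with errors.As and use Position/String
+// for diagnostics, assuming the package's exported Unmarshal API and a
+// hypothetical doc/cfg pair:
+//
+//    var derr *toml.DecodeError
+//    if err := toml.Unmarshal(doc, &cfg); errors.As(err, &derr) {
+//        row, col := derr.Position()
+//        fmt.Printf("parse error at %d:%d\n%s\n", row, col, derr.String())
+//    }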
+func (e *DecodeError) Position() (row int, column int) { + return e.line, e.column +} + +// Key that was being processed when the error occurred. The key is present only +// if this DecodeError is part of a StrictMissingError. +func (e *DecodeError) Key() Key { + return e.key +} + +// decodeErrorFromHighlight creates a DecodeError referencing a highlighted +// range of bytes from document. +// +// highlight needs to be a sub-slice of document, or this function panics. +// +// The function copies all bytes used in DecodeError, so that document and +// highlight can be freely deallocated. +//nolint:funlen +func wrapDecodeError(document []byte, de *decodeError) *DecodeError { + offset := danger.SubsliceOffset(document, de.highlight) + + errMessage := de.Error() + errLine, errColumn := positionAtEnd(document[:offset]) + before, after := linesOfContext(document, de.highlight, offset, 3) + + var buf strings.Builder + + maxLine := errLine + len(after) - 1 + lineColumnWidth := len(strconv.Itoa(maxLine)) + + // Write the lines of context strictly before the error. + for i := len(before) - 1; i > 0; i-- { + line := errLine - i + buf.WriteString(formatLineNumber(line, lineColumnWidth)) + buf.WriteString("|") + + if len(before[i]) > 0 { + buf.WriteString(" ") + buf.Write(before[i]) + } + + buf.WriteRune('\n') + } + + // Write the document line that contains the error. + + buf.WriteString(formatLineNumber(errLine, lineColumnWidth)) + buf.WriteString("| ") + + if len(before) > 0 { + buf.Write(before[0]) + } + + buf.Write(de.highlight) + + if len(after) > 0 { + buf.Write(after[0]) + } + + buf.WriteRune('\n') + + // Write the line with the error message itself (so it does not have a line + // number). + + buf.WriteString(strings.Repeat(" ", lineColumnWidth)) + buf.WriteString("| ") + + if len(before) > 0 { + buf.WriteString(strings.Repeat(" ", len(before[0]))) + } + + buf.WriteString(strings.Repeat("~", len(de.highlight))) + + if len(errMessage) > 0 { + buf.WriteString(" ") + buf.WriteString(errMessage) + } + + // Write the lines of context strictly after the error. + + for i := 1; i < len(after); i++ { + buf.WriteRune('\n') + line := errLine + i + buf.WriteString(formatLineNumber(line, lineColumnWidth)) + buf.WriteString("|") + + if len(after[i]) > 0 { + buf.WriteString(" ") + buf.Write(after[i]) + } + } + + return &DecodeError{ + message: errMessage, + line: errLine, + column: errColumn, + key: de.key, + human: buf.String(), + } +} + +func formatLineNumber(line int, width int) string { + format := "%" + strconv.Itoa(width) + "d" + + return fmt.Sprintf(format, line) +} + +func linesOfContext(document []byte, highlight []byte, offset int, linesAround int) ([][]byte, [][]byte) { + return beforeLines(document, offset, linesAround), afterLines(document, highlight, offset, linesAround) +} + +func beforeLines(document []byte, offset int, linesAround int) [][]byte { + var beforeLines [][]byte + + // Walk the document backward from the highlight to find previous lines + // of context. 
+ rest := document[:offset] +backward: + for o := len(rest) - 1; o >= 0 && len(beforeLines) <= linesAround && len(rest) > 0; { + switch { + case rest[o] == '\n': + // handle individual lines + beforeLines = append(beforeLines, rest[o+1:]) + rest = rest[:o] + o = len(rest) - 1 + case o == 0: + // add the first line only if it's non-empty + beforeLines = append(beforeLines, rest) + + break backward + default: + o-- + } + } + + return beforeLines +} + +func afterLines(document []byte, highlight []byte, offset int, linesAround int) [][]byte { + var afterLines [][]byte + + // Walk the document forward from the highlight to find the following + // lines of context. + rest := document[offset+len(highlight):] +forward: + for o := 0; o < len(rest) && len(afterLines) <= linesAround; { + switch { + case rest[o] == '\n': + // handle individual lines + afterLines = append(afterLines, rest[:o]) + rest = rest[o+1:] + o = 0 + + case o == len(rest)-1: + // add last line only if it's non-empty + afterLines = append(afterLines, rest) + + break forward + default: + o++ + } + } + + return afterLines +} + +func positionAtEnd(b []byte) (row int, column int) { + row = 1 + column = 1 + + for _, c := range b { + if c == '\n' { + row++ + column = 1 + } else { + column++ + } + } + + return +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/ast/ast.go b/vendor/github.com/pelletier/go-toml/v2/internal/ast/ast.go new file mode 100644 index 000000000..33c7f9155 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/ast/ast.go @@ -0,0 +1,144 @@ +package ast + +import ( + "fmt" + "unsafe" + + "github.com/pelletier/go-toml/v2/internal/danger" +) + +// Iterator starts uninitialized, you need to call Next() first. +// +// For example: +// +// it := n.Children() +// for it.Next() { +// it.Node() +// } +type Iterator struct { + started bool + node *Node +} + +// Next moves the iterator forward and returns true if points to a +// node, false otherwise. +func (c *Iterator) Next() bool { + if !c.started { + c.started = true + } else if c.node.Valid() { + c.node = c.node.Next() + } + return c.node.Valid() +} + +// IsLast returns true if the current node of the iterator is the last +// one. Subsequent call to Next() will return false. +func (c *Iterator) IsLast() bool { + return c.node.next == 0 +} + +// Node returns a copy of the node pointed at by the iterator. +func (c *Iterator) Node() *Node { + return c.node +} + +// Root contains a full AST. +// +// It is immutable once constructed with Builder. +type Root struct { + nodes []Node +} + +// Iterator over the top level nodes. +func (r *Root) Iterator() Iterator { + it := Iterator{} + if len(r.nodes) > 0 { + it.node = &r.nodes[0] + } + return it +} + +func (r *Root) at(idx Reference) *Node { + return &r.nodes[idx] +} + +// Arrays have one child per element in the array. InlineTables have +// one child per key-value pair in the table. KeyValues have at least +// two children. The first one is the value. The rest make a +// potentially dotted key. Table and Array table have one child per +// element of the key they represent (same as KeyValue, but without +// the last node being the value). +type Node struct { + Kind Kind + Raw Range // Raw bytes from the input. + Data []byte // Node value (either allocated or referencing the input). + + // References to other nodes, as offsets in the backing array + // from this node. References can go backward, so those can be + // negative. 
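+ // Editor's note (descriptive, not from upstream): next and child are
+ // offsets relative to this node's position in the backing Root.nodes
+ // slice, so a node at index 4 with next == 2 refers to the node at
+ // index 6. Next() and Child() below resolve them with pointer arithmetic
+ // via danger.Stride.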
+ next int // 0 if last element + child int // 0 if no child +} + +type Range struct { + Offset uint32 + Length uint32 +} + +// Next returns a copy of the next node, or an invalid Node if there +// is no next node. +func (n *Node) Next() *Node { + if n.next == 0 { + return nil + } + ptr := unsafe.Pointer(n) + size := unsafe.Sizeof(Node{}) + return (*Node)(danger.Stride(ptr, size, n.next)) +} + +// Child returns a copy of the first child node of this node. Other +// children can be accessed calling Next on the first child. Returns +// an invalid Node if there is none. +func (n *Node) Child() *Node { + if n.child == 0 { + return nil + } + ptr := unsafe.Pointer(n) + size := unsafe.Sizeof(Node{}) + return (*Node)(danger.Stride(ptr, size, n.child)) +} + +// Valid returns true if the node's kind is set (not to Invalid). +func (n *Node) Valid() bool { + return n != nil +} + +// Key returns the child nodes making the Key on a supported +// node. Panics otherwise. They are guaranteed to be all be of the +// Kind Key. A simple key would return just one element. +func (n *Node) Key() Iterator { + switch n.Kind { + case KeyValue: + value := n.Child() + if !value.Valid() { + panic(fmt.Errorf("KeyValue should have at least two children")) + } + return Iterator{node: value.Next()} + case Table, ArrayTable: + return Iterator{node: n.Child()} + default: + panic(fmt.Errorf("Key() is not supported on a %s", n.Kind)) + } +} + +// Value returns a pointer to the value node of a KeyValue. +// Guaranteed to be non-nil. Panics if not called on a KeyValue node, +// or if the Children are malformed. +func (n *Node) Value() *Node { + return n.Child() +} + +// Children returns an iterator over a node's children. +func (n *Node) Children() Iterator { + return Iterator{node: n.Child()} +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/ast/builder.go b/vendor/github.com/pelletier/go-toml/v2/internal/ast/builder.go new file mode 100644 index 000000000..120f16e5c --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/ast/builder.go @@ -0,0 +1,51 @@ +package ast + +type Reference int + +const InvalidReference Reference = -1 + +func (r Reference) Valid() bool { + return r != InvalidReference +} + +type Builder struct { + tree Root + lastIdx int +} + +func (b *Builder) Tree() *Root { + return &b.tree +} + +func (b *Builder) NodeAt(ref Reference) *Node { + return b.tree.at(ref) +} + +func (b *Builder) Reset() { + b.tree.nodes = b.tree.nodes[:0] + b.lastIdx = 0 +} + +func (b *Builder) Push(n Node) Reference { + b.lastIdx = len(b.tree.nodes) + b.tree.nodes = append(b.tree.nodes, n) + return Reference(b.lastIdx) +} + +func (b *Builder) PushAndChain(n Node) Reference { + newIdx := len(b.tree.nodes) + b.tree.nodes = append(b.tree.nodes, n) + if b.lastIdx >= 0 { + b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx + } + b.lastIdx = newIdx + return Reference(b.lastIdx) +} + +func (b *Builder) AttachChild(parent Reference, child Reference) { + b.tree.nodes[parent].child = int(child) - int(parent) +} + +func (b *Builder) Chain(from Reference, to Reference) { + b.tree.nodes[from].next = int(to) - int(from) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/ast/kind.go b/vendor/github.com/pelletier/go-toml/v2/internal/ast/kind.go new file mode 100644 index 000000000..2b50c67fc --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/ast/kind.go @@ -0,0 +1,69 @@ +package ast + +import "fmt" + +type Kind int + +const ( + // meta + Invalid Kind 
= iota + Comment + Key + + // top level structures + Table + ArrayTable + KeyValue + + // containers values + Array + InlineTable + + // values + String + Bool + Float + Integer + LocalDate + LocalTime + LocalDateTime + DateTime +) + +func (k Kind) String() string { + switch k { + case Invalid: + return "Invalid" + case Comment: + return "Comment" + case Key: + return "Key" + case Table: + return "Table" + case ArrayTable: + return "ArrayTable" + case KeyValue: + return "KeyValue" + case Array: + return "Array" + case InlineTable: + return "InlineTable" + case String: + return "String" + case Bool: + return "Bool" + case Float: + return "Float" + case Integer: + return "Integer" + case LocalDate: + return "LocalDate" + case LocalTime: + return "LocalTime" + case LocalDateTime: + return "LocalDateTime" + case DateTime: + return "DateTime" + } + panic(fmt.Errorf("Kind.String() not implemented for '%d'", k)) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go new file mode 100644 index 000000000..e38e1131b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/danger/danger.go @@ -0,0 +1,65 @@ +package danger + +import ( + "fmt" + "reflect" + "unsafe" +) + +const maxInt = uintptr(int(^uint(0) >> 1)) + +func SubsliceOffset(data []byte, subslice []byte) int { + datap := (*reflect.SliceHeader)(unsafe.Pointer(&data)) + hlp := (*reflect.SliceHeader)(unsafe.Pointer(&subslice)) + + if hlp.Data < datap.Data { + panic(fmt.Errorf("subslice address (%d) is before data address (%d)", hlp.Data, datap.Data)) + } + offset := hlp.Data - datap.Data + + if offset > maxInt { + panic(fmt.Errorf("slice offset larger than int (%d)", offset)) + } + + intoffset := int(offset) + + if intoffset > datap.Len { + panic(fmt.Errorf("slice offset (%d) is farther than data length (%d)", intoffset, datap.Len)) + } + + if intoffset+hlp.Len > datap.Len { + panic(fmt.Errorf("slice ends (%d+%d) is farther than data length (%d)", intoffset, hlp.Len, datap.Len)) + } + + return intoffset +} + +func BytesRange(start []byte, end []byte) []byte { + if start == nil || end == nil { + panic("cannot call BytesRange with nil") + } + startp := (*reflect.SliceHeader)(unsafe.Pointer(&start)) + endp := (*reflect.SliceHeader)(unsafe.Pointer(&end)) + + if startp.Data > endp.Data { + panic(fmt.Errorf("start pointer address (%d) is after end pointer address (%d)", startp.Data, endp.Data)) + } + + l := startp.Len + endLen := int(endp.Data-startp.Data) + endp.Len + if endLen > l { + l = endLen + } + + if l > startp.Cap { + panic(fmt.Errorf("range length is larger than capacity")) + } + + return start[:l] +} + +func Stride(ptr unsafe.Pointer, size uintptr, offset int) unsafe.Pointer { + // TODO: replace with unsafe.Add when Go 1.17 is released + // https://github.com/golang/go/issues/40481 + return unsafe.Pointer(uintptr(ptr) + uintptr(int(size)*offset)) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go b/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go new file mode 100644 index 000000000..9d41c28a2 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/danger/typeid.go @@ -0,0 +1,23 @@ +package danger + +import ( + "reflect" + "unsafe" +) + +// typeID is used as key in encoder and decoder caches to enable using +// the optimize runtime.mapaccess2_fast64 function instead of the more +// expensive lookup if we were to use 
reflect.Type as map key. +// +// typeID holds the pointer to the reflect.Type value, which is unique +// in the program. +// +// https://github.com/segmentio/encoding/blob/master/json/codec.go#L59-L61 +type TypeID unsafe.Pointer + +func MakeTypeID(t reflect.Type) TypeID { + // reflect.Type has the fields: + // typ unsafe.Pointer + // ptr unsafe.Pointer + return TypeID((*[2]unsafe.Pointer)(unsafe.Pointer(&t))[1]) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go new file mode 100644 index 000000000..7c148f48d --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/key.go @@ -0,0 +1,50 @@ +package tracker + +import ( + "github.com/pelletier/go-toml/v2/internal/ast" +) + +// KeyTracker is a tracker that keeps track of the current Key as the AST is +// walked. +type KeyTracker struct { + k []string +} + +// UpdateTable sets the state of the tracker with the AST table node. +func (t *KeyTracker) UpdateTable(node *ast.Node) { + t.reset() + t.Push(node) +} + +// UpdateArrayTable sets the state of the tracker with the AST array table node. +func (t *KeyTracker) UpdateArrayTable(node *ast.Node) { + t.reset() + t.Push(node) +} + +// Push the given key on the stack. +func (t *KeyTracker) Push(node *ast.Node) { + it := node.Key() + for it.Next() { + t.k = append(t.k, string(it.Node().Data)) + } +} + +// Pop key from stack. +func (t *KeyTracker) Pop(node *ast.Node) { + it := node.Key() + for it.Next() { + t.k = t.k[:len(t.k)-1] + } +} + +// Key returns the current key +func (t *KeyTracker) Key() []string { + k := make([]string, len(t.k)) + copy(k, t.k) + return k +} + +func (t *KeyTracker) reset() { + t.k = t.k[:0] +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go new file mode 100644 index 000000000..a7ee05ba6 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/seen.go @@ -0,0 +1,356 @@ +package tracker + +import ( + "bytes" + "fmt" + "sync" + + "github.com/pelletier/go-toml/v2/internal/ast" +) + +type keyKind uint8 + +const ( + invalidKind keyKind = iota + valueKind + tableKind + arrayTableKind +) + +func (k keyKind) String() string { + switch k { + case invalidKind: + return "invalid" + case valueKind: + return "value" + case tableKind: + return "table" + case arrayTableKind: + return "array table" + } + panic("missing keyKind string mapping") +} + +// SeenTracker tracks which keys have been seen with which TOML type to flag +// duplicates and mismatches according to the spec. +// +// Each node in the visited tree is represented by an entry. Each entry has an +// identifier, which is provided by a counter. Entries are stored in the array +// entries. As new nodes are discovered (referenced for the first time in the +// TOML document), entries are created and appended to the array. An entry +// points to its parent using its id. +// +// To find whether a given key (sequence of []byte) has already been visited, +// the entries are linearly searched, looking for one with the right name and +// parent id. +// +// Given that all keys appear in the document after their parent, it is +// guaranteed that all descendants of a node are stored after the node, this +// speeds up the search process. 
+// +// When encountering [[array tables]], the descendants of that node are removed +// to allow that branch of the tree to be "rediscovered". To maintain the +// invariant above, the deletion process needs to keep the order of entries. +// This results in more copies in that case. +type SeenTracker struct { + entries []entry + currentIdx int +} + +var pool sync.Pool + +func (s *SeenTracker) reset() { + // Always contains a root element at index 0. + s.currentIdx = 0 + if len(s.entries) == 0 { + s.entries = make([]entry, 1, 2) + } else { + s.entries = s.entries[:1] + } + s.entries[0].child = -1 + s.entries[0].next = -1 +} + +type entry struct { + // Use -1 to indicate no child or no sibling. + child int + next int + + name []byte + kind keyKind + explicit bool + kv bool +} + +// Find the index of the child of parentIdx with key k. Returns -1 if +// it does not exist. +func (s *SeenTracker) find(parentIdx int, k []byte) int { + for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next { + if bytes.Equal(s.entries[i].name, k) { + return i + } + } + return -1 +} + +// Remove all descendants of node at position idx. +func (s *SeenTracker) clear(idx int) { + if idx >= len(s.entries) { + return + } + + for i := s.entries[idx].child; i >= 0; { + next := s.entries[i].next + n := s.entries[0].next + s.entries[0].next = i + s.entries[i].next = n + s.entries[i].name = nil + s.clear(i) + i = next + } + + s.entries[idx].child = -1 +} + +func (s *SeenTracker) create(parentIdx int, name []byte, kind keyKind, explicit bool, kv bool) int { + e := entry{ + child: -1, + next: s.entries[parentIdx].child, + + name: name, + kind: kind, + explicit: explicit, + kv: kv, + } + var idx int + if s.entries[0].next >= 0 { + idx = s.entries[0].next + s.entries[0].next = s.entries[idx].next + s.entries[idx] = e + } else { + idx = len(s.entries) + s.entries = append(s.entries, e) + } + + s.entries[parentIdx].child = idx + + return idx +} + +func (s *SeenTracker) setExplicitFlag(parentIdx int) { + for i := s.entries[parentIdx].child; i >= 0; i = s.entries[i].next { + if s.entries[i].kv { + s.entries[i].explicit = true + s.entries[i].kv = false + } + s.setExplicitFlag(i) + } +} + +// CheckExpression takes a top-level node and checks that it does not contain +// keys that have been seen in previous calls, and validates that types are +// consistent. +func (s *SeenTracker) CheckExpression(node *ast.Node) error { + if s.entries == nil { + s.reset() + } + switch node.Kind { + case ast.KeyValue: + return s.checkKeyValue(node) + case ast.Table: + return s.checkTable(node) + case ast.ArrayTable: + return s.checkArrayTable(node) + default: + panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind)) + } +} + +func (s *SeenTracker) checkTable(node *ast.Node) error { + if s.currentIdx >= 0 { + s.setExplicitFlag(s.currentIdx) + } + + it := node.Key() + + parentIdx := 0 + + // This code is duplicated in checkArrayTable. This is because factoring + // it in a function requires to copy the iterator, or allocate it to the + // heap, which is not cheap. 
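+ // Editor's note (illustrative, not from upstream): the tracker's checks
+ // are what turn TOML duplicate-key violations into errors, for example:
+ //
+ //    title = "one"
+ //    title = "two"      # error: key title is already defined
+ //
+ //    [fruit]
+ //    apple = 1
+ //    [fruit.apple]      # error: key apple should be a table, not a value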
+ for it.Next() { + if it.IsLast() { + break + } + + k := it.Node().Data + + idx := s.find(parentIdx, k) + + if idx < 0 { + idx = s.create(parentIdx, k, tableKind, false, false) + } else { + entry := s.entries[idx] + if entry.kind == valueKind { + return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) + } + } + parentIdx = idx + } + + k := it.Node().Data + idx := s.find(parentIdx, k) + + if idx >= 0 { + kind := s.entries[idx].kind + if kind != tableKind { + return fmt.Errorf("toml: key %s should be a table, not a %s", string(k), kind) + } + if s.entries[idx].explicit { + return fmt.Errorf("toml: table %s already exists", string(k)) + } + s.entries[idx].explicit = true + } else { + idx = s.create(parentIdx, k, tableKind, true, false) + } + + s.currentIdx = idx + + return nil +} + +func (s *SeenTracker) checkArrayTable(node *ast.Node) error { + if s.currentIdx >= 0 { + s.setExplicitFlag(s.currentIdx) + } + + it := node.Key() + + parentIdx := 0 + + for it.Next() { + if it.IsLast() { + break + } + + k := it.Node().Data + + idx := s.find(parentIdx, k) + + if idx < 0 { + idx = s.create(parentIdx, k, tableKind, false, false) + } else { + entry := s.entries[idx] + if entry.kind == valueKind { + return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) + } + } + + parentIdx = idx + } + + k := it.Node().Data + idx := s.find(parentIdx, k) + + if idx >= 0 { + kind := s.entries[idx].kind + if kind != arrayTableKind { + return fmt.Errorf("toml: key %s already exists as a %s, but should be an array table", kind, string(k)) + } + s.clear(idx) + } else { + idx = s.create(parentIdx, k, arrayTableKind, true, false) + } + + s.currentIdx = idx + + return nil +} + +func (s *SeenTracker) checkKeyValue(node *ast.Node) error { + parentIdx := s.currentIdx + it := node.Key() + + for it.Next() { + k := it.Node().Data + + idx := s.find(parentIdx, k) + + if idx < 0 { + idx = s.create(parentIdx, k, tableKind, false, true) + } else { + entry := s.entries[idx] + if it.IsLast() { + return fmt.Errorf("toml: key %s is already defined", string(k)) + } else if entry.kind != tableKind { + return fmt.Errorf("toml: expected %s to be a table, not a %s", string(k), entry.kind) + } else if entry.explicit { + return fmt.Errorf("toml: cannot redefine table %s that has already been explicitly defined", string(k)) + } + } + + parentIdx = idx + } + + s.entries[parentIdx].kind = valueKind + + value := node.Value() + + switch value.Kind { + case ast.InlineTable: + return s.checkInlineTable(value) + case ast.Array: + return s.checkArray(value) + } + + return nil +} + +func (s *SeenTracker) checkArray(node *ast.Node) error { + it := node.Children() + for it.Next() { + n := it.Node() + switch n.Kind { + case ast.InlineTable: + err := s.checkInlineTable(n) + if err != nil { + return err + } + case ast.Array: + err := s.checkArray(n) + if err != nil { + return err + } + } + } + return nil +} + +func (s *SeenTracker) checkInlineTable(node *ast.Node) error { + if pool.New == nil { + pool.New = func() interface{} { + return &SeenTracker{} + } + } + + s = pool.Get().(*SeenTracker) + s.reset() + + it := node.Children() + for it.Next() { + n := it.Node() + err := s.checkKeyValue(n) + if err != nil { + return err + } + } + + // As inline tables are self-contained, the tracker does not + // need to retain the details of what they contain. 
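+ // Editor's note (illustrative, not from upstream): because the key that
+ // holds the inline table is recorded as a plain value, later attempts to
+ // extend it are rejected, e.g.
+ //
+ //    point = { x = 1, y = 2 }
+ //    point.z = 3        # error: point is a value, not a table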
The + // keyValue element that creates the inline table is kept to + // mark the presence of the inline table and prevent + // redefinition of its keys: check* functions cannot walk into + // a value. + pool.Put(s) + return nil +} diff --git a/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go new file mode 100644 index 000000000..bf0317392 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/internal/tracker/tracker.go @@ -0,0 +1 @@ +package tracker diff --git a/vendor/github.com/pelletier/go-toml/v2/localtime.go b/vendor/github.com/pelletier/go-toml/v2/localtime.go new file mode 100644 index 000000000..30a31dcbd --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/localtime.go @@ -0,0 +1,120 @@ +package toml + +import ( + "fmt" + "strings" + "time" +) + +// LocalDate represents a calendar day in no specific timezone. +type LocalDate struct { + Year int + Month int + Day int +} + +// AsTime converts d into a specific time instance at midnight in zone. +func (d LocalDate) AsTime(zone *time.Location) time.Time { + return time.Date(d.Year, time.Month(d.Month), d.Day, 0, 0, 0, 0, zone) +} + +// String returns RFC 3339 representation of d. +func (d LocalDate) String() string { + return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day) +} + +// MarshalText returns RFC 3339 representation of d. +func (d LocalDate) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText parses b using RFC 3339 to fill d. +func (d *LocalDate) UnmarshalText(b []byte) error { + res, err := parseLocalDate(b) + if err != nil { + return err + } + *d = res + return nil +} + +// LocalTime represents a time of day of no specific day in no specific +// timezone. +type LocalTime struct { + Hour int // Hour of the day: [0; 24[ + Minute int // Minute of the hour: [0; 60[ + Second int // Second of the minute: [0; 60[ + Nanosecond int // Nanoseconds within the second: [0, 1000000000[ + Precision int // Number of digits to display for Nanosecond. +} + +// String returns RFC 3339 representation of d. +// If d.Nanosecond and d.Precision are zero, the time won't have a nanosecond +// component. If d.Nanosecond > 0 but d.Precision = 0, then the minimum number +// of digits for nanoseconds is provided. +func (d LocalTime) String() string { + s := fmt.Sprintf("%02d:%02d:%02d", d.Hour, d.Minute, d.Second) + + if d.Precision > 0 { + s += fmt.Sprintf(".%09d", d.Nanosecond)[:d.Precision+1] + } else if d.Nanosecond > 0 { + // Nanoseconds are specified, but precision is not provided. Use the + // minimum. + s += strings.Trim(fmt.Sprintf(".%09d", d.Nanosecond), "0") + } + + return s +} + +// MarshalText returns RFC 3339 representation of d. +func (d LocalTime) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText parses b using RFC 3339 to fill d. +func (d *LocalTime) UnmarshalText(b []byte) error { + res, left, err := parseLocalTime(b) + if err == nil && len(left) != 0 { + err = newDecodeError(left, "extra characters") + } + if err != nil { + return err + } + *d = res + return nil +} + +// LocalDateTime represents a time of a specific day in no specific timezone. +type LocalDateTime struct { + LocalDate + LocalTime +} + +// AsTime converts d into a specific time instance in zone. 
+func (d LocalDateTime) AsTime(zone *time.Location) time.Time { + return time.Date(d.Year, time.Month(d.Month), d.Day, d.Hour, d.Minute, d.Second, d.Nanosecond, zone) +} + +// String returns RFC 3339 representation of d. +func (d LocalDateTime) String() string { + return d.LocalDate.String() + "T" + d.LocalTime.String() +} + +// MarshalText returns RFC 3339 representation of d. +func (d LocalDateTime) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText parses b using RFC 3339 to fill d. +func (d *LocalDateTime) UnmarshalText(data []byte) error { + res, left, err := parseLocalDateTime(data) + if err == nil && len(left) != 0 { + err = newDecodeError(left, "extra characters") + } + if err != nil { + return err + } + + *d = res + return nil +} diff --git a/vendor/github.com/pelletier/go-toml/v2/marshaler.go b/vendor/github.com/pelletier/go-toml/v2/marshaler.go new file mode 100644 index 000000000..91f3b3c20 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/marshaler.go @@ -0,0 +1,950 @@ +package toml + +import ( + "bytes" + "encoding" + "fmt" + "io" + "math" + "reflect" + "sort" + "strconv" + "strings" + "time" + "unicode" +) + +// Marshal serializes a Go value as a TOML document. +// +// It is a shortcut for Encoder.Encode() with the default options. +func Marshal(v interface{}) ([]byte, error) { + var buf bytes.Buffer + enc := NewEncoder(&buf) + + err := enc.Encode(v) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +// Encoder writes a TOML document to an output stream. +type Encoder struct { + // output + w io.Writer + + // global settings + tablesInline bool + arraysMultiline bool + indentSymbol string + indentTables bool +} + +// NewEncoder returns a new Encoder that writes to w. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + indentSymbol: " ", + } +} + +// SetTablesInline forces the encoder to emit all tables inline. +// +// This behavior can be controlled on an individual struct field basis with the +// inline tag: +// +// MyField `inline:"true"` +func (enc *Encoder) SetTablesInline(inline bool) *Encoder { + enc.tablesInline = inline + return enc +} + +// SetArraysMultiline forces the encoder to emit all arrays with one element per +// line. +// +// This behavior can be controlled on an individual struct field basis with the multiline tag: +// +// MyField `multiline:"true"` +func (enc *Encoder) SetArraysMultiline(multiline bool) *Encoder { + enc.arraysMultiline = multiline + return enc +} + +// SetIndentSymbol defines the string that should be used for indentation. The +// provided string is repeated for each indentation level. Defaults to two +// spaces. +func (enc *Encoder) SetIndentSymbol(s string) *Encoder { + enc.indentSymbol = s + return enc +} + +// SetIndentTables forces the encoder to intent tables and array tables. +func (enc *Encoder) SetIndentTables(indent bool) *Encoder { + enc.indentTables = indent + return enc +} + +// Encode writes a TOML representation of v to the stream. +// +// If v cannot be represented to TOML it returns an error. +// +// Encoding rules +// +// A top level slice containing only maps or structs is encoded as [[table +// array]]. +// +// All slices not matching rule 1 are encoded as [array]. As a result, any map +// or struct they contain is encoded as an {inline table}. +// +// Nil interfaces and nil pointers are not supported. +// +// Keys in key-values always have one part. +// +// Intermediate tables are always printed. 
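+// Editor's note (illustrative sketch, not from upstream): applying these
+// rules, a small struct marshals roughly as follows (field names are used
+// verbatim when no toml tag renames them):
+//
+//    type Config struct {
+//        Name string   `toml:"name"`
+//        Tags []string `toml:"tags"`
+//    }
+//    b, _ := toml.Marshal(Config{Name: "bosh", Tags: []string{"cli"}})
+//    // name = 'bosh'
+//    // tags = ['cli']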
+// +// By default, strings are encoded as literal string, unless they contain either +// a newline character or a single quote. In that case they are emitted as +// quoted strings. +// +// When encoding structs, fields are encoded in order of definition, with their +// exact name. +// +// Struct tags +// +// The encoding of each public struct field can be customized by the format +// string in the "toml" key of the struct field's tag. This follows +// encoding/json's convention. The format string starts with the name of the +// field, optionally followed by a comma-separated list of options. The name may +// be empty in order to provide options without overriding the default name. +// +// The "multiline" option emits strings as quoted multi-line TOML strings. It +// has no effect on fields that would not be encoded as strings. +// +// The "inline" option turns fields that would be emitted as tables into inline +// tables instead. It has no effect on other fields. +// +// The "omitempty" option prevents empty values or groups from being emitted. +// +// In addition to the "toml" tag struct tag, a "comment" tag can be used to emit +// a TOML comment before the value being annotated. Comments are ignored inside +// inline tables. +func (enc *Encoder) Encode(v interface{}) error { + var ( + b []byte + ctx encoderCtx + ) + + ctx.inline = enc.tablesInline + + if v == nil { + return fmt.Errorf("toml: cannot encode a nil interface") + } + + b, err := enc.encode(b, ctx, reflect.ValueOf(v)) + if err != nil { + return err + } + + _, err = enc.w.Write(b) + if err != nil { + return fmt.Errorf("toml: cannot write: %w", err) + } + + return nil +} + +type valueOptions struct { + multiline bool + omitempty bool + comment string +} + +type encoderCtx struct { + // Current top-level key. + parentKey []string + + // Key that should be used for a KV. + key string + // Extra flag to account for the empty string + hasKey bool + + // Set to true to indicate that the encoder is inside a KV, so that all + // tables need to be inlined. + insideKv bool + + // Set to true to skip the first table header in an array table. 
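+ // Editor's note (descriptive, not from upstream): skipTableHeader is set
+ // by encodeSliceAsArrayTable, which emits the [[key]] header for each
+ // element itself, so the element's table encoding must not write another
+ // header.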
+ skipTableHeader bool + + // Should the next table be encoded as inline + inline bool + + // Indentation level + indent int + + // Options coming from struct tags + options valueOptions +} + +func (ctx *encoderCtx) shiftKey() { + if ctx.hasKey { + ctx.parentKey = append(ctx.parentKey, ctx.key) + ctx.clearKey() + } +} + +func (ctx *encoderCtx) setKey(k string) { + ctx.key = k + ctx.hasKey = true +} + +func (ctx *encoderCtx) clearKey() { + ctx.key = "" + ctx.hasKey = false +} + +func (ctx *encoderCtx) isRoot() bool { + return len(ctx.parentKey) == 0 && !ctx.hasKey +} + +func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + i := v.Interface() + + switch x := i.(type) { + case time.Time: + if x.Nanosecond() > 0 { + return x.AppendFormat(b, time.RFC3339Nano), nil + } + return x.AppendFormat(b, time.RFC3339), nil + case LocalTime: + return append(b, x.String()...), nil + case LocalDate: + return append(b, x.String()...), nil + case LocalDateTime: + return append(b, x.String()...), nil + } + + hasTextMarshaler := v.Type().Implements(textMarshalerType) + if hasTextMarshaler || (v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) { + if !hasTextMarshaler { + v = v.Addr() + } + + if ctx.isRoot() { + return nil, fmt.Errorf("toml: type %s implementing the TextMarshaler interface cannot be a root element", v.Type()) + } + + text, err := v.Interface().(encoding.TextMarshaler).MarshalText() + if err != nil { + return nil, err + } + + b = enc.encodeString(b, string(text), ctx.options) + + return b, nil + } + + switch v.Kind() { + // containers + case reflect.Map: + return enc.encodeMap(b, ctx, v) + case reflect.Struct: + return enc.encodeStruct(b, ctx, v) + case reflect.Slice: + return enc.encodeSlice(b, ctx, v) + case reflect.Interface: + if v.IsNil() { + return nil, fmt.Errorf("toml: encoding a nil interface is not supported") + } + + return enc.encode(b, ctx, v.Elem()) + case reflect.Ptr: + if v.IsNil() { + return enc.encode(b, ctx, reflect.Zero(v.Type().Elem())) + } + + return enc.encode(b, ctx, v.Elem()) + + // values + case reflect.String: + b = enc.encodeString(b, v.String(), ctx.options) + case reflect.Float32: + f := v.Float() + + if math.IsNaN(f) { + b = append(b, "nan"...) + } else if f > math.MaxFloat32 { + b = append(b, "inf"...) + } else if f < -math.MaxFloat32 { + b = append(b, "-inf"...) + } else if math.Trunc(f) == f { + b = strconv.AppendFloat(b, f, 'f', 1, 32) + } else { + b = strconv.AppendFloat(b, f, 'f', -1, 32) + } + case reflect.Float64: + f := v.Float() + if math.IsNaN(f) { + b = append(b, "nan"...) + } else if f > math.MaxFloat64 { + b = append(b, "inf"...) + } else if f < -math.MaxFloat64 { + b = append(b, "-inf"...) + } else if math.Trunc(f) == f { + b = strconv.AppendFloat(b, f, 'f', 1, 64) + } else { + b = strconv.AppendFloat(b, f, 'f', -1, 64) + } + case reflect.Bool: + if v.Bool() { + b = append(b, "true"...) + } else { + b = append(b, "false"...) 
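+ // Editor's note (descriptive, not from upstream): the float cases above
+ // map NaN to nan and out-of-range magnitudes to inf/-inf, and format
+ // whole-number floats with a trailing ".0" so they round-trip as floats
+ // rather than integers.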
+ } + case reflect.Uint64, reflect.Uint32, reflect.Uint16, reflect.Uint8, reflect.Uint: + b = strconv.AppendUint(b, v.Uint(), 10) + case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: + b = strconv.AppendInt(b, v.Int(), 10) + default: + return nil, fmt.Errorf("toml: cannot encode value of type %s", v.Kind()) + } + + return b, nil +} + +func isNil(v reflect.Value) bool { + switch v.Kind() { + case reflect.Ptr, reflect.Interface, reflect.Map: + return v.IsNil() + default: + return false + } +} + +func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v reflect.Value) ([]byte, error) { + var err error + + if (ctx.options.omitempty || options.omitempty) && isEmptyValue(v) { + return b, nil + } + + if !ctx.inline { + b = enc.encodeComment(ctx.indent, options.comment, b) + } + + b = enc.indent(ctx.indent, b) + b = enc.encodeKey(b, ctx.key) + b = append(b, " = "...) + + // create a copy of the context because the value of a KV shouldn't + // modify the global context. + subctx := ctx + subctx.insideKv = true + subctx.shiftKey() + subctx.options = options + + b, err = enc.encode(b, subctx, v) + if err != nil { + return nil, err + } + + return b, nil +} + +func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} + +const literalQuote = '\'' + +func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byte { + if needsQuoting(v) { + return enc.encodeQuotedString(options.multiline, b, v) + } + + return enc.encodeLiteralString(b, v) +} + +func needsQuoting(v string) bool { + // TODO: vectorize + for _, b := range []byte(v) { + if b == '\'' || b == '\r' || b == '\n' || invalidAscii(b) { + return true + } + } + return false +} + +// caller should have checked that the string does not contain new lines or ' . +func (enc *Encoder) encodeLiteralString(b []byte, v string) []byte { + b = append(b, literalQuote) + b = append(b, v...) + b = append(b, literalQuote) + + return b +} + +//nolint:cyclop +func (enc *Encoder) encodeQuotedString(multiline bool, b []byte, v string) []byte { + stringQuote := `"` + + if multiline { + stringQuote = `"""` + } + + b = append(b, stringQuote...) + if multiline { + b = append(b, '\n') + } + + const ( + hextable = "0123456789ABCDEF" + // U+0000 to U+0008, U+000A to U+001F, U+007F + nul = 0x0 + bs = 0x8 + lf = 0xa + us = 0x1f + del = 0x7f + ) + + for _, r := range []byte(v) { + switch r { + case '\\': + b = append(b, `\\`...) + case '"': + b = append(b, `\"`...) + case '\b': + b = append(b, `\b`...) + case '\f': + b = append(b, `\f`...) + case '\n': + if multiline { + b = append(b, r) + } else { + b = append(b, `\n`...) + } + case '\r': + b = append(b, `\r`...) + case '\t': + b = append(b, `\t`...) + default: + switch { + case r >= nul && r <= bs, r >= lf && r <= us, r == del: + b = append(b, `\u00`...) + b = append(b, hextable[r>>4]) + b = append(b, hextable[r&0x0f]) + default: + b = append(b, r) + } + } + } + + b = append(b, stringQuote...) 
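+ // Editor's note (illustrative, not from upstream): per needsQuoting
+ // above, a value such as `plain text` is emitted as the literal string
+ // 'plain text', while one containing a single quote, carriage return or
+ // newline falls back to this escaped form, e.g. "don't\n".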
+ + return b +} + +// caller should have checked that the string is in A-Z / a-z / 0-9 / - / _ . +func (enc *Encoder) encodeUnquotedKey(b []byte, v string) []byte { + return append(b, v...) +} + +func (enc *Encoder) encodeTableHeader(ctx encoderCtx, b []byte) ([]byte, error) { + if len(ctx.parentKey) == 0 { + return b, nil + } + + b = enc.encodeComment(ctx.indent, ctx.options.comment, b) + + b = enc.indent(ctx.indent, b) + + b = append(b, '[') + + b = enc.encodeKey(b, ctx.parentKey[0]) + + for _, k := range ctx.parentKey[1:] { + b = append(b, '.') + b = enc.encodeKey(b, k) + } + + b = append(b, "]\n"...) + + return b, nil +} + +//nolint:cyclop +func (enc *Encoder) encodeKey(b []byte, k string) []byte { + needsQuotation := false + cannotUseLiteral := false + + if len(k) == 0 { + return append(b, "''"...) + } + + for _, c := range k { + if (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') || c == '-' || c == '_' { + continue + } + + if c == literalQuote { + cannotUseLiteral = true + } + + needsQuotation = true + } + + if needsQuotation && needsQuoting(k) { + cannotUseLiteral = true + } + + switch { + case cannotUseLiteral: + return enc.encodeQuotedString(false, b, k) + case needsQuotation: + return enc.encodeLiteralString(b, k) + default: + return enc.encodeUnquotedKey(b, k) + } +} + +func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + if v.Type().Key().Kind() != reflect.String { + return nil, fmt.Errorf("toml: type %s is not supported as a map key", v.Type().Key().Kind()) + } + + var ( + t table + emptyValueOptions valueOptions + ) + + iter := v.MapRange() + for iter.Next() { + k := iter.Key().String() + v := iter.Value() + + if isNil(v) { + continue + } + + if willConvertToTableOrArrayTable(ctx, v) { + t.pushTable(k, v, emptyValueOptions) + } else { + t.pushKV(k, v, emptyValueOptions) + } + } + + sortEntriesByKey(t.kvs) + sortEntriesByKey(t.tables) + + return enc.encodeTable(b, ctx, t) +} + +func sortEntriesByKey(e []entry) { + sort.Slice(e, func(i, j int) bool { + return e[i].Key < e[j].Key + }) +} + +type entry struct { + Key string + Value reflect.Value + Options valueOptions +} + +type table struct { + kvs []entry + tables []entry +} + +func (t *table) pushKV(k string, v reflect.Value, options valueOptions) { + for _, e := range t.kvs { + if e.Key == k { + return + } + } + + t.kvs = append(t.kvs, entry{Key: k, Value: v, Options: options}) +} + +func (t *table) pushTable(k string, v reflect.Value, options valueOptions) { + for _, e := range t.tables { + if e.Key == k { + return + } + } + t.tables = append(t.tables, entry{Key: k, Value: v, Options: options}) +} + +func walkStruct(ctx encoderCtx, t *table, v reflect.Value) { + // TODO: cache this + typ := v.Type() + for i := 0; i < typ.NumField(); i++ { + fieldType := typ.Field(i) + + // only consider exported fields + if fieldType.PkgPath != "" { + continue + } + + tag := fieldType.Tag.Get("toml") + + // special field name to skip field + if tag == "-" { + continue + } + + k, opts := parseTag(tag) + if !isValidName(k) { + k = "" + } + + f := v.Field(i) + + if k == "" { + if fieldType.Anonymous { + if fieldType.Type.Kind() == reflect.Struct { + walkStruct(ctx, t, f) + } + continue + } else { + k = fieldType.Name + } + } + + if isNil(f) { + continue + } + + options := valueOptions{ + multiline: opts.multiline, + omitempty: opts.omitempty, + comment: fieldType.Tag.Get("comment"), + } + + if opts.inline || !willConvertToTableOrArrayTable(ctx, f) { + t.pushKV(k, f, options) + } 
else { + t.pushTable(k, f, options) + } + } +} + +func (enc *Encoder) encodeStruct(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + var t table + + walkStruct(ctx, &t, v) + + return enc.encodeTable(b, ctx, t) +} + +func (enc *Encoder) encodeComment(indent int, comment string, b []byte) []byte { + if comment != "" { + b = enc.indent(indent, b) + b = append(b, "# "...) + b = append(b, comment...) + b = append(b, '\n') + } + return b +} + +func isValidName(s string) bool { + if s == "" { + return false + } + for _, c := range s { + switch { + case strings.ContainsRune("!#$%&()*+-./:;<=>?@[]^_{|}~ ", c): + // Backslash and quote chars are reserved, but + // otherwise any punctuation chars are allowed + // in a tag name. + case !unicode.IsLetter(c) && !unicode.IsDigit(c): + return false + } + } + return true +} + +type tagOptions struct { + multiline bool + inline bool + omitempty bool +} + +func parseTag(tag string) (string, tagOptions) { + opts := tagOptions{} + + idx := strings.Index(tag, ",") + if idx == -1 { + return tag, opts + } + + raw := tag[idx+1:] + tag = string(tag[:idx]) + for raw != "" { + var o string + i := strings.Index(raw, ",") + if i >= 0 { + o, raw = raw[:i], raw[i+1:] + } else { + o, raw = raw, "" + } + switch o { + case "multiline": + opts.multiline = true + case "inline": + opts.inline = true + case "omitempty": + opts.omitempty = true + } + } + + return tag, opts +} + +func (enc *Encoder) encodeTable(b []byte, ctx encoderCtx, t table) ([]byte, error) { + var err error + + ctx.shiftKey() + + if ctx.insideKv || (ctx.inline && !ctx.isRoot()) { + return enc.encodeTableInline(b, ctx, t) + } + + if !ctx.skipTableHeader { + b, err = enc.encodeTableHeader(ctx, b) + if err != nil { + return nil, err + } + + if enc.indentTables && len(ctx.parentKey) > 0 { + ctx.indent++ + } + } + ctx.skipTableHeader = false + + for _, kv := range t.kvs { + ctx.setKey(kv.Key) + + b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value) + if err != nil { + return nil, err + } + + b = append(b, '\n') + } + + for _, table := range t.tables { + ctx.setKey(table.Key) + + ctx.options = table.Options + + b, err = enc.encode(b, ctx, table.Value) + if err != nil { + return nil, err + } + + b = append(b, '\n') + } + + return b, nil +} + +func (enc *Encoder) encodeTableInline(b []byte, ctx encoderCtx, t table) ([]byte, error) { + var err error + + b = append(b, '{') + + first := true + for _, kv := range t.kvs { + if first { + first = false + } else { + b = append(b, `, `...) + } + + ctx.setKey(kv.Key) + + b, err = enc.encodeKv(b, ctx, kv.Options, kv.Value) + if err != nil { + return nil, err + } + } + + if len(t.tables) > 0 { + panic("inline table cannot contain nested tables, online key-values") + } + + b = append(b, "}"...) 
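+ // Editor's note (illustrative, not from upstream): with
+ // SetTablesInline(true) or the inline struct tag option, a nested struct
+ // that would normally get a [section] header is rendered through this
+ // path instead, e.g. DB = {Host = 'localhost', Port = 5432}.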
+ + return b, nil +} + +func willConvertToTable(ctx encoderCtx, v reflect.Value) bool { + if !v.IsValid() { + return false + } + if v.Type() == timeType || v.Type().Implements(textMarshalerType) || (v.Kind() != reflect.Ptr && v.CanAddr() && reflect.PtrTo(v.Type()).Implements(textMarshalerType)) { + return false + } + + t := v.Type() + switch t.Kind() { + case reflect.Map, reflect.Struct: + return !ctx.inline + case reflect.Interface: + return willConvertToTable(ctx, v.Elem()) + case reflect.Ptr: + if v.IsNil() { + return false + } + + return willConvertToTable(ctx, v.Elem()) + default: + return false + } +} + +func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool { + if ctx.insideKv { + return false + } + t := v.Type() + + if t.Kind() == reflect.Interface { + return willConvertToTableOrArrayTable(ctx, v.Elem()) + } + + if t.Kind() == reflect.Slice { + if v.Len() == 0 { + // An empty slice should be a kv = []. + return false + } + + for i := 0; i < v.Len(); i++ { + t := willConvertToTable(ctx, v.Index(i)) + + if !t { + return false + } + } + + return true + } + + return willConvertToTable(ctx, v) +} + +func (enc *Encoder) encodeSlice(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + if v.Len() == 0 { + b = append(b, "[]"...) + + return b, nil + } + + if willConvertToTableOrArrayTable(ctx, v) { + return enc.encodeSliceAsArrayTable(b, ctx, v) + } + + return enc.encodeSliceAsArray(b, ctx, v) +} + +// caller should have checked that v is a slice that only contains values that +// encode into tables. +func (enc *Encoder) encodeSliceAsArrayTable(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + ctx.shiftKey() + + scratch := make([]byte, 0, 64) + scratch = append(scratch, "[["...) + + for i, k := range ctx.parentKey { + if i > 0 { + scratch = append(scratch, '.') + } + + scratch = enc.encodeKey(scratch, k) + } + + scratch = append(scratch, "]]\n"...) + ctx.skipTableHeader = true + + for i := 0; i < v.Len(); i++ { + b = append(b, scratch...) + + var err error + b, err = enc.encode(b, ctx, v.Index(i)) + if err != nil { + return nil, err + } + } + + return b, nil +} + +func (enc *Encoder) encodeSliceAsArray(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) { + multiline := ctx.options.multiline || enc.arraysMultiline + separator := ", " + + b = append(b, '[') + + subCtx := ctx + subCtx.options = valueOptions{} + + if multiline { + separator = ",\n" + + b = append(b, '\n') + + subCtx.indent++ + } + + var err error + first := true + + for i := 0; i < v.Len(); i++ { + if first { + first = false + } else { + b = append(b, separator...) + } + + if multiline { + b = enc.indent(subCtx.indent, b) + } + + b, err = enc.encode(b, subCtx, v.Index(i)) + if err != nil { + return nil, err + } + } + + if multiline { + b = append(b, '\n') + b = enc.indent(ctx.indent, b) + } + + b = append(b, ']') + + return b, nil +} + +func (enc *Encoder) indent(level int, b []byte) []byte { + for i := 0; i < level; i++ { + b = append(b, enc.indentSymbol...) 
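+ // Editor's note (illustrative, not from upstream): SetArraysMultiline
+ // (or the multiline tag option) makes encodeSliceAsArray emit one
+ // element per line, indented one level deeper than its key, and
+ // SetIndentTables applies this same helper to nested table bodies.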
+ } + + return b +} diff --git a/vendor/github.com/pelletier/go-toml/v2/parser.go b/vendor/github.com/pelletier/go-toml/v2/parser.go new file mode 100644 index 000000000..9859a795b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/parser.go @@ -0,0 +1,1086 @@ +package toml + +import ( + "bytes" + "unicode" + + "github.com/pelletier/go-toml/v2/internal/ast" + "github.com/pelletier/go-toml/v2/internal/danger" +) + +type parser struct { + builder ast.Builder + ref ast.Reference + data []byte + left []byte + err error + first bool +} + +func (p *parser) Range(b []byte) ast.Range { + return ast.Range{ + Offset: uint32(danger.SubsliceOffset(p.data, b)), + Length: uint32(len(b)), + } +} + +func (p *parser) Raw(raw ast.Range) []byte { + return p.data[raw.Offset : raw.Offset+raw.Length] +} + +func (p *parser) Reset(b []byte) { + p.builder.Reset() + p.ref = ast.InvalidReference + p.data = b + p.left = b + p.err = nil + p.first = true +} + +//nolint:cyclop +func (p *parser) NextExpression() bool { + if len(p.left) == 0 || p.err != nil { + return false + } + + p.builder.Reset() + p.ref = ast.InvalidReference + + for { + if len(p.left) == 0 || p.err != nil { + return false + } + + if !p.first { + p.left, p.err = p.parseNewline(p.left) + } + + if len(p.left) == 0 || p.err != nil { + return false + } + + p.ref, p.left, p.err = p.parseExpression(p.left) + + if p.err != nil { + return false + } + + p.first = false + + if p.ref.Valid() { + return true + } + } +} + +func (p *parser) Expression() *ast.Node { + return p.builder.NodeAt(p.ref) +} + +func (p *parser) Error() error { + return p.err +} + +func (p *parser) parseNewline(b []byte) ([]byte, error) { + if b[0] == '\n' { + return b[1:], nil + } + + if b[0] == '\r' { + _, rest, err := scanWindowsNewline(b) + return rest, err + } + + return nil, newDecodeError(b[0:1], "expected newline but got %#U", b[0]) +} + +func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) { + // expression = ws [ comment ] + // expression =/ ws keyval ws [ comment ] + // expression =/ ws table ws [ comment ] + ref := ast.InvalidReference + + b = p.parseWhitespace(b) + + if len(b) == 0 { + return ref, b, nil + } + + if b[0] == '#' { + _, rest, err := scanComment(b) + return ref, rest, err + } + + if b[0] == '\n' || b[0] == '\r' { + return ref, b, nil + } + + var err error + if b[0] == '[' { + ref, b, err = p.parseTable(b) + } else { + ref, b, err = p.parseKeyval(b) + } + + if err != nil { + return ref, nil, err + } + + b = p.parseWhitespace(b) + + if len(b) > 0 && b[0] == '#' { + _, rest, err := scanComment(b) + return ref, rest, err + } + + return ref, b, nil +} + +func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) { + // table = std-table / array-table + if len(b) > 1 && b[1] == '[' { + return p.parseArrayTable(b) + } + + return p.parseStdTable(b) +} + +func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) { + // array-table = array-table-open key array-table-close + // array-table-open = %x5B.5B ws ; [[ Double left square bracket + // array-table-close = ws %x5D.5D ; ]] Double right square bracket + ref := p.builder.Push(ast.Node{ + Kind: ast.ArrayTable, + }) + + b = b[2:] + b = p.parseWhitespace(b) + + k, b, err := p.parseKey(b) + if err != nil { + return ref, nil, err + } + + p.builder.AttachChild(ref, k) + b = p.parseWhitespace(b) + + b, err = expect(']', b) + if err != nil { + return ref, nil, err + } + + b, err = expect(']', b) + + return ref, b, err +} + +func (p 
*parser) parseStdTable(b []byte) (ast.Reference, []byte, error) { + // std-table = std-table-open key std-table-close + // std-table-open = %x5B ws ; [ Left square bracket + // std-table-close = ws %x5D ; ] Right square bracket + ref := p.builder.Push(ast.Node{ + Kind: ast.Table, + }) + + b = b[1:] + b = p.parseWhitespace(b) + + key, b, err := p.parseKey(b) + if err != nil { + return ref, nil, err + } + + p.builder.AttachChild(ref, key) + + b = p.parseWhitespace(b) + + b, err = expect(']', b) + + return ref, b, err +} + +func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) { + // keyval = key keyval-sep val + ref := p.builder.Push(ast.Node{ + Kind: ast.KeyValue, + }) + + key, b, err := p.parseKey(b) + if err != nil { + return ast.InvalidReference, nil, err + } + + // keyval-sep = ws %x3D ws ; = + + b = p.parseWhitespace(b) + + if len(b) == 0 { + return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there") + } + + b, err = expect('=', b) + if err != nil { + return ast.InvalidReference, nil, err + } + + b = p.parseWhitespace(b) + + valRef, b, err := p.parseVal(b) + if err != nil { + return ref, b, err + } + + p.builder.Chain(valRef, key) + p.builder.AttachChild(ref, valRef) + + return ref, b, err +} + +//nolint:cyclop,funlen +func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) { + // val = string / boolean / array / inline-table / date-time / float / integer + ref := ast.InvalidReference + + if len(b) == 0 { + return ref, nil, newDecodeError(b, "expected value, not eof") + } + + var err error + c := b[0] + + switch c { + case '"': + var raw []byte + var v []byte + if scanFollowsMultilineBasicStringDelimiter(b) { + raw, v, b, err = p.parseMultilineBasicString(b) + } else { + raw, v, b, err = p.parseBasicString(b) + } + + if err == nil { + ref = p.builder.Push(ast.Node{ + Kind: ast.String, + Raw: p.Range(raw), + Data: v, + }) + } + + return ref, b, err + case '\'': + var raw []byte + var v []byte + if scanFollowsMultilineLiteralStringDelimiter(b) { + raw, v, b, err = p.parseMultilineLiteralString(b) + } else { + raw, v, b, err = p.parseLiteralString(b) + } + + if err == nil { + ref = p.builder.Push(ast.Node{ + Kind: ast.String, + Raw: p.Range(raw), + Data: v, + }) + } + + return ref, b, err + case 't': + if !scanFollowsTrue(b) { + return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'") + } + + ref = p.builder.Push(ast.Node{ + Kind: ast.Bool, + Data: b[:4], + }) + + return ref, b[4:], nil + case 'f': + if !scanFollowsFalse(b) { + return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'") + } + + ref = p.builder.Push(ast.Node{ + Kind: ast.Bool, + Data: b[:5], + }) + + return ref, b[5:], nil + case '[': + return p.parseValArray(b) + case '{': + return p.parseInlineTable(b) + default: + return p.parseIntOrFloatOrDateTime(b) + } +} + +func atmost(b []byte, n int) []byte { + if n >= len(b) { + return b + } + + return b[:n] +} + +func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) { + v, rest, err := scanLiteralString(b) + if err != nil { + return nil, nil, nil, err + } + + return v, v[1 : len(v)-1], rest, nil +} + +func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) { + // inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close + // inline-table-open = %x7B ws ; { + // inline-table-close = ws %x7D ; } + // inline-table-sep = ws %x2C ws ; , Comma + // inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ] + parent := 
p.builder.Push(ast.Node{ + Kind: ast.InlineTable, + }) + + first := true + + var child ast.Reference + + b = b[1:] + + var err error + + for len(b) > 0 { + previousB := b + b = p.parseWhitespace(b) + + if len(b) == 0 { + return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete") + } + + if b[0] == '}' { + break + } + + if !first { + b, err = expect(',', b) + if err != nil { + return parent, nil, err + } + b = p.parseWhitespace(b) + } + + var kv ast.Reference + + kv, b, err = p.parseKeyval(b) + if err != nil { + return parent, nil, err + } + + if first { + p.builder.AttachChild(parent, kv) + } else { + p.builder.Chain(child, kv) + } + child = kv + + first = false + } + + rest, err := expect('}', b) + + return parent, rest, err +} + +//nolint:funlen,cyclop +func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) { + // array = array-open [ array-values ] ws-comment-newline array-close + // array-open = %x5B ; [ + // array-close = %x5D ; ] + // array-values = ws-comment-newline val ws-comment-newline array-sep array-values + // array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ] + // array-sep = %x2C ; , Comma + // ws-comment-newline = *( wschar / [ comment ] newline ) + arrayStart := b + b = b[1:] + + parent := p.builder.Push(ast.Node{ + Kind: ast.Array, + }) + + first := true + + var lastChild ast.Reference + + var err error + for len(b) > 0 { + b, err = p.parseOptionalWhitespaceCommentNewline(b) + if err != nil { + return parent, nil, err + } + + if len(b) == 0 { + return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete") + } + + if b[0] == ']' { + break + } + + if b[0] == ',' { + if first { + return parent, nil, newDecodeError(b[0:1], "array cannot start with comma") + } + b = b[1:] + + b, err = p.parseOptionalWhitespaceCommentNewline(b) + if err != nil { + return parent, nil, err + } + } else if !first { + return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas") + } + + // TOML allows trailing commas in arrays. 
+ if len(b) > 0 && b[0] == ']' { + break + } + + var valueRef ast.Reference + valueRef, b, err = p.parseVal(b) + if err != nil { + return parent, nil, err + } + + if first { + p.builder.AttachChild(parent, valueRef) + } else { + p.builder.Chain(lastChild, valueRef) + } + lastChild = valueRef + + b, err = p.parseOptionalWhitespaceCommentNewline(b) + if err != nil { + return parent, nil, err + } + first = false + } + + rest, err := expect(']', b) + + return parent, rest, err +} + +func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error) { + for len(b) > 0 { + var err error + b = p.parseWhitespace(b) + + if len(b) > 0 && b[0] == '#' { + _, b, err = scanComment(b) + if err != nil { + return nil, err + } + } + + if len(b) == 0 { + break + } + + if b[0] == '\n' || b[0] == '\r' { + b, err = p.parseNewline(b) + if err != nil { + return nil, err + } + } else { + break + } + } + + return b, nil +} + +func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) { + token, rest, err := scanMultilineLiteralString(b) + if err != nil { + return nil, nil, nil, err + } + + i := 3 + + // skip the immediate new line + if token[i] == '\n' { + i++ + } else if token[i] == '\r' && token[i+1] == '\n' { + i += 2 + } + + return token, token[i : len(token)-3], rest, err +} + +//nolint:funlen,gocognit,cyclop +func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) { + // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body + // ml-basic-string-delim + // ml-basic-string-delim = 3quotation-mark + // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] + // + // mlb-content = mlb-char / newline / mlb-escaped-nl + // mlb-char = mlb-unescaped / escaped + // mlb-quotes = 1*2quotation-mark + // mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii + // mlb-escaped-nl = escape ws newline *( wschar / newline ) + token, escaped, rest, err := scanMultilineBasicString(b) + if err != nil { + return nil, nil, nil, err + } + + i := 3 + + // skip the immediate new line + if token[i] == '\n' { + i++ + } else if token[i] == '\r' && token[i+1] == '\n' { + i += 2 + } + + // fast path + startIdx := i + endIdx := len(token) - len(`"""`) + + if !escaped { + str := token[startIdx:endIdx] + verr := utf8TomlValidAlreadyEscaped(str) + if verr.Zero() { + return token, str, rest, nil + } + return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") + } + + var builder bytes.Buffer + + // The scanner ensures that the token starts and ends with quotes and that + // escapes are balanced. + for i < len(token)-3 { + c := token[i] + + //nolint:nestif + if c == '\\' { + // When the last non-whitespace character on a line is an unescaped \, + // it will be trimmed along with all whitespace (including newlines) up + // to the next non-whitespace character or closing delimiter. 
+ + isLastNonWhitespaceOnLine := false + j := 1 + findEOLLoop: + for ; j < len(token)-3-i; j++ { + switch token[i+j] { + case ' ', '\t': + continue + case '\r': + if token[i+j+1] == '\n' { + continue + } + case '\n': + isLastNonWhitespaceOnLine = true + } + break findEOLLoop + } + if isLastNonWhitespaceOnLine { + i += j + for ; i < len(token)-3; i++ { + c := token[i] + if !(c == '\n' || c == '\r' || c == ' ' || c == '\t') { + i-- + break + } + } + i++ + continue + } + + // handle escaping + i++ + c = token[i] + + switch c { + case '"', '\\': + builder.WriteByte(c) + case 'b': + builder.WriteByte('\b') + case 'f': + builder.WriteByte('\f') + case 'n': + builder.WriteByte('\n') + case 'r': + builder.WriteByte('\r') + case 't': + builder.WriteByte('\t') + case 'e': + builder.WriteByte(0x1B) + case 'u': + x, err := hexToRune(atmost(token[i+1:], 4), 4) + if err != nil { + return nil, nil, nil, err + } + builder.WriteRune(x) + i += 4 + case 'U': + x, err := hexToRune(atmost(token[i+1:], 8), 8) + if err != nil { + return nil, nil, nil, err + } + + builder.WriteRune(x) + i += 8 + default: + return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) + } + i++ + } else { + size := utf8ValidNext(token[i:]) + if size == 0 { + return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c) + } + builder.Write(token[i : i+size]) + i += size + } + } + + return token, builder.Bytes(), rest, nil +} + +func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) { + // key = simple-key / dotted-key + // simple-key = quoted-key / unquoted-key + // + // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ + // quoted-key = basic-string / literal-string + // dotted-key = simple-key 1*( dot-sep simple-key ) + // + // dot-sep = ws %x2E ws ; . Period + raw, key, b, err := p.parseSimpleKey(b) + if err != nil { + return ast.InvalidReference, nil, err + } + + ref := p.builder.Push(ast.Node{ + Kind: ast.Key, + Raw: p.Range(raw), + Data: key, + }) + + for { + b = p.parseWhitespace(b) + if len(b) > 0 && b[0] == '.' 
{ + b = p.parseWhitespace(b[1:]) + + raw, key, b, err = p.parseSimpleKey(b) + if err != nil { + return ref, nil, err + } + + p.builder.PushAndChain(ast.Node{ + Kind: ast.Key, + Raw: p.Range(raw), + Data: key, + }) + } else { + break + } + } + + return ref, b, nil +} + +func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) { + if len(b) == 0 { + return nil, nil, nil, newDecodeError(b, "expected key but found none") + } + + // simple-key = quoted-key / unquoted-key + // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ + // quoted-key = basic-string / literal-string + switch { + case b[0] == '\'': + return p.parseLiteralString(b) + case b[0] == '"': + return p.parseBasicString(b) + case isUnquotedKeyChar(b[0]): + key, rest = scanUnquotedKey(b) + return key, key, rest, nil + default: + return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0]) + } +} + +//nolint:funlen,cyclop +func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) { + // basic-string = quotation-mark *basic-char quotation-mark + // quotation-mark = %x22 ; " + // basic-char = basic-unescaped / escaped + // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii + // escaped = escape escape-seq-char + // escape-seq-char = %x22 ; " quotation mark U+0022 + // escape-seq-char =/ %x5C ; \ reverse solidus U+005C + // escape-seq-char =/ %x62 ; b backspace U+0008 + // escape-seq-char =/ %x66 ; f form feed U+000C + // escape-seq-char =/ %x6E ; n line feed U+000A + // escape-seq-char =/ %x72 ; r carriage return U+000D + // escape-seq-char =/ %x74 ; t tab U+0009 + // escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX + // escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX + token, escaped, rest, err := scanBasicString(b) + if err != nil { + return nil, nil, nil, err + } + + startIdx := len(`"`) + endIdx := len(token) - len(`"`) + + // Fast path. If there is no escape sequence, the string should just be + // an UTF-8 encoded string, which is the same as Go. In that case, + // validate the string and return a direct reference to the buffer. + if !escaped { + str := token[startIdx:endIdx] + verr := utf8TomlValidAlreadyEscaped(str) + if verr.Zero() { + return token, str, rest, nil + } + return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8") + } + + i := startIdx + + var builder bytes.Buffer + + // The scanner ensures that the token starts and ends with quotes and that + // escapes are balanced. 
+ for i < len(token)-1 { + c := token[i] + if c == '\\' { + i++ + c = token[i] + + switch c { + case '"', '\\': + builder.WriteByte(c) + case 'b': + builder.WriteByte('\b') + case 'f': + builder.WriteByte('\f') + case 'n': + builder.WriteByte('\n') + case 'r': + builder.WriteByte('\r') + case 't': + builder.WriteByte('\t') + case 'e': + builder.WriteByte(0x1B) + case 'u': + x, err := hexToRune(token[i+1:len(token)-1], 4) + if err != nil { + return nil, nil, nil, err + } + + builder.WriteRune(x) + i += 4 + case 'U': + x, err := hexToRune(token[i+1:len(token)-1], 8) + if err != nil { + return nil, nil, nil, err + } + + builder.WriteRune(x) + i += 8 + default: + return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c) + } + i++ + } else { + size := utf8ValidNext(token[i:]) + if size == 0 { + return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c) + } + builder.Write(token[i : i+size]) + i += size + } + } + + return token, builder.Bytes(), rest, nil +} + +func hexToRune(b []byte, length int) (rune, error) { + if len(b) < length { + return -1, newDecodeError(b, "unicode point needs %d character, not %d", length, len(b)) + } + b = b[:length] + + var r uint32 + for i, c := range b { + d := uint32(0) + switch { + case '0' <= c && c <= '9': + d = uint32(c - '0') + case 'a' <= c && c <= 'f': + d = uint32(c - 'a' + 10) + case 'A' <= c && c <= 'F': + d = uint32(c - 'A' + 10) + default: + return -1, newDecodeError(b[i:i+1], "non-hex character") + } + r = r*16 + d + } + + if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 { + return -1, newDecodeError(b, "escape sequence is invalid Unicode code point") + } + + return rune(r), nil +} + +func (p *parser) parseWhitespace(b []byte) []byte { + // ws = *wschar + // wschar = %x20 ; Space + // wschar =/ %x09 ; Horizontal tab + _, rest := scanWhitespace(b) + + return rest +} + +//nolint:cyclop +func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, error) { + switch b[0] { + case 'i': + if !scanFollowsInf(b) { + return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'") + } + + return p.builder.Push(ast.Node{ + Kind: ast.Float, + Data: b[:3], + }), b[3:], nil + case 'n': + if !scanFollowsNan(b) { + return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'") + } + + return p.builder.Push(ast.Node{ + Kind: ast.Float, + Data: b[:3], + }), b[3:], nil + case '+', '-': + return p.scanIntOrFloat(b) + } + + if len(b) < 3 { + return p.scanIntOrFloat(b) + } + + s := 5 + if len(b) < s { + s = len(b) + } + + for idx, c := range b[:s] { + if isDigit(c) { + continue + } + + if idx == 2 && c == ':' || (idx == 4 && c == '-') { + return p.scanDateTime(b) + } + + break + } + + return p.scanIntOrFloat(b) +} + +func (p *parser) scanDateTime(b []byte) (ast.Reference, []byte, error) { + // scans for contiguous characters in [0-9T:Z.+-], and up to one space if + // followed by a digit. + hasDate := false + hasTime := false + hasTz := false + seenSpace := false + + i := 0 +byteLoop: + for ; i < len(b); i++ { + c := b[i] + + switch { + case isDigit(c): + case c == '-': + hasDate = true + const minOffsetOfTz = 8 + if i >= minOffsetOfTz { + hasTz = true + } + case c == 'T' || c == 't' || c == ':' || c == '.': + hasTime = true + case c == '+' || c == '-' || c == 'Z' || c == 'z': + hasTz = true + case c == ' ': + if !seenSpace && i+1 < len(b) && isDigit(b[i+1]) { + i += 2 + // Avoid reaching past the end of the document in case the time + // is malformed. See TestIssue585. 
+ if i >= len(b) { + i-- + } + seenSpace = true + hasTime = true + } else { + break byteLoop + } + default: + break byteLoop + } + } + + var kind ast.Kind + + if hasTime { + if hasDate { + if hasTz { + kind = ast.DateTime + } else { + kind = ast.LocalDateTime + } + } else { + kind = ast.LocalTime + } + } else { + kind = ast.LocalDate + } + + return p.builder.Push(ast.Node{ + Kind: kind, + Data: b[:i], + }), b[i:], nil +} + +//nolint:funlen,gocognit,cyclop +func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) { + i := 0 + + if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' { + var isValidRune validRuneFn + + switch b[1] { + case 'x': + isValidRune = isValidHexRune + case 'o': + isValidRune = isValidOctalRune + case 'b': + isValidRune = isValidBinaryRune + default: + i++ + } + + if isValidRune != nil { + i += 2 + for ; i < len(b); i++ { + if !isValidRune(b[i]) { + break + } + } + } + + return p.builder.Push(ast.Node{ + Kind: ast.Integer, + Data: b[:i], + }), b[i:], nil + } + + isFloat := false + + for ; i < len(b); i++ { + c := b[i] + + if c >= '0' && c <= '9' || c == '+' || c == '-' || c == '_' { + continue + } + + if c == '.' || c == 'e' || c == 'E' { + isFloat = true + + continue + } + + if c == 'i' { + if scanFollowsInf(b[i:]) { + return p.builder.Push(ast.Node{ + Kind: ast.Float, + Data: b[:i+3], + }), b[i+3:], nil + } + + return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number") + } + + if c == 'n' { + if scanFollowsNan(b[i:]) { + return p.builder.Push(ast.Node{ + Kind: ast.Float, + Data: b[:i+3], + }), b[i+3:], nil + } + + return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number") + } + + break + } + + if i == 0 { + return ast.InvalidReference, b, newDecodeError(b, "incomplete number") + } + + kind := ast.Integer + + if isFloat { + kind = ast.Float + } + + return p.builder.Push(ast.Node{ + Kind: kind, + Data: b[:i], + }), b[i:], nil +} + +func isDigit(r byte) bool { + return r >= '0' && r <= '9' +} + +type validRuneFn func(r byte) bool + +func isValidHexRune(r byte) bool { + return r >= 'a' && r <= 'f' || + r >= 'A' && r <= 'F' || + r >= '0' && r <= '9' || + r == '_' +} + +func isValidOctalRune(r byte) bool { + return r >= '0' && r <= '7' || r == '_' +} + +func isValidBinaryRune(r byte) bool { + return r == '0' || r == '1' || r == '_' +} + +func expect(x byte, b []byte) ([]byte, error) { + if len(b) == 0 { + return nil, newDecodeError(b, "expected character %c but the document ended here", x) + } + + if b[0] != x { + return nil, newDecodeError(b[0:1], "expected character %c", x) + } + + return b[1:], nil +} diff --git a/vendor/github.com/pelletier/go-toml/v2/scanner.go b/vendor/github.com/pelletier/go-toml/v2/scanner.go new file mode 100644 index 000000000..bb445fab4 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/scanner.go @@ -0,0 +1,269 @@ +package toml + +func scanFollows(b []byte, pattern string) bool { + n := len(pattern) + + return len(b) >= n && string(b[:n]) == pattern +} + +func scanFollowsMultilineBasicStringDelimiter(b []byte) bool { + return scanFollows(b, `"""`) +} + +func scanFollowsMultilineLiteralStringDelimiter(b []byte) bool { + return scanFollows(b, `'''`) +} + +func scanFollowsTrue(b []byte) bool { + return scanFollows(b, `true`) +} + +func scanFollowsFalse(b []byte) bool { + return scanFollows(b, `false`) +} + +func scanFollowsInf(b []byte) bool { + return scanFollows(b, 
`inf`) +} + +func scanFollowsNan(b []byte) bool { + return scanFollows(b, `nan`) +} + +func scanUnquotedKey(b []byte) ([]byte, []byte) { + // unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ + for i := 0; i < len(b); i++ { + if !isUnquotedKeyChar(b[i]) { + return b[:i], b[i:] + } + } + + return b, b[len(b):] +} + +func isUnquotedKeyChar(r byte) bool { + return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '-' || r == '_' +} + +func scanLiteralString(b []byte) ([]byte, []byte, error) { + // literal-string = apostrophe *literal-char apostrophe + // apostrophe = %x27 ; ' apostrophe + // literal-char = %x09 / %x20-26 / %x28-7E / non-ascii + for i := 1; i < len(b); { + switch b[i] { + case '\'': + return b[:i+1], b[i+1:], nil + case '\n', '\r': + return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines") + } + size := utf8ValidNext(b[i:]) + if size == 0 { + return nil, nil, newDecodeError(b[i:i+1], "invalid character") + } + i += size + } + + return nil, nil, newDecodeError(b[len(b):], "unterminated literal string") +} + +func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) { + // ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body + // ml-literal-string-delim + // ml-literal-string-delim = 3apostrophe + // ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ] + // + // mll-content = mll-char / newline + // mll-char = %x09 / %x20-26 / %x28-7E / non-ascii + // mll-quotes = 1*2apostrophe + for i := 3; i < len(b); { + switch b[i] { + case '\'': + if scanFollowsMultilineLiteralStringDelimiter(b[i:]) { + i += 3 + + // At that point we found 3 apostrophe, and i is the + // index of the byte after the third one. The scanner + // needs to be eager, because there can be an extra 2 + // apostrophe that can be accepted at the end of the + // string. 
+ + if i >= len(b) || b[i] != '\'' { + return b[:i], b[i:], nil + } + i++ + + if i >= len(b) || b[i] != '\'' { + return b[:i], b[i:], nil + } + i++ + + if i < len(b) && b[i] == '\'' { + return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string") + } + + return b[:i], b[i:], nil + } + case '\r': + if len(b) < i+2 { + return nil, nil, newDecodeError(b[len(b):], `need a \n after \r`) + } + if b[i+1] != '\n' { + return nil, nil, newDecodeError(b[i:i+2], `need a \n after \r`) + } + i += 2 // skip the \n + continue + } + size := utf8ValidNext(b[i:]) + if size == 0 { + return nil, nil, newDecodeError(b[i:i+1], "invalid character") + } + i += size + } + + return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`) +} + +func scanWindowsNewline(b []byte) ([]byte, []byte, error) { + const lenCRLF = 2 + if len(b) < lenCRLF { + return nil, nil, newDecodeError(b, "windows new line expected") + } + + if b[1] != '\n' { + return nil, nil, newDecodeError(b, `windows new line should be \r\n`) + } + + return b[:lenCRLF], b[lenCRLF:], nil +} + +func scanWhitespace(b []byte) ([]byte, []byte) { + for i := 0; i < len(b); i++ { + switch b[i] { + case ' ', '\t': + continue + default: + return b[:i], b[i:] + } + } + + return b, b[len(b):] +} + +//nolint:unparam +func scanComment(b []byte) ([]byte, []byte, error) { + // comment-start-symbol = %x23 ; # + // non-ascii = %x80-D7FF / %xE000-10FFFF + // non-eol = %x09 / %x20-7F / non-ascii + // + // comment = comment-start-symbol *non-eol + + for i := 1; i < len(b); { + if b[i] == '\n' { + return b[:i], b[i:], nil + } + if b[i] == '\r' { + if i+1 < len(b) && b[i+1] == '\n' { + return b[:i+1], b[i+1:], nil + } + return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment") + } + size := utf8ValidNext(b[i:]) + if size == 0 { + return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment") + } + + i += size + } + + return b, b[len(b):], nil +} + +func scanBasicString(b []byte) ([]byte, bool, []byte, error) { + // basic-string = quotation-mark *basic-char quotation-mark + // quotation-mark = %x22 ; " + // basic-char = basic-unescaped / escaped + // basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii + // escaped = escape escape-seq-char + escaped := false + i := 1 + + for ; i < len(b); i++ { + switch b[i] { + case '"': + return b[:i+1], escaped, b[i+1:], nil + case '\n', '\r': + return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines") + case '\\': + if len(b) < i+2 { + return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\") + } + escaped = true + i++ // skip the next character + } + } + + return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`) +} + +func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) { + // ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body + // ml-basic-string-delim + // ml-basic-string-delim = 3quotation-mark + // ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] + // + // mlb-content = mlb-char / newline / mlb-escaped-nl + // mlb-char = mlb-unescaped / escaped + // mlb-quotes = 1*2quotation-mark + // mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii + // mlb-escaped-nl = escape ws newline *( wschar / newline ) + + escaped := false + i := 3 + + for ; i < len(b); i++ { + switch b[i] { + case '"': + if scanFollowsMultilineBasicStringDelimiter(b[i:]) { + i += 3 + + // At that point we found 3 apostrophe, 
and i is the + // index of the byte after the third one. The scanner + // needs to be eager, because there can be an extra 2 + // apostrophe that can be accepted at the end of the + // string. + + if i >= len(b) || b[i] != '"' { + return b[:i], escaped, b[i:], nil + } + i++ + + if i >= len(b) || b[i] != '"' { + return b[:i], escaped, b[i:], nil + } + i++ + + if i < len(b) && b[i] == '"' { + return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`) + } + + return b[:i], escaped, b[i:], nil + } + case '\\': + if len(b) < i+2 { + return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\") + } + escaped = true + i++ // skip the next character + case '\r': + if len(b) < i+2 { + return nil, escaped, nil, newDecodeError(b[len(b):], `need a \n after \r`) + } + if b[i+1] != '\n' { + return nil, escaped, nil, newDecodeError(b[i:i+2], `need a \n after \r`) + } + i++ // skip the \n + } + } + + return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/strict.go b/vendor/github.com/pelletier/go-toml/v2/strict.go new file mode 100644 index 000000000..b7830d139 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/strict.go @@ -0,0 +1,107 @@ +package toml + +import ( + "github.com/pelletier/go-toml/v2/internal/ast" + "github.com/pelletier/go-toml/v2/internal/danger" + "github.com/pelletier/go-toml/v2/internal/tracker" +) + +type strict struct { + Enabled bool + + // Tracks the current key being processed. + key tracker.KeyTracker + + missing []decodeError +} + +func (s *strict) EnterTable(node *ast.Node) { + if !s.Enabled { + return + } + + s.key.UpdateTable(node) +} + +func (s *strict) EnterArrayTable(node *ast.Node) { + if !s.Enabled { + return + } + + s.key.UpdateArrayTable(node) +} + +func (s *strict) EnterKeyValue(node *ast.Node) { + if !s.Enabled { + return + } + + s.key.Push(node) +} + +func (s *strict) ExitKeyValue(node *ast.Node) { + if !s.Enabled { + return + } + + s.key.Pop(node) +} + +func (s *strict) MissingTable(node *ast.Node) { + if !s.Enabled { + return + } + + s.missing = append(s.missing, decodeError{ + highlight: keyLocation(node), + message: "missing table", + key: s.key.Key(), + }) +} + +func (s *strict) MissingField(node *ast.Node) { + if !s.Enabled { + return + } + + s.missing = append(s.missing, decodeError{ + highlight: keyLocation(node), + message: "missing field", + key: s.key.Key(), + }) +} + +func (s *strict) Error(doc []byte) error { + if !s.Enabled || len(s.missing) == 0 { + return nil + } + + err := &StrictMissingError{ + Errors: make([]DecodeError, 0, len(s.missing)), + } + + for _, derr := range s.missing { + derr := derr + err.Errors = append(err.Errors, *wrapDecodeError(doc, &derr)) + } + + return err +} + +func keyLocation(node *ast.Node) []byte { + k := node.Key() + + hasOne := k.Next() + if !hasOne { + panic("should not be called with empty key") + } + + start := k.Node().Data + end := k.Node().Data + + for k.Next() { + end = k.Node().Data + } + + return danger.BytesRange(start, end) +} diff --git a/vendor/github.com/pelletier/go-toml/v2/toml.abnf b/vendor/github.com/pelletier/go-toml/v2/toml.abnf new file mode 100644 index 000000000..473f3749e --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/toml.abnf @@ -0,0 +1,243 @@ +;; This document describes TOML's syntax, using the ABNF format (defined in +;; RFC 5234 -- 
https://www.ietf.org/rfc/rfc5234.txt). +;; +;; All valid TOML documents will match this description, however certain +;; invalid documents would need to be rejected as per the semantics described +;; in the supporting text description. + +;; It is possible to try this grammar interactively, using instaparse. +;; http://instaparse.mojombo.com/ +;; +;; To do so, in the lower right, click on Options and change `:input-format` to +;; ':abnf'. Then paste this entire ABNF document into the grammar entry box +;; (above the options). Then you can type or paste a sample TOML document into +;; the beige box on the left. Tada! + +;; Overall Structure + +toml = expression *( newline expression ) + +expression = ws [ comment ] +expression =/ ws keyval ws [ comment ] +expression =/ ws table ws [ comment ] + +;; Whitespace + +ws = *wschar +wschar = %x20 ; Space +wschar =/ %x09 ; Horizontal tab + +;; Newline + +newline = %x0A ; LF +newline =/ %x0D.0A ; CRLF + +;; Comment + +comment-start-symbol = %x23 ; # +non-ascii = %x80-D7FF / %xE000-10FFFF +non-eol = %x09 / %x20-7F / non-ascii + +comment = comment-start-symbol *non-eol + +;; Key-Value pairs + +keyval = key keyval-sep val + +key = simple-key / dotted-key +simple-key = quoted-key / unquoted-key + +unquoted-key = 1*( ALPHA / DIGIT / %x2D / %x5F ) ; A-Z / a-z / 0-9 / - / _ +quoted-key = basic-string / literal-string +dotted-key = simple-key 1*( dot-sep simple-key ) + +dot-sep = ws %x2E ws ; . Period +keyval-sep = ws %x3D ws ; = + +val = string / boolean / array / inline-table / date-time / float / integer + +;; String + +string = ml-basic-string / basic-string / ml-literal-string / literal-string + +;; Basic String + +basic-string = quotation-mark *basic-char quotation-mark + +quotation-mark = %x22 ; " + +basic-char = basic-unescaped / escaped +basic-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii +escaped = escape escape-seq-char + +escape = %x5C ; \ +escape-seq-char = %x22 ; " quotation mark U+0022 +escape-seq-char =/ %x5C ; \ reverse solidus U+005C +escape-seq-char =/ %x62 ; b backspace U+0008 +escape-seq-char =/ %x66 ; f form feed U+000C +escape-seq-char =/ %x6E ; n line feed U+000A +escape-seq-char =/ %x72 ; r carriage return U+000D +escape-seq-char =/ %x74 ; t tab U+0009 +escape-seq-char =/ %x75 4HEXDIG ; uXXXX U+XXXX +escape-seq-char =/ %x55 8HEXDIG ; UXXXXXXXX U+XXXXXXXX + +;; Multiline Basic String + +ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body + ml-basic-string-delim +ml-basic-string-delim = 3quotation-mark +ml-basic-body = *mlb-content *( mlb-quotes 1*mlb-content ) [ mlb-quotes ] + +mlb-content = mlb-char / newline / mlb-escaped-nl +mlb-char = mlb-unescaped / escaped +mlb-quotes = 1*2quotation-mark +mlb-unescaped = wschar / %x21 / %x23-5B / %x5D-7E / non-ascii +mlb-escaped-nl = escape ws newline *( wschar / newline ) + +;; Literal String + +literal-string = apostrophe *literal-char apostrophe + +apostrophe = %x27 ; ' apostrophe + +literal-char = %x09 / %x20-26 / %x28-7E / non-ascii + +;; Multiline Literal String + +ml-literal-string = ml-literal-string-delim [ newline ] ml-literal-body + ml-literal-string-delim +ml-literal-string-delim = 3apostrophe +ml-literal-body = *mll-content *( mll-quotes 1*mll-content ) [ mll-quotes ] + +mll-content = mll-char / newline +mll-char = %x09 / %x20-26 / %x28-7E / non-ascii +mll-quotes = 1*2apostrophe + +;; Integer + +integer = dec-int / hex-int / oct-int / bin-int + +minus = %x2D ; - +plus = %x2B ; + +underscore = %x5F ; _ +digit1-9 = %x31-39 ; 1-9 +digit0-7 = %x30-37 ; 0-7 
+digit0-1 = %x30-31 ; 0-1 + +hex-prefix = %x30.78 ; 0x +oct-prefix = %x30.6F ; 0o +bin-prefix = %x30.62 ; 0b + +dec-int = [ minus / plus ] unsigned-dec-int +unsigned-dec-int = DIGIT / digit1-9 1*( DIGIT / underscore DIGIT ) + +hex-int = hex-prefix HEXDIG *( HEXDIG / underscore HEXDIG ) +oct-int = oct-prefix digit0-7 *( digit0-7 / underscore digit0-7 ) +bin-int = bin-prefix digit0-1 *( digit0-1 / underscore digit0-1 ) + +;; Float + +float = float-int-part ( exp / frac [ exp ] ) +float =/ special-float + +float-int-part = dec-int +frac = decimal-point zero-prefixable-int +decimal-point = %x2E ; . +zero-prefixable-int = DIGIT *( DIGIT / underscore DIGIT ) + +exp = "e" float-exp-part +float-exp-part = [ minus / plus ] zero-prefixable-int + +special-float = [ minus / plus ] ( inf / nan ) +inf = %x69.6e.66 ; inf +nan = %x6e.61.6e ; nan + +;; Boolean + +boolean = true / false + +true = %x74.72.75.65 ; true +false = %x66.61.6C.73.65 ; false + +;; Date and Time (as defined in RFC 3339) + +date-time = offset-date-time / local-date-time / local-date / local-time + +date-fullyear = 4DIGIT +date-month = 2DIGIT ; 01-12 +date-mday = 2DIGIT ; 01-28, 01-29, 01-30, 01-31 based on month/year +time-delim = "T" / %x20 ; T, t, or space +time-hour = 2DIGIT ; 00-23 +time-minute = 2DIGIT ; 00-59 +time-second = 2DIGIT ; 00-58, 00-59, 00-60 based on leap second rules +time-secfrac = "." 1*DIGIT +time-numoffset = ( "+" / "-" ) time-hour ":" time-minute +time-offset = "Z" / time-numoffset + +partial-time = time-hour ":" time-minute ":" time-second [ time-secfrac ] +full-date = date-fullyear "-" date-month "-" date-mday +full-time = partial-time time-offset + +;; Offset Date-Time + +offset-date-time = full-date time-delim full-time + +;; Local Date-Time + +local-date-time = full-date time-delim partial-time + +;; Local Date + +local-date = full-date + +;; Local Time + +local-time = partial-time + +;; Array + +array = array-open [ array-values ] ws-comment-newline array-close + +array-open = %x5B ; [ +array-close = %x5D ; ] + +array-values = ws-comment-newline val ws-comment-newline array-sep array-values +array-values =/ ws-comment-newline val ws-comment-newline [ array-sep ] + +array-sep = %x2C ; , Comma + +ws-comment-newline = *( wschar / [ comment ] newline ) + +;; Table + +table = std-table / array-table + +;; Standard Table + +std-table = std-table-open key std-table-close + +std-table-open = %x5B ws ; [ Left square bracket +std-table-close = ws %x5D ; ] Right square bracket + +;; Inline Table + +inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close + +inline-table-open = %x7B ws ; { +inline-table-close = ws %x7D ; } +inline-table-sep = ws %x2C ws ; , Comma + +inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ] + +;; Array Table + +array-table = array-table-open key array-table-close + +array-table-open = %x5B.5B ws ; [[ Double left square bracket +array-table-close = ws %x5D.5D ; ]] Double right square bracket + +;; Built-in ABNF terms, reproduced here for clarity + +ALPHA = %x41-5A / %x61-7A ; A-Z / a-z +DIGIT = %x30-39 ; 0-9 +HEXDIG = DIGIT / "A" / "B" / "C" / "D" / "E" / "F" diff --git a/vendor/github.com/pelletier/go-toml/v2/types.go b/vendor/github.com/pelletier/go-toml/v2/types.go new file mode 100644 index 000000000..630a45466 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/types.go @@ -0,0 +1,14 @@ +package toml + +import ( + "encoding" + "reflect" + "time" +) + +var timeType = reflect.TypeOf(time.Time{}) +var textMarshalerType 
= reflect.TypeOf(new(encoding.TextMarshaler)).Elem() +var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() +var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{}) +var sliceInterfaceType = reflect.TypeOf([]interface{}{}) +var stringType = reflect.TypeOf("") diff --git a/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go new file mode 100644 index 000000000..2219f704e --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/unmarshaler.go @@ -0,0 +1,1189 @@ +package toml + +import ( + "encoding" + "errors" + "fmt" + "io" + "io/ioutil" + "math" + "reflect" + "strings" + "sync/atomic" + "time" + + "github.com/pelletier/go-toml/v2/internal/ast" + "github.com/pelletier/go-toml/v2/internal/danger" + "github.com/pelletier/go-toml/v2/internal/tracker" +) + +// Unmarshal deserializes a TOML document into a Go value. +// +// It is a shortcut for Decoder.Decode() with the default options. +func Unmarshal(data []byte, v interface{}) error { + p := parser{} + p.Reset(data) + d := decoder{p: &p} + + return d.FromParser(v) +} + +// Decoder reads and decode a TOML document from an input stream. +type Decoder struct { + // input + r io.Reader + + // global settings + strict bool +} + +// NewDecoder creates a new Decoder that will read from r. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{r: r} +} + +// DisallowUnknownFields causes the Decoder to return an error when the +// destination is a struct and the input contains a key that does not match a +// non-ignored field. +// +// In that case, the Decoder returns a StrictMissingError that can be used to +// retrieve the individual errors as well as generate a human readable +// description of the missing fields. +func (d *Decoder) DisallowUnknownFields() *Decoder { + d.strict = true + return d +} + +// Decode the whole content of r into v. +// +// By default, values in the document that don't exist in the target Go value +// are ignored. See Decoder.DisallowUnknownFields() to change this behavior. +// +// When a TOML local date, time, or date-time is decoded into a time.Time, its +// value is represented in time.Local timezone. Otherwise the approriate Local* +// structure is used. For time values, precision up to the nanosecond is +// supported by truncating extra digits. +// +// Empty tables decoded in an interface{} create an empty initialized +// map[string]interface{}. +// +// Types implementing the encoding.TextUnmarshaler interface are decoded from a +// TOML string. +// +// When decoding a number, go-toml will return an error if the number is out of +// bounds for the target type (which includes negative numbers when decoding +// into an unsigned int). +// +// If an error occurs while decoding the content of the document, this function +// returns a toml.DecodeError, providing context about the issue. When using +// strict mode and a field is missing, a `toml.StrictMissingError` is +// returned. In any other case, this function returns a standard Go error. 
+// +// Type mapping +// +// List of supported TOML types and their associated accepted Go types: +// +// String -> string +// Integer -> uint*, int*, depending on size +// Float -> float*, depending on size +// Boolean -> bool +// Offset Date-Time -> time.Time +// Local Date-time -> LocalDateTime, time.Time +// Local Date -> LocalDate, time.Time +// Local Time -> LocalTime, time.Time +// Array -> slice and array, depending on elements types +// Table -> map and struct +// Inline Table -> same as Table +// Array of Tables -> same as Array and Table +func (d *Decoder) Decode(v interface{}) error { + b, err := ioutil.ReadAll(d.r) + if err != nil { + return fmt.Errorf("toml: %w", err) + } + + p := parser{} + p.Reset(b) + dec := decoder{ + p: &p, + strict: strict{ + Enabled: d.strict, + }, + } + + return dec.FromParser(v) +} + +type decoder struct { + // Which parser instance in use for this decoding session. + p *parser + + // Flag indicating that the current expression is stashed. + // If set to true, calling nextExpr will not actually pull a new expression + // but turn off the flag instead. + stashedExpr bool + + // Skip expressions until a table is found. This is set to true when a + // table could not be create (missing field in map), so all KV expressions + // need to be skipped. + skipUntilTable bool + + // Tracks position in Go arrays. + // This is used when decoding [[array tables]] into Go arrays. Given array + // tables are separate TOML expression, we need to keep track of where we + // are at in the Go array, as we can't just introspect its size. + arrayIndexes map[reflect.Value]int + + // Tracks keys that have been seen, with which type. + seen tracker.SeenTracker + + // Strict mode + strict strict + + // Current context for the error. + errorContext *errorContext +} + +type errorContext struct { + Struct reflect.Type + Field []int +} + +func (d *decoder) typeMismatchError(toml string, target reflect.Type) error { + if d.errorContext != nil && d.errorContext.Struct != nil { + ctx := d.errorContext + f := ctx.Struct.FieldByIndex(ctx.Field) + return fmt.Errorf("toml: cannot decode TOML %s into struct field %s.%s of type %s", toml, ctx.Struct, f.Name, f.Type) + } + return fmt.Errorf("toml: cannot decode TOML %s into a Go value of type %s", toml, target) +} + +func (d *decoder) expr() *ast.Node { + return d.p.Expression() +} + +func (d *decoder) nextExpr() bool { + if d.stashedExpr { + d.stashedExpr = false + return true + } + return d.p.NextExpression() +} + +func (d *decoder) stashExpr() { + d.stashedExpr = true +} + +func (d *decoder) arrayIndex(shouldAppend bool, v reflect.Value) int { + if d.arrayIndexes == nil { + d.arrayIndexes = make(map[reflect.Value]int, 1) + } + + idx, ok := d.arrayIndexes[v] + + if !ok { + d.arrayIndexes[v] = 0 + } else if shouldAppend { + idx++ + d.arrayIndexes[v] = idx + } + + return idx +} + +func (d *decoder) FromParser(v interface{}) error { + r := reflect.ValueOf(v) + if r.Kind() != reflect.Ptr { + return fmt.Errorf("toml: decoding can only be performed into a pointer, not %s", r.Kind()) + } + + if r.IsNil() { + return fmt.Errorf("toml: decoding pointer target cannot be nil") + } + + r = r.Elem() + if r.Kind() == reflect.Interface && r.IsNil() { + newMap := map[string]interface{}{} + r.Set(reflect.ValueOf(newMap)) + } + + err := d.fromParser(r) + if err == nil { + return d.strict.Error(d.p.data) + } + + var e *decodeError + if errors.As(err, &e) { + return wrapDecodeError(d.p.data, e) + } + + return err +} + +func (d *decoder) fromParser(root 
reflect.Value) error { + for d.nextExpr() { + err := d.handleRootExpression(d.expr(), root) + if err != nil { + return err + } + } + + return d.p.Error() +} + +/* +Rules for the unmarshal code: + +- The stack is used to keep track of which values need to be set where. +- handle* functions <=> switch on a given ast.Kind. +- unmarshalX* functions need to unmarshal a node of kind X. +- An "object" is either a struct or a map. +*/ + +func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error { + var x reflect.Value + var err error + + if !(d.skipUntilTable && expr.Kind == ast.KeyValue) { + err = d.seen.CheckExpression(expr) + if err != nil { + return err + } + } + + switch expr.Kind { + case ast.KeyValue: + if d.skipUntilTable { + return nil + } + x, err = d.handleKeyValue(expr, v) + case ast.Table: + d.skipUntilTable = false + d.strict.EnterTable(expr) + x, err = d.handleTable(expr.Key(), v) + case ast.ArrayTable: + d.skipUntilTable = false + d.strict.EnterArrayTable(expr) + x, err = d.handleArrayTable(expr.Key(), v) + default: + panic(fmt.Errorf("parser should not permit expression of kind %s at document root", expr.Kind)) + } + + if d.skipUntilTable { + if expr.Kind == ast.Table || expr.Kind == ast.ArrayTable { + d.strict.MissingTable(expr) + } + } else if err == nil && x.IsValid() { + v.Set(x) + } + + return err +} + +func (d *decoder) handleArrayTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + if key.Next() { + return d.handleArrayTablePart(key, v) + } + return d.handleKeyValues(v) +} + +func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + switch v.Kind() { + case reflect.Interface: + elem := v.Elem() + if !elem.IsValid() { + elem = reflect.New(sliceInterfaceType).Elem() + elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) + } else if elem.Kind() == reflect.Slice { + if elem.Type() != sliceInterfaceType { + elem = reflect.New(sliceInterfaceType).Elem() + elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) + } else if !elem.CanSet() { + nelem := reflect.New(sliceInterfaceType).Elem() + nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap())) + reflect.Copy(nelem, elem) + elem = nelem + } + } + return d.handleArrayTableCollectionLast(key, elem) + case reflect.Ptr: + elem := v.Elem() + if !elem.IsValid() { + ptr := reflect.New(v.Type().Elem()) + v.Set(ptr) + elem = ptr.Elem() + } + + elem, err := d.handleArrayTableCollectionLast(key, elem) + if err != nil { + return reflect.Value{}, err + } + v.Elem().Set(elem) + + return v, nil + case reflect.Slice: + elemType := v.Type().Elem() + var elem reflect.Value + if elemType.Kind() == reflect.Interface { + elem = makeMapStringInterface() + } else { + elem = reflect.New(elemType).Elem() + } + elem2, err := d.handleArrayTable(key, elem) + if err != nil { + return reflect.Value{}, err + } + if elem2.IsValid() { + elem = elem2 + } + return reflect.Append(v, elem), nil + case reflect.Array: + idx := d.arrayIndex(true, v) + if idx >= v.Len() { + return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx) + } + elem := v.Index(idx) + _, err := d.handleArrayTable(key, elem) + return v, err + } + + return d.handleArrayTable(key, v) +} + +// When parsing an array table expression, each part of the key needs to be +// evaluated like a normal key, but if it returns a collection, it also needs to +// point to the last element of the collection. 
Unless it is the last part of +// the key, then it needs to create a new element at the end. +func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + if key.IsLast() { + return d.handleArrayTableCollectionLast(key, v) + } + + switch v.Kind() { + case reflect.Ptr: + elem := v.Elem() + if !elem.IsValid() { + ptr := reflect.New(v.Type().Elem()) + v.Set(ptr) + elem = ptr.Elem() + } + + elem, err := d.handleArrayTableCollection(key, elem) + if err != nil { + return reflect.Value{}, err + } + if elem.IsValid() { + v.Elem().Set(elem) + } + + return v, nil + case reflect.Slice: + elem := v.Index(v.Len() - 1) + x, err := d.handleArrayTable(key, elem) + if err != nil || d.skipUntilTable { + return reflect.Value{}, err + } + if x.IsValid() { + elem.Set(x) + } + + return v, err + case reflect.Array: + idx := d.arrayIndex(false, v) + if idx >= v.Len() { + return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx) + } + elem := v.Index(idx) + _, err := d.handleArrayTable(key, elem) + return v, err + } + + return d.handleArrayTable(key, v) +} + +func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) { + var rv reflect.Value + + // First, dispatch over v to make sure it is a valid object. + // There is no guarantee over what it could be. + switch v.Kind() { + case reflect.Ptr: + elem := v.Elem() + if !elem.IsValid() { + v.Set(reflect.New(v.Type().Elem())) + } + elem = v.Elem() + return d.handleKeyPart(key, elem, nextFn, makeFn) + case reflect.Map: + vt := v.Type() + + // Create the key for the map element. Convert to key type. + mk := reflect.ValueOf(string(key.Node().Data)).Convert(vt.Key()) + + // If the map does not exist, create it. + if v.IsNil() { + vt := v.Type() + v = reflect.MakeMap(vt) + rv = v + } + + mv := v.MapIndex(mk) + set := false + if !mv.IsValid() { + // If there is no value in the map, create a new one according to + // the map type. If the element type is interface, create either a + // map[string]interface{} or a []interface{} depending on whether + // this is the last part of the array table key. 
+ + t := vt.Elem() + if t.Kind() == reflect.Interface { + mv = makeFn() + } else { + mv = reflect.New(t).Elem() + } + set = true + } else if mv.Kind() == reflect.Interface { + mv = mv.Elem() + if !mv.IsValid() { + mv = makeFn() + } + set = true + } else if !mv.CanAddr() { + vt := v.Type() + t := vt.Elem() + oldmv := mv + mv = reflect.New(t).Elem() + mv.Set(oldmv) + set = true + } + + x, err := nextFn(key, mv) + if err != nil { + return reflect.Value{}, err + } + + if x.IsValid() { + mv = x + set = true + } + + if set { + v.SetMapIndex(mk, mv) + } + case reflect.Struct: + path, found := structFieldPath(v, string(key.Node().Data)) + if !found { + d.skipUntilTable = true + return reflect.Value{}, nil + } + + if d.errorContext == nil { + d.errorContext = new(errorContext) + } + t := v.Type() + d.errorContext.Struct = t + d.errorContext.Field = path + + f := v.FieldByIndex(path) + x, err := nextFn(key, f) + if err != nil || d.skipUntilTable { + return reflect.Value{}, err + } + if x.IsValid() { + f.Set(x) + } + d.errorContext.Field = nil + d.errorContext.Struct = nil + case reflect.Interface: + if v.Elem().IsValid() { + v = v.Elem() + } else { + v = makeMapStringInterface() + } + + x, err := d.handleKeyPart(key, v, nextFn, makeFn) + if err != nil { + return reflect.Value{}, err + } + if x.IsValid() { + v = x + } + rv = v + default: + panic(fmt.Errorf("unhandled part: %s", v.Kind())) + } + + return rv, nil +} + +// HandleArrayTablePart navigates the Go structure v using the key v. It is +// only used for the prefix (non-last) parts of an array-table. When +// encountering a collection, it should go to the last element. +func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + var makeFn valueMakerFn + if key.IsLast() { + makeFn = makeSliceInterface + } else { + makeFn = makeMapStringInterface + } + return d.handleKeyPart(key, v, d.handleArrayTableCollection, makeFn) +} + +// HandleTable returns a reference when it has checked the next expression but +// cannot handle it. +func (d *decoder) handleTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + if v.Kind() == reflect.Slice { + if v.Len() == 0 { + return reflect.Value{}, newDecodeError(key.Node().Data, "cannot store a table in a slice") + } + elem := v.Index(v.Len() - 1) + x, err := d.handleTable(key, elem) + if err != nil { + return reflect.Value{}, err + } + if x.IsValid() { + elem.Set(x) + } + return reflect.Value{}, nil + } + if key.Next() { + // Still scoping the key + return d.handleTablePart(key, v) + } + // Done scoping the key. + // Now handle all the key-value expressions in this table. + return d.handleKeyValues(v) +} + +// Handle root expressions until the end of the document or the next +// non-key-value. +func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) { + var rv reflect.Value + for d.nextExpr() { + expr := d.expr() + if expr.Kind != ast.KeyValue { + // Stash the expression so that fromParser can just loop and use + // the right handler. + // We could just recurse ourselves here, but at least this gives a + // chance to pop the stack a bit. 
+ d.stashExpr() + break + } + + err := d.seen.CheckExpression(expr) + if err != nil { + return reflect.Value{}, err + } + + x, err := d.handleKeyValue(expr, v) + if err != nil { + return reflect.Value{}, err + } + if x.IsValid() { + v = x + rv = x + } + } + return rv, nil +} + +type ( + handlerFn func(key ast.Iterator, v reflect.Value) (reflect.Value, error) + valueMakerFn func() reflect.Value +) + +func makeMapStringInterface() reflect.Value { + return reflect.MakeMap(mapStringInterfaceType) +} + +func makeSliceInterface() reflect.Value { + return reflect.MakeSlice(sliceInterfaceType, 0, 16) +} + +func (d *decoder) handleTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) { + return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface) +} + +func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, error) { + // Special case for time, because we allow to unmarshal to it from + // different kind of AST nodes. + if v.Type() == timeType { + return false, nil + } + + if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) { + err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data) + if err != nil { + return false, newDecodeError(d.p.Raw(node.Raw), "%w", err) + } + + return true, nil + } + + return false, nil +} + +func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error { + for v.Kind() == reflect.Ptr { + v = initAndDereferencePointer(v) + } + + ok, err := d.tryTextUnmarshaler(value, v) + if ok || err != nil { + return err + } + + switch value.Kind { + case ast.String: + return d.unmarshalString(value, v) + case ast.Integer: + return d.unmarshalInteger(value, v) + case ast.Float: + return d.unmarshalFloat(value, v) + case ast.Bool: + return d.unmarshalBool(value, v) + case ast.DateTime: + return d.unmarshalDateTime(value, v) + case ast.LocalDate: + return d.unmarshalLocalDate(value, v) + case ast.LocalTime: + return d.unmarshalLocalTime(value, v) + case ast.LocalDateTime: + return d.unmarshalLocalDateTime(value, v) + case ast.InlineTable: + return d.unmarshalInlineTable(value, v) + case ast.Array: + return d.unmarshalArray(value, v) + default: + panic(fmt.Errorf("handleValue not implemented for %s", value.Kind)) + } +} + +func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error { + switch v.Kind() { + case reflect.Slice: + if v.IsNil() { + v.Set(reflect.MakeSlice(v.Type(), 0, 16)) + } else { + v.SetLen(0) + } + case reflect.Array: + // arrays are always initialized + case reflect.Interface: + elem := v.Elem() + if !elem.IsValid() { + elem = reflect.New(sliceInterfaceType).Elem() + elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) + } else if elem.Kind() == reflect.Slice { + if elem.Type() != sliceInterfaceType { + elem = reflect.New(sliceInterfaceType).Elem() + elem.Set(reflect.MakeSlice(sliceInterfaceType, 0, 16)) + } else if !elem.CanSet() { + nelem := reflect.New(sliceInterfaceType).Elem() + nelem.Set(reflect.MakeSlice(sliceInterfaceType, elem.Len(), elem.Cap())) + reflect.Copy(nelem, elem) + elem = nelem + } + } + err := d.unmarshalArray(array, elem) + if err != nil { + return err + } + v.Set(elem) + return nil + default: + // TODO: use newDecodeError, but first the parser needs to fill + // array.Data. 
+ return d.typeMismatchError("array", v.Type()) + } + + elemType := v.Type().Elem() + + it := array.Children() + idx := 0 + for it.Next() { + n := it.Node() + + // TODO: optimize + if v.Kind() == reflect.Slice { + elem := reflect.New(elemType).Elem() + + err := d.handleValue(n, elem) + if err != nil { + return err + } + + v.Set(reflect.Append(v, elem)) + } else { // array + if idx >= v.Len() { + return nil + } + elem := v.Index(idx) + err := d.handleValue(n, elem) + if err != nil { + return err + } + idx++ + } + } + + return nil +} + +func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error { + // Make sure v is an initialized object. + switch v.Kind() { + case reflect.Map: + if v.IsNil() { + v.Set(reflect.MakeMap(v.Type())) + } + case reflect.Struct: + // structs are always initialized. + case reflect.Interface: + elem := v.Elem() + if !elem.IsValid() { + elem = makeMapStringInterface() + v.Set(elem) + } + return d.unmarshalInlineTable(itable, elem) + default: + return newDecodeError(itable.Data, "cannot store inline table in Go type %s", v.Kind()) + } + + it := itable.Children() + for it.Next() { + n := it.Node() + + x, err := d.handleKeyValue(n, v) + if err != nil { + return err + } + if x.IsValid() { + v = x + } + } + + return nil +} + +func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error { + dt, err := parseDateTime(value.Data) + if err != nil { + return err + } + + v.Set(reflect.ValueOf(dt)) + return nil +} + +func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error { + ld, err := parseLocalDate(value.Data) + if err != nil { + return err + } + + if v.Type() == timeType { + cast := ld.AsTime(time.Local) + v.Set(reflect.ValueOf(cast)) + return nil + } + + v.Set(reflect.ValueOf(ld)) + + return nil +} + +func (d *decoder) unmarshalLocalTime(value *ast.Node, v reflect.Value) error { + lt, rest, err := parseLocalTime(value.Data) + if err != nil { + return err + } + + if len(rest) > 0 { + return newDecodeError(rest, "extra characters at the end of a local time") + } + + v.Set(reflect.ValueOf(lt)) + return nil +} + +func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error { + ldt, rest, err := parseLocalDateTime(value.Data) + if err != nil { + return err + } + + if len(rest) > 0 { + return newDecodeError(rest, "extra characters at the end of a local date time") + } + + if v.Type() == timeType { + cast := ldt.AsTime(time.Local) + + v.Set(reflect.ValueOf(cast)) + return nil + } + + v.Set(reflect.ValueOf(ldt)) + + return nil +} + +func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error { + b := value.Data[0] == 't' + + switch v.Kind() { + case reflect.Bool: + v.SetBool(b) + case reflect.Interface: + v.Set(reflect.ValueOf(b)) + default: + return newDecodeError(value.Data, "cannot assign boolean to a %t", b) + } + + return nil +} + +func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error { + f, err := parseFloat(value.Data) + if err != nil { + return err + } + + switch v.Kind() { + case reflect.Float64: + v.SetFloat(f) + case reflect.Float32: + if f > math.MaxFloat32 { + return newDecodeError(value.Data, "number %f does not fit in a float32", f) + } + v.SetFloat(f) + case reflect.Interface: + v.Set(reflect.ValueOf(f)) + default: + return newDecodeError(value.Data, "float cannot be assigned to %s", v.Kind()) + } + + return nil +} + +func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error { + const ( + maxInt = int64(^uint(0) >> 1) + minInt = -maxInt - 1 + ) 
+ + i, err := parseInteger(value.Data) + if err != nil { + return err + } + + var r reflect.Value + + switch v.Kind() { + case reflect.Int64: + v.SetInt(i) + return nil + case reflect.Int32: + if i < math.MinInt32 || i > math.MaxInt32 { + return fmt.Errorf("toml: number %d does not fit in an int32", i) + } + + r = reflect.ValueOf(int32(i)) + case reflect.Int16: + if i < math.MinInt16 || i > math.MaxInt16 { + return fmt.Errorf("toml: number %d does not fit in an int16", i) + } + + r = reflect.ValueOf(int16(i)) + case reflect.Int8: + if i < math.MinInt8 || i > math.MaxInt8 { + return fmt.Errorf("toml: number %d does not fit in an int8", i) + } + + r = reflect.ValueOf(int8(i)) + case reflect.Int: + if i < minInt || i > maxInt { + return fmt.Errorf("toml: number %d does not fit in an int", i) + } + + r = reflect.ValueOf(int(i)) + case reflect.Uint64: + if i < 0 { + return fmt.Errorf("toml: negative number %d does not fit in an uint64", i) + } + + r = reflect.ValueOf(uint64(i)) + case reflect.Uint32: + if i < 0 || i > math.MaxUint32 { + return fmt.Errorf("toml: negative number %d does not fit in an uint32", i) + } + + r = reflect.ValueOf(uint32(i)) + case reflect.Uint16: + if i < 0 || i > math.MaxUint16 { + return fmt.Errorf("toml: negative number %d does not fit in an uint16", i) + } + + r = reflect.ValueOf(uint16(i)) + case reflect.Uint8: + if i < 0 || i > math.MaxUint8 { + return fmt.Errorf("toml: negative number %d does not fit in an uint8", i) + } + + r = reflect.ValueOf(uint8(i)) + case reflect.Uint: + if i < 0 { + return fmt.Errorf("toml: negative number %d does not fit in an uint", i) + } + + r = reflect.ValueOf(uint(i)) + case reflect.Interface: + r = reflect.ValueOf(i) + default: + return d.typeMismatchError("integer", v.Type()) + } + + if !r.Type().AssignableTo(v.Type()) { + r = r.Convert(v.Type()) + } + + v.Set(r) + + return nil +} + +func (d *decoder) unmarshalString(value *ast.Node, v reflect.Value) error { + switch v.Kind() { + case reflect.String: + v.SetString(string(value.Data)) + case reflect.Interface: + v.Set(reflect.ValueOf(string(value.Data))) + default: + return newDecodeError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind()) + } + + return nil +} + +func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value, error) { + d.strict.EnterKeyValue(expr) + + v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v) + if d.skipUntilTable { + d.strict.MissingField(expr) + d.skipUntilTable = false + } + + d.strict.ExitKeyValue(expr) + + return v, err +} + +func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) { + if key.Next() { + // Still scoping the key + return d.handleKeyValuePart(key, value, v) + } + // Done scoping the key. + // v is whatever Go value we need to fill. + return reflect.Value{}, d.handleValue(value, v) +} + +func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) { + // contains the replacement for v + var rv reflect.Value + + // First, dispatch over v to make sure it is a valid object. + // There is no guarantee over what it could be. 
+ switch v.Kind() { + case reflect.Map: + vt := v.Type() + + mk := reflect.ValueOf(string(key.Node().Data)) + mkt := stringType + + keyType := vt.Key() + if !mkt.AssignableTo(keyType) { + if !mkt.ConvertibleTo(keyType) { + return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", mkt, keyType) + } + + mk = mk.Convert(keyType) + } + + // If the map does not exist, create it. + if v.IsNil() { + v = reflect.MakeMap(vt) + rv = v + } + + mv := v.MapIndex(mk) + set := false + if !mv.IsValid() { + set = true + mv = reflect.New(v.Type().Elem()).Elem() + } else { + if key.IsLast() { + var x interface{} + mv = reflect.ValueOf(&x).Elem() + set = true + } + } + + nv, err := d.handleKeyValueInner(key, value, mv) + if err != nil { + return reflect.Value{}, err + } + if nv.IsValid() { + mv = nv + set = true + } + + if set { + v.SetMapIndex(mk, mv) + } + case reflect.Struct: + path, found := structFieldPath(v, string(key.Node().Data)) + if !found { + d.skipUntilTable = true + break + } + + if d.errorContext == nil { + d.errorContext = new(errorContext) + } + t := v.Type() + d.errorContext.Struct = t + d.errorContext.Field = path + + f := v.FieldByIndex(path) + x, err := d.handleKeyValueInner(key, value, f) + if err != nil { + return reflect.Value{}, err + } + + if x.IsValid() { + f.Set(x) + } + d.errorContext.Struct = nil + d.errorContext.Field = nil + case reflect.Interface: + v = v.Elem() + + // Following encoding/json: decoding an object into an + // interface{}, it needs to always hold a + // map[string]interface{}. This is for the types to be + // consistent whether a previous value was set or not. + if !v.IsValid() || v.Type() != mapStringInterfaceType { + v = makeMapStringInterface() + } + + x, err := d.handleKeyValuePart(key, value, v) + if err != nil { + return reflect.Value{}, err + } + if x.IsValid() { + v = x + } + rv = v + case reflect.Ptr: + elem := v.Elem() + if !elem.IsValid() { + ptr := reflect.New(v.Type().Elem()) + v.Set(ptr) + rv = v + elem = ptr.Elem() + } + + elem2, err := d.handleKeyValuePart(key, value, elem) + if err != nil { + return reflect.Value{}, err + } + if elem2.IsValid() { + elem = elem2 + } + v.Elem().Set(elem) + default: + return reflect.Value{}, fmt.Errorf("unhandled kv part: %s", v.Kind()) + } + + return rv, nil +} + +func initAndDereferencePointer(v reflect.Value) reflect.Value { + var elem reflect.Value + if v.IsNil() { + ptr := reflect.New(v.Type().Elem()) + v.Set(ptr) + } + elem = v.Elem() + return elem +} + +type fieldPathsMap = map[string][]int + +var globalFieldPathsCache atomic.Value // map[danger.TypeID]fieldPathsMap + +func structFieldPath(v reflect.Value, name string) ([]int, bool) { + t := v.Type() + + cache, _ := globalFieldPathsCache.Load().(map[danger.TypeID]fieldPathsMap) + fieldPaths, ok := cache[danger.MakeTypeID(t)] + + if !ok { + fieldPaths = map[string][]int{} + + forEachField(t, nil, func(name string, path []int) { + fieldPaths[name] = path + // extra copy for the case-insensitive match + fieldPaths[strings.ToLower(name)] = path + }) + + newCache := make(map[danger.TypeID]fieldPathsMap, len(cache)+1) + newCache[danger.MakeTypeID(t)] = fieldPaths + for k, v := range cache { + newCache[k] = v + } + globalFieldPathsCache.Store(newCache) + } + + path, ok := fieldPaths[name] + if !ok { + path, ok = fieldPaths[strings.ToLower(name)] + } + return path, ok +} + +func forEachField(t reflect.Type, path []int, do func(name string, path []int)) { + n := t.NumField() + for i := 0; i < n; i++ { + f := t.Field(i) + + if 
!f.Anonymous && f.PkgPath != "" { + // only consider exported fields. + continue + } + + fieldPath := append(path, i) + fieldPath = fieldPath[:len(fieldPath):len(fieldPath)] + + if f.Anonymous { + forEachField(f.Type, fieldPath, do) + continue + } + + name := f.Tag.Get("toml") + if name == "-" { + continue + } + + if i := strings.IndexByte(name, ','); i >= 0 { + name = name[:i] + } + if name == "" { + name = f.Name + } + + do(name, fieldPath) + } +} diff --git a/vendor/github.com/pelletier/go-toml/v2/utf8.go b/vendor/github.com/pelletier/go-toml/v2/utf8.go new file mode 100644 index 000000000..d47a4f20c --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/v2/utf8.go @@ -0,0 +1,240 @@ +package toml + +import ( + "unicode/utf8" +) + +type utf8Err struct { + Index int + Size int +} + +func (u utf8Err) Zero() bool { + return u.Size == 0 +} + +// Verified that a given string is only made of valid UTF-8 characters allowed +// by the TOML spec: +// +// Any Unicode character may be used except those that must be escaped: +// quotation mark, backslash, and the control characters other than tab (U+0000 +// to U+0008, U+000A to U+001F, U+007F). +// +// It is a copy of the Go 1.17 utf8.Valid implementation, tweaked to exit early +// when a character is not allowed. +// +// The returned utf8Err is Zero() if the string is valid, or contains the byte +// index and size of the invalid character. +// +// quotation mark => already checked +// backslash => already checked +// 0-0x8 => invalid +// 0x9 => tab, ok +// 0xA - 0x1F => invalid +// 0x7F => invalid +func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) { + // Fast path. Check for and skip 8 bytes of ASCII characters per iteration. + offset := 0 + for len(p) >= 8 { + // Combining two 32 bit loads allows the same code to be used + // for 32 and 64 bit platforms. + // The compiler can generate a 32bit load for first32 and second32 + // on many platforms. See test/codegen/memcombine.go. + first32 := uint32(p[0]) | uint32(p[1])<<8 | uint32(p[2])<<16 | uint32(p[3])<<24 + second32 := uint32(p[4]) | uint32(p[5])<<8 | uint32(p[6])<<16 | uint32(p[7])<<24 + if (first32|second32)&0x80808080 != 0 { + // Found a non ASCII byte (>= RuneSelf). + break + } + + for i, b := range p[:8] { + if invalidAscii(b) { + err.Index = offset + i + err.Size = 1 + return + } + } + + p = p[8:] + offset += 8 + } + n := len(p) + for i := 0; i < n; { + pi := p[i] + if pi < utf8.RuneSelf { + if invalidAscii(pi) { + err.Index = offset + i + err.Size = 1 + return + } + i++ + continue + } + x := first[pi] + if x == xx { + // Illegal starter byte. + err.Index = offset + i + err.Size = 1 + return + } + size := int(x & 7) + if i+size > n { + // Short or invalid. + err.Index = offset + i + err.Size = n - i + return + } + accept := acceptRanges[x>>4] + if c := p[i+1]; c < accept.lo || accept.hi < c { + err.Index = offset + i + err.Size = 2 + return + } else if size == 2 { + } else if c := p[i+2]; c < locb || hicb < c { + err.Index = offset + i + err.Size = 3 + return + } else if size == 3 { + } else if c := p[i+3]; c < locb || hicb < c { + err.Index = offset + i + err.Size = 4 + return + } + i += size + } + return +} + +// Return the size of the next rune if valid, 0 otherwise. +func utf8ValidNext(p []byte) int { + c := p[0] + + if c < utf8.RuneSelf { + if invalidAscii(c) { + return 0 + } + return 1 + } + + x := first[c] + if x == xx { + // Illegal starter byte. + return 0 + } + size := int(x & 7) + if size > len(p) { + // Short or invalid. 
+ return 0 + } + accept := acceptRanges[x>>4] + if c := p[1]; c < accept.lo || accept.hi < c { + return 0 + } else if size == 2 { + } else if c := p[2]; c < locb || hicb < c { + return 0 + } else if size == 3 { + } else if c := p[3]; c < locb || hicb < c { + return 0 + } + + return size +} + +var invalidAsciiTable = [256]bool{ + 0x00: true, + 0x01: true, + 0x02: true, + 0x03: true, + 0x04: true, + 0x05: true, + 0x06: true, + 0x07: true, + 0x08: true, + // 0x09 TAB + // 0x0A LF + 0x0B: true, + 0x0C: true, + // 0x0D CR + 0x0E: true, + 0x0F: true, + 0x10: true, + 0x11: true, + 0x12: true, + 0x13: true, + 0x14: true, + 0x15: true, + 0x16: true, + 0x17: true, + 0x18: true, + 0x19: true, + 0x1A: true, + 0x1B: true, + 0x1C: true, + 0x1D: true, + 0x1E: true, + 0x1F: true, + // 0x20 - 0x7E Printable ASCII characters + 0x7F: true, +} + +func invalidAscii(b byte) bool { + return invalidAsciiTable[b] +} + +// acceptRange gives the range of valid values for the second byte in a UTF-8 +// sequence. +type acceptRange struct { + lo uint8 // lowest value for second byte. + hi uint8 // highest value for second byte. +} + +// acceptRanges has size 16 to avoid bounds checks in the code that uses it. +var acceptRanges = [16]acceptRange{ + 0: {locb, hicb}, + 1: {0xA0, hicb}, + 2: {locb, 0x9F}, + 3: {0x90, hicb}, + 4: {locb, 0x8F}, +} + +// first is information about the first byte in a UTF-8 sequence. +var first = [256]uint8{ + // 1 2 3 4 5 6 7 8 9 A B C D E F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F + // 1 2 3 4 5 6 7 8 9 A B C D E F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF + xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF + s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF + s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF + s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF +} + +const ( + // The default lowest and highest continuation byte. + locb = 0b10000000 + hicb = 0b10111111 + + // These names of these constants are chosen to give nice alignment in the + // table below. The first nibble is an index into acceptRanges or F for + // special one-byte cases. The second nibble is the Rune length or the + // Status for the special one-byte case. 
+ xx = 0xF1 // invalid: size 1 + as = 0xF0 // ASCII: size 1 + s1 = 0x02 // accept 0, size 2 + s2 = 0x13 // accept 1, size 3 + s3 = 0x03 // accept 0, size 3 + s4 = 0x23 // accept 2, size 3 + s5 = 0x34 // accept 3, size 4 + s6 = 0x04 // accept 0, size 4 + s7 = 0x44 // accept 4, size 4 +) diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go index cb8fc642d..7fe4c38cc 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go @@ -168,6 +168,10 @@ func assigningCallExprs(info *TypesInfoExt, subject *ast.Ident) []*ast.CallExpr // Found the function call. callExprs = append(callExprs, assignT) case *ast.Ident: + // Skip assignments here the RHS points to the same object as the subject. + if assignT.Obj == subject.Obj { + continue + } // The subject was the result of assigning from another identifier. callExprs = append(callExprs, assigningCallExprs(info, assignT)...) default: diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go index 3d239f625..5301a3f22 100644 --- a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -6,7 +6,6 @@ import ( "go/constant" "go/token" "go/types" - "regexp" ) type Lint struct { @@ -52,7 +51,7 @@ func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { continue } - if formatVerbs[i] == "%w" { + if formatVerbs[i] == "w" { lintArg = nil break } @@ -85,6 +84,9 @@ func isErrorStringCall(info types.Info, expr ast.Expr) bool { return false } +// printfFormatStringVerbs returns a normalized list of all the verbs that are used per argument to +// the printf function. The index of each returned element corresponds to index of the respective +// argument. func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, bool) { if len(call.Args) <= 1 { return nil, false @@ -96,10 +98,23 @@ func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, boo } formatString := constant.StringVal(info.Types[strLit].Value) - // Naive format string argument verb. This does not take modifiers such as - // padding into account... 
- re := regexp.MustCompile(`%[^%]`) - return re.FindAllString(formatString, -1), true + pp := printfParser{str: formatString} + verbs, err := pp.ParseAllVerbs() + if err != nil { + return nil, false + } + orderedVerbs := verbOrder(verbs, len(call.Args)-1) + + resolvedVerbs := make([]string, len(orderedVerbs)) + for i, vv := range orderedVerbs { + for _, v := range vv { + resolvedVerbs[i] = v.format + if v.format == "w" { + break + } + } + } + return resolvedVerbs, true } func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go new file mode 100644 index 000000000..b5ecee4f0 --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/printf.go @@ -0,0 +1,127 @@ +package errorlint + +import ( + "fmt" + "io" + "strconv" + "strings" +) + +func verbOrder(verbs []verb, numArgs int) [][]verb { + orderedVerbs := make([][]verb, numArgs) + i := 0 + for _, v := range verbs { + if v.index != -1 { + i = v.index - 1 + } + orderedVerbs[i] = append(orderedVerbs[i], v) + verbs = verbs[1:] + i++ + } + return orderedVerbs +} + +type verb struct { + format string + index int +} + +type printfParser struct { + str string +} + +func (pp *printfParser) ParseAllVerbs() ([]verb, error) { + verbs := []verb{} + for { + verb, err := pp.parseVerb() + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + verbs = append(verbs, *verb) + } + return verbs, nil +} + +func (pp *printfParser) parseVerb() (*verb, error) { + if err := pp.skipToPercent(); err != nil { + return nil, err + } + if pp.next() != '%' { + return nil, fmt.Errorf("expected '%%'") + } + + index := -1 + switch pp.peek() { + case '%': + pp.next() + return pp.parseVerb() + case '+', '#': + pp.next() + case '[': + var err error + index, err = pp.parseIndex() + if err != nil { + return nil, err + } + case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '.': + pp.parsePrecision() + case 0: + return nil, io.EOF + } + + format := pp.next() + + return &verb{format: string(format), index: index}, nil +} + +func (pp *printfParser) parseIndex() (int, error) { + if pp.next() != '[' { + return -1, fmt.Errorf("expected '['") + } + end := strings.Index(pp.str, "]") + if end == -1 { + return -1, fmt.Errorf("unterminated indexed verb") + } + index, err := strconv.Atoi(pp.str[:end]) + if err != nil { + return -1, err + } + pp.str = pp.str[end+1:] + return index, nil +} + +func (pp *printfParser) parsePrecision() { + for { + if r := pp.peek(); (r < '0' || '9' < r) && r != '.' 
{ + break + } + pp.next() + } +} + +func (pp *printfParser) skipToPercent() error { + i := strings.Index(pp.str, "%") + if i == -1 { + return io.EOF + } + pp.str = pp.str[i:] + return nil +} + +func (pp *printfParser) peek() rune { + if len(pp.str) == 0 { + return 0 + } + return rune(pp.str[0]) +} + +func (pp *printfParser) next() rune { + if len(pp.str) == 0 { + return 0 + } + r := rune(pp.str[0]) + pp.str = pp.str[1:] + return r +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/README.md b/vendor/github.com/prometheus/client_golang/prometheus/README.md index 44986bff0..c67ff1b7f 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/README.md +++ b/vendor/github.com/prometheus/client_golang/prometheus/README.md @@ -1 +1 @@ -See [![go-doc](https://godoc.org/github.com/prometheus/client_golang/prometheus?status.svg)](https://godoc.org/github.com/prometheus/client_golang/prometheus). +See [![Go Reference](https://pkg.go.dev/badge/github.com/prometheus/client_golang/prometheus.svg)](https://pkg.go.dev/github.com/prometheus/client_golang/prometheus). diff --git a/vendor/github.com/prometheus/client_golang/prometheus/build_info.go b/vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go similarity index 56% rename from vendor/github.com/prometheus/client_golang/prometheus/build_info.go rename to vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go index 288f0e854..450189f35 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/build_info.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/build_info_collector.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Prometheus Authors +// Copyright 2021 The Prometheus Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -11,19 +11,28 @@ // See the License for the specific language governing permissions and // limitations under the License. -// +build go1.12 - package prometheus import "runtime/debug" -// readBuildInfo is a wrapper around debug.ReadBuildInfo for Go 1.12+. -func readBuildInfo() (path, version, sum string) { - path, version, sum = "unknown", "unknown", "unknown" +// NewBuildInfoCollector is the obsolete version of collectors.NewBuildInfoCollector. +// See there for documentation. +// +// Deprecated: Use collectors.NewBuildInfoCollector instead. 
+func NewBuildInfoCollector() Collector { + path, version, sum := "unknown", "unknown", "unknown" if bi, ok := debug.ReadBuildInfo(); ok { path = bi.Main.Path version = bi.Main.Version sum = bi.Main.Sum } - return + c := &selfCollector{MustNewConstMetric( + NewDesc( + "go_build_info", + "Build information about the main Go module.", + nil, Labels{"path": path, "version": version, "checksum": sum}, + ), + GaugeValue, 1)} + c.init(c.self) + return c } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/collector.go b/vendor/github.com/prometheus/client_golang/prometheus/collector.go index 1e839650d..ac1ca3cf5 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/collector.go @@ -118,3 +118,11 @@ func (c *selfCollector) Describe(ch chan<- *Desc) { func (c *selfCollector) Collect(ch chan<- Metric) { ch <- c.self } + +// collectorMetric is a metric that is also a collector. +// Because of selfCollector, most (if not all) Metrics in +// this package are also collectors. +type collectorMetric interface { + Metric + Collector +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/counter.go b/vendor/github.com/prometheus/client_golang/prometheus/counter.go index 0e1b48c03..00d70f09b 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/counter.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/counter.go @@ -133,10 +133,14 @@ func (c *counter) Inc() { atomic.AddUint64(&c.valInt, 1) } -func (c *counter) Write(out *dto.Metric) error { +func (c *counter) get() float64 { fval := math.Float64frombits(atomic.LoadUint64(&c.valBits)) ival := atomic.LoadUint64(&c.valInt) - val := fval + float64(ival) + return fval + float64(ival) +} + +func (c *counter) Write(out *dto.Metric) error { + val := c.get() var exemplar *dto.Exemplar if e := c.exemplar.Load(); e != nil { @@ -163,7 +167,7 @@ func (c *counter) updateExemplar(v float64, l Labels) { // (e.g. number of HTTP requests, partitioned by response code and // method). Create instances with NewCounterVec. type CounterVec struct { - *metricVec + *MetricVec } // NewCounterVec creates a new CounterVec based on the provided CounterOpts and @@ -176,11 +180,11 @@ func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { opts.ConstLabels, ) return &CounterVec{ - metricVec: newMetricVec(desc, func(lvs ...string) Metric { + MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { if len(lvs) != len(desc.variableLabels) { panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, lvs)) } - result := &counter{desc: desc, labelPairs: makeLabelPairs(desc, lvs), now: time.Now} + result := &counter{desc: desc, labelPairs: MakeLabelPairs(desc, lvs), now: time.Now} result.init(result) // Init self-collection. return result }), @@ -188,7 +192,7 @@ func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { } // GetMetricWithLabelValues returns the Counter for the given slice of label -// values (same order as the VariableLabels in Desc). If that combination of +// values (same order as the variable labels in Desc). If that combination of // label values is accessed for the first time, a new Counter is created. 
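A usage sketch for the labelled-counter API documented in this hunk; the metric and label names are illustrative, not taken from the diff:

```go
package main

import "github.com/prometheus/client_golang/prometheus"

func main() {
	requests := prometheus.NewCounterVec(
		prometheus.CounterOpts{
			Name: "myapp_http_requests_total",
			Help: "HTTP requests partitioned by status code and method.",
		},
		[]string{"code", "method"},
	)
	prometheus.MustRegister(requests)

	// Common hot path: panics if the number of label values is wrong.
	requests.WithLabelValues("200", "GET").Inc()

	// The error-returning variant described above, useful when label
	// values come from untrusted input.
	if c, err := requests.GetMetricWithLabelValues("500", "POST"); err == nil {
		c.Inc()
	}
}
```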
// // It is possible to call this method without using the returned Counter to only @@ -202,7 +206,7 @@ func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { // Counter with the same label values is created later. // // An error is returned if the number of label values is not the same as the -// number of VariableLabels in Desc (minus any curried labels). +// number of variable labels in Desc (minus any curried labels). // // Note that for more than one label value, this method is prone to mistakes // caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as @@ -211,7 +215,7 @@ func NewCounterVec(opts CounterOpts, labelNames []string) *CounterVec { // with a performance overhead (for creating and processing the Labels map). // See also the GaugeVec example. func (v *CounterVec) GetMetricWithLabelValues(lvs ...string) (Counter, error) { - metric, err := v.metricVec.getMetricWithLabelValues(lvs...) + metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) if metric != nil { return metric.(Counter), err } @@ -219,19 +223,19 @@ func (v *CounterVec) GetMetricWithLabelValues(lvs ...string) (Counter, error) { } // GetMetricWith returns the Counter for the given Labels map (the label names -// must match those of the VariableLabels in Desc). If that label map is +// must match those of the variable labels in Desc). If that label map is // accessed for the first time, a new Counter is created. Implications of // creating a Counter without using it and keeping the Counter for later use are // the same as for GetMetricWithLabelValues. // // An error is returned if the number and names of the Labels are inconsistent -// with those of the VariableLabels in Desc (minus any curried labels). +// with those of the variable labels in Desc (minus any curried labels). // // This method is used for the same purpose as // GetMetricWithLabelValues(...string). See there for pros and cons of the two // methods. func (v *CounterVec) GetMetricWith(labels Labels) (Counter, error) { - metric, err := v.metricVec.getMetricWith(labels) + metric, err := v.MetricVec.GetMetricWith(labels) if metric != nil { return metric.(Counter), err } @@ -275,7 +279,7 @@ func (v *CounterVec) With(labels Labels) Counter { // registered with a given registry (usually the uncurried version). The Reset // method deletes all metrics, even if called on a curried vector. func (v *CounterVec) CurryWith(labels Labels) (*CounterVec, error) { - vec, err := v.curryWith(labels) + vec, err := v.MetricVec.CurryWith(labels) if vec != nil { return &CounterVec{vec}, err } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/desc.go b/vendor/github.com/prometheus/client_golang/prometheus/desc.go index 2f19f5e1e..4bb816ab7 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/desc.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/desc.go @@ -20,7 +20,7 @@ import ( "strings" "github.com/cespare/xxhash/v2" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" "github.com/prometheus/common/model" @@ -51,7 +51,7 @@ type Desc struct { // constLabelPairs contains precalculated DTO label pairs based on // the constant labels. 
constLabelPairs []*dto.LabelPair - // VariableLabels contains names of labels for which the metric + // variableLabels contains names of labels for which the metric // maintains variable values. variableLabels []string // id is a hash of the values of the ConstLabels and fqName. This diff --git a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go index 18a99d5fa..c41ab37f3 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/expvar_collector.go @@ -22,43 +22,10 @@ type expvarCollector struct { exports map[string]*Desc } -// NewExpvarCollector returns a newly allocated expvar Collector that still has -// to be registered with a Prometheus registry. +// NewExpvarCollector is the obsolete version of collectors.NewExpvarCollector. +// See there for documentation. // -// An expvar Collector collects metrics from the expvar interface. It provides a -// quick way to expose numeric values that are already exported via expvar as -// Prometheus metrics. Note that the data models of expvar and Prometheus are -// fundamentally different, and that the expvar Collector is inherently slower -// than native Prometheus metrics. Thus, the expvar Collector is probably great -// for experiments and prototying, but you should seriously consider a more -// direct implementation of Prometheus metrics for monitoring production -// systems. -// -// The exports map has the following meaning: -// -// The keys in the map correspond to expvar keys, i.e. for every expvar key you -// want to export as Prometheus metric, you need an entry in the exports -// map. The descriptor mapped to each key describes how to export the expvar -// value. It defines the name and the help string of the Prometheus metric -// proxying the expvar value. The type will always be Untyped. -// -// For descriptors without variable labels, the expvar value must be a number or -// a bool. The number is then directly exported as the Prometheus sample -// value. (For a bool, 'false' translates to 0 and 'true' to 1). Expvar values -// that are not numbers or bools are silently ignored. -// -// If the descriptor has one variable label, the expvar value must be an expvar -// map. The keys in the expvar map become the various values of the one -// Prometheus label. The values in the expvar map must be numbers or bools again -// as above. -// -// For descriptors with more than one variable label, the expvar must be a -// nested expvar map, i.e. where the values of the topmost map are maps again -// etc. until a depth is reached that corresponds to the number of labels. The -// leaves of that structure must be numbers or bools as above to serve as the -// sample values. -// -// Anything that does not fit into the scheme above is silently ignored. +// Deprecated: Use collectors.NewExpvarCollector instead. func NewExpvarCollector(exports map[string]*Desc) Collector { return &expvarCollector{ exports: exports, diff --git a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go index d67573f76..bd0733d6a 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/gauge.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/gauge.go @@ -132,7 +132,7 @@ func (g *gauge) Write(out *dto.Metric) error { // (e.g. 
number of operations queued, partitioned by user and operation // type). Create instances with NewGaugeVec. type GaugeVec struct { - *metricVec + *MetricVec } // NewGaugeVec creates a new GaugeVec based on the provided GaugeOpts and @@ -145,11 +145,11 @@ func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { opts.ConstLabels, ) return &GaugeVec{ - metricVec: newMetricVec(desc, func(lvs ...string) Metric { + MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { if len(lvs) != len(desc.variableLabels) { panic(makeInconsistentCardinalityError(desc.fqName, desc.variableLabels, lvs)) } - result := &gauge{desc: desc, labelPairs: makeLabelPairs(desc, lvs)} + result := &gauge{desc: desc, labelPairs: MakeLabelPairs(desc, lvs)} result.init(result) // Init self-collection. return result }), @@ -157,7 +157,7 @@ func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { } // GetMetricWithLabelValues returns the Gauge for the given slice of label -// values (same order as the VariableLabels in Desc). If that combination of +// values (same order as the variable labels in Desc). If that combination of // label values is accessed for the first time, a new Gauge is created. // // It is possible to call this method without using the returned Gauge to only @@ -172,7 +172,7 @@ func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { // example. // // An error is returned if the number of label values is not the same as the -// number of VariableLabels in Desc (minus any curried labels). +// number of variable labels in Desc (minus any curried labels). // // Note that for more than one label value, this method is prone to mistakes // caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as @@ -180,7 +180,7 @@ func NewGaugeVec(opts GaugeOpts, labelNames []string) *GaugeVec { // latter has a much more readable (albeit more verbose) syntax, but it comes // with a performance overhead (for creating and processing the Labels map). func (v *GaugeVec) GetMetricWithLabelValues(lvs ...string) (Gauge, error) { - metric, err := v.metricVec.getMetricWithLabelValues(lvs...) + metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) if metric != nil { return metric.(Gauge), err } @@ -188,19 +188,19 @@ func (v *GaugeVec) GetMetricWithLabelValues(lvs ...string) (Gauge, error) { } // GetMetricWith returns the Gauge for the given Labels map (the label names -// must match those of the VariableLabels in Desc). If that label map is +// must match those of the variable labels in Desc). If that label map is // accessed for the first time, a new Gauge is created. Implications of // creating a Gauge without using it and keeping the Gauge for later use are // the same as for GetMetricWithLabelValues. // // An error is returned if the number and names of the Labels are inconsistent -// with those of the VariableLabels in Desc (minus any curried labels). +// with those of the variable labels in Desc (minus any curried labels). // // This method is used for the same purpose as // GetMetricWithLabelValues(...string). See there for pros and cons of the two // methods. func (v *GaugeVec) GetMetricWith(labels Labels) (Gauge, error) { - metric, err := v.metricVec.getMetricWith(labels) + metric, err := v.MetricVec.GetMetricWith(labels) if metric != nil { return metric.(Gauge), err } @@ -244,7 +244,7 @@ func (v *GaugeVec) With(labels Labels) Gauge { // registered with a given registry (usually the uncurried version). 
The Reset // method deletes all metrics, even if called on a curried vector. func (v *GaugeVec) CurryWith(labels Labels) (*GaugeVec, error) { - vec, err := v.curryWith(labels) + vec, err := v.MetricVec.CurryWith(labels) if vec != nil { return &GaugeVec{vec}, err } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go index ea05cf429..08195b410 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/go_collector.go @@ -16,53 +16,209 @@ package prometheus import ( "runtime" "runtime/debug" - "sync" "time" ) -type goCollector struct { +func goRuntimeMemStats() memStatsMetrics { + return memStatsMetrics{ + { + desc: NewDesc( + memstatNamespace("alloc_bytes"), + "Number of bytes allocated and still in use.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.Alloc) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("alloc_bytes_total"), + "Total number of bytes allocated, even if freed.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.TotalAlloc) }, + valType: CounterValue, + }, { + desc: NewDesc( + memstatNamespace("sys_bytes"), + "Number of bytes obtained from system.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.Sys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("lookups_total"), + "Total number of pointer lookups.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.Lookups) }, + valType: CounterValue, + }, { + desc: NewDesc( + memstatNamespace("mallocs_total"), + "Total number of mallocs.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.Mallocs) }, + valType: CounterValue, + }, { + desc: NewDesc( + memstatNamespace("frees_total"), + "Total number of frees.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.Frees) }, + valType: CounterValue, + }, { + desc: NewDesc( + memstatNamespace("heap_alloc_bytes"), + "Number of heap bytes allocated and still in use.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapAlloc) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("heap_sys_bytes"), + "Number of heap bytes obtained from system.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("heap_idle_bytes"), + "Number of heap bytes waiting to be used.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapIdle) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("heap_inuse_bytes"), + "Number of heap bytes that are in use.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapInuse) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("heap_released_bytes"), + "Number of heap bytes released to OS.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapReleased) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("heap_objects"), + "Number of allocated objects.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapObjects) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("stack_inuse_bytes"), + "Number of 
bytes in use by the stack allocator.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackInuse) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("stack_sys_bytes"), + "Number of bytes obtained from system for stack allocator.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("mspan_inuse_bytes"), + "Number of bytes in use by mspan structures.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanInuse) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("mspan_sys_bytes"), + "Number of bytes used for mspan structures obtained from system.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("mcache_inuse_bytes"), + "Number of bytes in use by mcache structures.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheInuse) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("mcache_sys_bytes"), + "Number of bytes used for mcache structures obtained from system.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("buck_hash_sys_bytes"), + "Number of bytes used by the profiling bucket hash table.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.BuckHashSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("gc_sys_bytes"), + "Number of bytes used for garbage collection system metadata.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.GCSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("other_sys_bytes"), + "Number of bytes used for other system allocations.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.OtherSys) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("next_gc_bytes"), + "Number of heap bytes when next garbage collection will take place.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return float64(ms.NextGC) }, + valType: GaugeValue, + }, { + desc: NewDesc( + memstatNamespace("gc_cpu_fraction"), + "The fraction of this program's available CPU time used by the GC since the program started.", + nil, nil, + ), + eval: func(ms *runtime.MemStats) float64 { return ms.GCCPUFraction }, + valType: GaugeValue, + }, + } +} + +type baseGoCollector struct { goroutinesDesc *Desc threadsDesc *Desc gcDesc *Desc + gcLastTimeDesc *Desc goInfoDesc *Desc - - // ms... are memstats related. - msLast *runtime.MemStats // Previously collected memstats. - msLastTimestamp time.Time - msMtx sync.Mutex // Protects msLast and msLastTimestamp. - msMetrics memStatsMetrics - msRead func(*runtime.MemStats) // For mocking in tests. - msMaxWait time.Duration // Wait time for fresh memstats. - msMaxAge time.Duration // Maximum allowed age of old memstats. } -// NewGoCollector returns a collector that exports metrics about the current Go -// process. This includes memory stats. To collect those, runtime.ReadMemStats -// is called. This requires to “stop the world”, which usually only happens for -// garbage collection (GC). Take the following implications into account when -// deciding whether to use the Go collector: -// -// 1. 
The performance impact of stopping the world is the more relevant the more -// frequently metrics are collected. However, with Go1.9 or later the -// stop-the-world time per metrics collection is very short (~25µs) so that the -// performance impact will only matter in rare cases. However, with older Go -// versions, the stop-the-world duration depends on the heap size and can be -// quite significant (~1.7 ms/GiB as per -// https://go-review.googlesource.com/c/go/+/34937). -// -// 2. During an ongoing GC, nothing else can stop the world. Therefore, if the -// metrics collection happens to coincide with GC, it will only complete after -// GC has finished. Usually, GC is fast enough to not cause problems. However, -// with a very large heap, GC might take multiple seconds, which is enough to -// cause scrape timeouts in common setups. To avoid this problem, the Go -// collector will use the memstats from a previous collection if -// runtime.ReadMemStats takes more than 1s. However, if there are no previously -// collected memstats, or their collection is more than 5m ago, the collection -// will block until runtime.ReadMemStats succeeds. (The problem might be solved -// in Go1.13, see https://github.com/golang/go/issues/19812 for the related Go -// issue.) -func NewGoCollector() Collector { - return &goCollector{ +func newBaseGoCollector() baseGoCollector { + return baseGoCollector{ goroutinesDesc: NewDesc( "go_goroutines", "Number of goroutines that currently exist.", @@ -75,243 +231,28 @@ func NewGoCollector() Collector { "go_gc_duration_seconds", "A summary of the pause duration of garbage collection cycles.", nil, nil), + gcLastTimeDesc: NewDesc( + memstatNamespace("last_gc_time_seconds"), + "Number of seconds since 1970 of last garbage collection.", + nil, nil), goInfoDesc: NewDesc( "go_info", "Information about the Go environment.", nil, Labels{"version": runtime.Version()}), - msLast: &runtime.MemStats{}, - msRead: runtime.ReadMemStats, - msMaxWait: time.Second, - msMaxAge: 5 * time.Minute, - msMetrics: memStatsMetrics{ - { - desc: NewDesc( - memstatNamespace("alloc_bytes"), - "Number of bytes allocated and still in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Alloc) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("alloc_bytes_total"), - "Total number of bytes allocated, even if freed.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.TotalAlloc) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("sys_bytes"), - "Number of bytes obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Sys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("lookups_total"), - "Total number of pointer lookups.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Lookups) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("mallocs_total"), - "Total number of mallocs.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Mallocs) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("frees_total"), - "Total number of frees.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.Frees) }, - valType: CounterValue, - }, { - desc: NewDesc( - memstatNamespace("heap_alloc_bytes"), - "Number of heap bytes allocated and still in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return 
float64(ms.HeapAlloc) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_sys_bytes"), - "Number of heap bytes obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_idle_bytes"), - "Number of heap bytes waiting to be used.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapIdle) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_inuse_bytes"), - "Number of heap bytes that are in use.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_released_bytes"), - "Number of heap bytes released to OS.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapReleased) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("heap_objects"), - "Number of allocated objects.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.HeapObjects) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("stack_inuse_bytes"), - "Number of bytes in use by the stack allocator.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("stack_sys_bytes"), - "Number of bytes obtained from system for stack allocator.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.StackSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mspan_inuse_bytes"), - "Number of bytes in use by mspan structures.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mspan_sys_bytes"), - "Number of bytes used for mspan structures obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MSpanSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mcache_inuse_bytes"), - "Number of bytes in use by mcache structures.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheInuse) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("mcache_sys_bytes"), - "Number of bytes used for mcache structures obtained from system.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.MCacheSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("buck_hash_sys_bytes"), - "Number of bytes used by the profiling bucket hash table.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.BuckHashSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("gc_sys_bytes"), - "Number of bytes used for garbage collection system metadata.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.GCSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("other_sys_bytes"), - "Number of bytes used for other system allocations.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.OtherSys) }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("next_gc_bytes"), - "Number of heap bytes when next garbage collection will take place.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.NextGC) }, - 
valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("last_gc_time_seconds"), - "Number of seconds since 1970 of last garbage collection.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return float64(ms.LastGC) / 1e9 }, - valType: GaugeValue, - }, { - desc: NewDesc( - memstatNamespace("gc_cpu_fraction"), - "The fraction of this program's available CPU time used by the GC since the program started.", - nil, nil, - ), - eval: func(ms *runtime.MemStats) float64 { return ms.GCCPUFraction }, - valType: GaugeValue, - }, - }, } } -func memstatNamespace(s string) string { - return "go_memstats_" + s -} - // Describe returns all descriptions of the collector. -func (c *goCollector) Describe(ch chan<- *Desc) { +func (c *baseGoCollector) Describe(ch chan<- *Desc) { ch <- c.goroutinesDesc ch <- c.threadsDesc ch <- c.gcDesc + ch <- c.gcLastTimeDesc ch <- c.goInfoDesc - for _, i := range c.msMetrics { - ch <- i.desc - } } // Collect returns the current state of all metrics of the collector. -func (c *goCollector) Collect(ch chan<- Metric) { - var ( - ms = &runtime.MemStats{} - done = make(chan struct{}) - ) - // Start reading memstats first as it might take a while. - go func() { - c.msRead(ms) - c.msMtx.Lock() - c.msLast = ms - c.msLastTimestamp = time.Now() - c.msMtx.Unlock() - close(done) - }() - +func (c *baseGoCollector) Collect(ch chan<- Metric) { ch <- MustNewConstMetric(c.goroutinesDesc, GaugeValue, float64(runtime.NumGoroutine())) n, _ := runtime.ThreadCreateProfile(nil) ch <- MustNewConstMetric(c.threadsDesc, GaugeValue, float64(n)) @@ -326,71 +267,19 @@ func (c *goCollector) Collect(ch chan<- Metric) { } quantiles[0.0] = stats.PauseQuantiles[0].Seconds() ch <- MustNewConstSummary(c.gcDesc, uint64(stats.NumGC), stats.PauseTotal.Seconds(), quantiles) + ch <- MustNewConstMetric(c.gcLastTimeDesc, GaugeValue, float64(stats.LastGC.UnixNano())/1e9) ch <- MustNewConstMetric(c.goInfoDesc, GaugeValue, 1) - - timer := time.NewTimer(c.msMaxWait) - select { - case <-done: // Our own ReadMemStats succeeded in time. Use it. - timer.Stop() // Important for high collection frequencies to not pile up timers. - c.msCollect(ch, ms) - return - case <-timer.C: // Time out, use last memstats if possible. Continue below. - } - c.msMtx.Lock() - if time.Since(c.msLastTimestamp) < c.msMaxAge { - // Last memstats are recent enough. Collect from them under the lock. - c.msCollect(ch, c.msLast) - c.msMtx.Unlock() - return - } - // If we are here, the last memstats are too old or don't exist. We have - // to wait until our own ReadMemStats finally completes. For that to - // happen, we have to release the lock. - c.msMtx.Unlock() - <-done - c.msCollect(ch, ms) } -func (c *goCollector) msCollect(ch chan<- Metric, ms *runtime.MemStats) { - for _, i := range c.msMetrics { - ch <- MustNewConstMetric(i.desc, i.valType, i.eval(ms)) - } +func memstatNamespace(s string) string { + return "go_memstats_" + s } -// memStatsMetrics provide description, value, and value type for memstat metrics. +// memStatsMetrics provide description, evaluator, runtime/metrics name, and +// value type for memstat metrics. type memStatsMetrics []struct { desc *Desc eval func(*runtime.MemStats) float64 valType ValueType } - -// NewBuildInfoCollector returns a collector collecting a single metric -// "go_build_info" with the constant value 1 and three labels "path", "version", -// and "checksum". Their label values contain the main module path, version, and -// checksum, respectively. 
The labels will only have meaningful values if the -// binary is built with Go module support and from source code retrieved from -// the source repository (rather than the local file system). This is usually -// accomplished by building from outside of GOPATH, specifying the full address -// of the main package, e.g. "GO111MODULE=on go run -// github.com/prometheus/client_golang/examples/random". If built without Go -// module support, all label values will be "unknown". If built with Go module -// support but using the source code from the local file system, the "path" will -// be set appropriately, but "checksum" will be empty and "version" will be -// "(devel)". -// -// This collector uses only the build information for the main module. See -// https://github.com/povilasv/prommod for an example of a collector for the -// module dependencies. -func NewBuildInfoCollector() Collector { - path, version, sum := readBuildInfo() - c := &selfCollector{MustNewConstMetric( - NewDesc( - "go_build_info", - "Build information about the main Go module.", - nil, Labels{"path": path, "version": version, "checksum": sum}, - ), - GaugeValue, 1)} - c.init(c.self) - return c -} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go new file mode 100644 index 000000000..24526131e --- /dev/null +++ b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go116.go @@ -0,0 +1,107 @@ +// Copyright 2021 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build !go1.17 +// +build !go1.17 + +package prometheus + +import ( + "runtime" + "sync" + "time" +) + +type goCollector struct { + base baseGoCollector + + // ms... are memstats related. + msLast *runtime.MemStats // Previously collected memstats. + msLastTimestamp time.Time + msMtx sync.Mutex // Protects msLast and msLastTimestamp. + msMetrics memStatsMetrics + msRead func(*runtime.MemStats) // For mocking in tests. + msMaxWait time.Duration // Wait time for fresh memstats. + msMaxAge time.Duration // Maximum allowed age of old memstats. +} + +// NewGoCollector is the obsolete version of collectors.NewGoCollector. +// See there for documentation. +// +// Deprecated: Use collectors.NewGoCollector instead. +func NewGoCollector() Collector { + return &goCollector{ + base: newBaseGoCollector(), + msLast: &runtime.MemStats{}, + msRead: runtime.ReadMemStats, + msMaxWait: time.Second, + msMaxAge: 5 * time.Minute, + msMetrics: goRuntimeMemStats(), + } +} + +// Describe returns all descriptions of the collector. +func (c *goCollector) Describe(ch chan<- *Desc) { + c.base.Describe(ch) + for _, i := range c.msMetrics { + ch <- i.desc + } +} + +// Collect returns the current state of all metrics of the collector. 
+func (c *goCollector) Collect(ch chan<- Metric) { + var ( + ms = &runtime.MemStats{} + done = make(chan struct{}) + ) + // Start reading memstats first as it might take a while. + go func() { + c.msRead(ms) + c.msMtx.Lock() + c.msLast = ms + c.msLastTimestamp = time.Now() + c.msMtx.Unlock() + close(done) + }() + + // Collect base non-memory metrics. + c.base.Collect(ch) + + timer := time.NewTimer(c.msMaxWait) + select { + case <-done: // Our own ReadMemStats succeeded in time. Use it. + timer.Stop() // Important for high collection frequencies to not pile up timers. + c.msCollect(ch, ms) + return + case <-timer.C: // Time out, use last memstats if possible. Continue below. + } + c.msMtx.Lock() + if time.Since(c.msLastTimestamp) < c.msMaxAge { + // Last memstats are recent enough. Collect from them under the lock. + c.msCollect(ch, c.msLast) + c.msMtx.Unlock() + return + } + // If we are here, the last memstats are too old or don't exist. We have + // to wait until our own ReadMemStats finally completes. For that to + // happen, we have to release the lock. + c.msMtx.Unlock() + <-done + c.msCollect(ch, ms) +} + +func (c *goCollector) msCollect(ch chan<- Metric, ms *runtime.MemStats) { + for _, i := range c.msMetrics { + ch <- MustNewConstMetric(i.desc, i.valType, i.eval(ms)) + } +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go new file mode 100644 index 000000000..d43bdcdda --- /dev/null +++ b/vendor/github.com/prometheus/client_golang/prometheus/go_collector_go117.go @@ -0,0 +1,408 @@ +// Copyright 2021 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build go1.17 +// +build go1.17 + +package prometheus + +import ( + "math" + "runtime" + "runtime/metrics" + "strings" + "sync" + + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. + "github.com/golang/protobuf/proto" + "github.com/prometheus/client_golang/prometheus/internal" + dto "github.com/prometheus/client_model/go" +) + +type goCollector struct { + base baseGoCollector + + // mu protects updates to all fields ensuring a consistent + // snapshot is always produced by Collect. + mu sync.Mutex + + // rm... fields all pertain to the runtime/metrics package. + rmSampleBuf []metrics.Sample + rmSampleMap map[string]*metrics.Sample + rmMetrics []collectorMetric + + // With Go 1.17, the runtime/metrics package was introduced. + // From that point on, metric names produced by the runtime/metrics + // package could be generated from runtime/metrics names. However, + // these differ from the old names for the same values. + // + // This field exist to export the same values under the old names + // as well. + msMetrics memStatsMetrics +} + +// NewGoCollector is the obsolete version of collectors.NewGoCollector. +// See there for documentation. +// +// Deprecated: Use collectors.NewGoCollector instead. 
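As with the build-info collector, the deprecation notice points at the collectors subpackage. A registration-and-exposition sketch, assuming the collectors and promhttp subpackages of this module; the listen address is illustrative:

```go
package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/collectors"
	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	reg := prometheus.NewRegistry()

	// On Go 1.17+ this resolves to the runtime/metrics-backed collector
	// defined in this file; on older toolchains, the memstats-based one.
	reg.MustRegister(collectors.NewGoCollector())

	http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{}))
	_ = http.ListenAndServe(":9090", nil)
}
```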
+func NewGoCollector() Collector { + descriptions := metrics.All() + + // Collect all histogram samples so that we can get their buckets. + // The API guarantees that the buckets are always fixed for the lifetime + // of the process. + var histograms []metrics.Sample + for _, d := range descriptions { + if d.Kind == metrics.KindFloat64Histogram { + histograms = append(histograms, metrics.Sample{Name: d.Name}) + } + } + metrics.Read(histograms) + bucketsMap := make(map[string][]float64) + for i := range histograms { + bucketsMap[histograms[i].Name] = histograms[i].Value.Float64Histogram().Buckets + } + + // Generate a Desc and ValueType for each runtime/metrics metric. + metricSet := make([]collectorMetric, 0, len(descriptions)) + sampleBuf := make([]metrics.Sample, 0, len(descriptions)) + sampleMap := make(map[string]*metrics.Sample, len(descriptions)) + for i := range descriptions { + d := &descriptions[i] + namespace, subsystem, name, ok := internal.RuntimeMetricsToProm(d) + if !ok { + // Just ignore this metric; we can't do anything with it here. + // If a user decides to use the latest version of Go, we don't want + // to fail here. This condition is tested elsewhere. + continue + } + + // Set up sample buffer for reading, and a map + // for quick lookup of sample values. + sampleBuf = append(sampleBuf, metrics.Sample{Name: d.Name}) + sampleMap[d.Name] = &sampleBuf[len(sampleBuf)-1] + + var m collectorMetric + if d.Kind == metrics.KindFloat64Histogram { + _, hasSum := rmExactSumMap[d.Name] + unit := d.Name[strings.IndexRune(d.Name, ':')+1:] + m = newBatchHistogram( + NewDesc( + BuildFQName(namespace, subsystem, name), + d.Description, + nil, + nil, + ), + internal.RuntimeMetricsBucketsForUnit(bucketsMap[d.Name], unit), + hasSum, + ) + } else if d.Cumulative { + m = NewCounter(CounterOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: name, + Help: d.Description, + }) + } else { + m = NewGauge(GaugeOpts{ + Namespace: namespace, + Subsystem: subsystem, + Name: name, + Help: d.Description, + }) + } + metricSet = append(metricSet, m) + } + return &goCollector{ + base: newBaseGoCollector(), + rmSampleBuf: sampleBuf, + rmSampleMap: sampleMap, + rmMetrics: metricSet, + msMetrics: goRuntimeMemStats(), + } +} + +// Describe returns all descriptions of the collector. +func (c *goCollector) Describe(ch chan<- *Desc) { + c.base.Describe(ch) + for _, i := range c.msMetrics { + ch <- i.desc + } + for _, m := range c.rmMetrics { + ch <- m.Desc() + } +} + +// Collect returns the current state of all metrics of the collector. +func (c *goCollector) Collect(ch chan<- Metric) { + // Collect base non-memory metrics. + c.base.Collect(ch) + + // Collect must be thread-safe, so prevent concurrent use of + // rmSampleBuf. Just read into rmSampleBuf but write all the data + // we get into our Metrics or MemStats. + // + // This lock also ensures that the Metrics we send out are all from + // the same updates, ensuring their mutual consistency insofar as + // is guaranteed by the runtime/metrics package. + // + // N.B. This locking is heavy-handed, but Collect is expected to be called + // relatively infrequently. Also the core operation here, metrics.Read, + // is fast (O(tens of microseconds)) so contention should certainly be + // low, though channel operations and any allocations may add to that. + c.mu.Lock() + defer c.mu.Unlock() + + // Populate runtime/metrics sample buffer. + metrics.Read(c.rmSampleBuf) + + // Update all our metrics from rmSampleBuf. 
+ for i, sample := range c.rmSampleBuf { + // N.B. switch on concrete type because it's significantly more efficient + // than checking for the Counter and Gauge interface implementations. In + // this case, we control all the types here. + switch m := c.rmMetrics[i].(type) { + case *counter: + // Guard against decreases. This should never happen, but a failure + // to do so will result in a panic, which is a harsh consequence for + // a metrics collection bug. + v0, v1 := m.get(), unwrapScalarRMValue(sample.Value) + if v1 > v0 { + m.Add(unwrapScalarRMValue(sample.Value) - m.get()) + } + m.Collect(ch) + case *gauge: + m.Set(unwrapScalarRMValue(sample.Value)) + m.Collect(ch) + case *batchHistogram: + m.update(sample.Value.Float64Histogram(), c.exactSumFor(sample.Name)) + m.Collect(ch) + default: + panic("unexpected metric type") + } + } + // ms is a dummy MemStats that we populate ourselves so that we can + // populate the old metrics from it. + var ms runtime.MemStats + memStatsFromRM(&ms, c.rmSampleMap) + for _, i := range c.msMetrics { + ch <- MustNewConstMetric(i.desc, i.valType, i.eval(&ms)) + } +} + +// unwrapScalarRMValue unwraps a runtime/metrics value that is assumed +// to be scalar and returns the equivalent float64 value. Panics if the +// value is not scalar. +func unwrapScalarRMValue(v metrics.Value) float64 { + switch v.Kind() { + case metrics.KindUint64: + return float64(v.Uint64()) + case metrics.KindFloat64: + return v.Float64() + case metrics.KindBad: + // Unsupported metric. + // + // This should never happen because we always populate our metric + // set from the runtime/metrics package. + panic("unexpected unsupported metric") + default: + // Unsupported metric kind. + // + // This should never happen because we check for this during initialization + // and flag and filter metrics whose kinds we don't understand. + panic("unexpected unsupported metric kind") + } +} + +var rmExactSumMap = map[string]string{ + "/gc/heap/allocs-by-size:bytes": "/gc/heap/allocs:bytes", + "/gc/heap/frees-by-size:bytes": "/gc/heap/frees:bytes", +} + +// exactSumFor takes a runtime/metrics metric name (that is assumed to +// be of kind KindFloat64Histogram) and returns its exact sum and whether +// its exact sum exists. +// +// The runtime/metrics API for histograms doesn't currently expose exact +// sums, but some of the other metrics are in fact exact sums of histograms. +func (c *goCollector) exactSumFor(rmName string) float64 { + sumName, ok := rmExactSumMap[rmName] + if !ok { + return 0 + } + s, ok := c.rmSampleMap[sumName] + if !ok { + return 0 + } + return unwrapScalarRMValue(s.Value) +} + +func memStatsFromRM(ms *runtime.MemStats, rm map[string]*metrics.Sample) { + lookupOrZero := func(name string) uint64 { + if s, ok := rm[name]; ok { + return s.Value.Uint64() + } + return 0 + } + + // Currently, MemStats adds tiny alloc count to both Mallocs AND Frees. + // The reason for this is because MemStats couldn't be extended at the time + // but there was a desire to have Mallocs at least be a little more representative, + // while having Mallocs - Frees still represent a live object count. + // Unfortunately, MemStats doesn't actually export a large allocation count, + // so it's impossible to pull this number out directly. 
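// Aside: the same "never let a counter go backwards" guard used in the switch
// above, sketched against the public client_golang API. The counterMirror type
// and the metric name are hypothetical.
package main

import "github.com/prometheus/client_golang/prometheus"

type counterMirror struct {
	c    prometheus.Counter
	prev float64
}

var requestsMirror = &counterMirror{
	c: prometheus.NewCounter(prometheus.CounterOpts{
		Name: "mirrored_requests_total",
		Help: "Cumulative total mirrored from an external source.",
	}),
}

func (m *counterMirror) observe(total float64) {
	// Add only the positive delta; an unexpected decrease is dropped instead of
	// letting Counter.Add panic on a negative value.
	if total > m.prev {
		m.c.Add(total - m.prev)
		m.prev = total
	}
}

func main() {
	requestsMirror.observe(10) // first reading of the external total
	requestsMirror.observe(25) // adds the delta of 15
}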
+ tinyAllocs := lookupOrZero("/gc/heap/tiny/allocs:objects") + ms.Mallocs = lookupOrZero("/gc/heap/allocs:objects") + tinyAllocs + ms.Frees = lookupOrZero("/gc/heap/frees:objects") + tinyAllocs + + ms.TotalAlloc = lookupOrZero("/gc/heap/allocs:bytes") + ms.Sys = lookupOrZero("/memory/classes/total:bytes") + ms.Lookups = 0 // Already always zero. + ms.HeapAlloc = lookupOrZero("/memory/classes/heap/objects:bytes") + ms.Alloc = ms.HeapAlloc + ms.HeapInuse = ms.HeapAlloc + lookupOrZero("/memory/classes/heap/unused:bytes") + ms.HeapReleased = lookupOrZero("/memory/classes/heap/released:bytes") + ms.HeapIdle = ms.HeapReleased + lookupOrZero("/memory/classes/heap/free:bytes") + ms.HeapSys = ms.HeapInuse + ms.HeapIdle + ms.HeapObjects = lookupOrZero("/gc/heap/objects:objects") + ms.StackInuse = lookupOrZero("/memory/classes/heap/stacks:bytes") + ms.StackSys = ms.StackInuse + lookupOrZero("/memory/classes/os-stacks:bytes") + ms.MSpanInuse = lookupOrZero("/memory/classes/metadata/mspan/inuse:bytes") + ms.MSpanSys = ms.MSpanInuse + lookupOrZero("/memory/classes/metadata/mspan/free:bytes") + ms.MCacheInuse = lookupOrZero("/memory/classes/metadata/mcache/inuse:bytes") + ms.MCacheSys = ms.MCacheInuse + lookupOrZero("/memory/classes/metadata/mcache/free:bytes") + ms.BuckHashSys = lookupOrZero("/memory/classes/profiling/buckets:bytes") + ms.GCSys = lookupOrZero("/memory/classes/metadata/other:bytes") + ms.OtherSys = lookupOrZero("/memory/classes/other:bytes") + ms.NextGC = lookupOrZero("/gc/heap/goal:bytes") + + // N.B. LastGC is omitted because runtime.GCStats already has this. + // See https://github.com/prometheus/client_golang/issues/842#issuecomment-861812034 + // for more details. + ms.LastGC = 0 + + // N.B. GCCPUFraction is intentionally omitted. This metric is not useful, + // and often misleading due to the fact that it's an average over the lifetime + // of the process. + // See https://github.com/prometheus/client_golang/issues/842#issuecomment-861812034 + // for more details. + ms.GCCPUFraction = 0 +} + +// batchHistogram is a mutable histogram that is updated +// in batches. +type batchHistogram struct { + selfCollector + + // Static fields updated only once. + desc *Desc + hasSum bool + + // Because this histogram operates in batches, it just uses a + // single mutex for everything. updates are always serialized + // but Write calls may operate concurrently with updates. + // Contention between these two sources should be rare. + mu sync.Mutex + buckets []float64 // Inclusive lower bounds, like runtime/metrics. + counts []uint64 + sum float64 // Used if hasSum is true. +} + +// newBatchHistogram creates a new batch histogram value with the given +// Desc, buckets, and whether or not it has an exact sum available. +// +// buckets must always be from the runtime/metrics package, following +// the same conventions. +func newBatchHistogram(desc *Desc, buckets []float64, hasSum bool) *batchHistogram { + h := &batchHistogram{ + desc: desc, + buckets: buckets, + // Because buckets follows runtime/metrics conventions, there's + // 1 more value in the buckets list than there are buckets represented, + // because in runtime/metrics, the bucket values represent *boundaries*, + // and non-Inf boundaries are inclusive lower bounds for that bucket. + counts: make([]uint64, len(buckets)-1), + hasSum: hasSum, + } + h.init(h) + return h +} + +// update updates the batchHistogram from a runtime/metrics histogram. 
+// +// sum must be provided if the batchHistogram was created to have an exact sum. +// h.buckets must be a strict subset of his.Buckets. +func (h *batchHistogram) update(his *metrics.Float64Histogram, sum float64) { + counts, buckets := his.Counts, his.Buckets + + h.mu.Lock() + defer h.mu.Unlock() + + // Clear buckets. + for i := range h.counts { + h.counts[i] = 0 + } + // Copy and reduce buckets. + var j int + for i, count := range counts { + h.counts[j] += count + if buckets[i+1] == h.buckets[j+1] { + j++ + } + } + if h.hasSum { + h.sum = sum + } +} + +func (h *batchHistogram) Desc() *Desc { + return h.desc +} + +func (h *batchHistogram) Write(out *dto.Metric) error { + h.mu.Lock() + defer h.mu.Unlock() + + sum := float64(0) + if h.hasSum { + sum = h.sum + } + dtoBuckets := make([]*dto.Bucket, 0, len(h.counts)) + totalCount := uint64(0) + for i, count := range h.counts { + totalCount += count + if !h.hasSum { + // N.B. This computed sum is an underestimate. + sum += h.buckets[i] * float64(count) + } + + // Skip the +Inf bucket, but only for the bucket list. + // It must still count for sum and totalCount. + if math.IsInf(h.buckets[i+1], 1) { + break + } + // Float64Histogram's upper bound is exclusive, so make it inclusive + // by obtaining the next float64 value down, in order. + upperBound := math.Nextafter(h.buckets[i+1], h.buckets[i]) + dtoBuckets = append(dtoBuckets, &dto.Bucket{ + CumulativeCount: proto.Uint64(totalCount), + UpperBound: proto.Float64(upperBound), + }) + } + out.Histogram = &dto.Histogram{ + Bucket: dtoBuckets, + SampleCount: proto.Uint64(totalCount), + SampleSum: proto.Float64(sum), + } + return nil +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go index d4ea301a3..893802fd6 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/histogram.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/histogram.go @@ -22,7 +22,7 @@ import ( "sync/atomic" "time" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" dto "github.com/prometheus/client_model/go" @@ -47,7 +47,12 @@ type Histogram interface { Metric Collector - // Observe adds a single observation to the histogram. + // Observe adds a single observation to the histogram. Observations are + // usually positive or zero. Negative observations are accepted but + // prevent current versions of Prometheus from properly detecting + // counter resets in the sum of observations. See + // https://prometheus.io/docs/practices/histograms/#count-and-sum-of-observations + // for details. Observe(float64) } @@ -111,6 +116,34 @@ func ExponentialBuckets(start, factor float64, count int) []float64 { return buckets } +// ExponentialBucketsRange creates 'count' buckets, where the lowest bucket is +// 'min' and the highest bucket is 'max'. The final +Inf bucket is not counted +// and not included in the returned slice. The returned slice is meant to be +// used for the Buckets field of HistogramOpts. +// +// The function panics if 'count' is 0 or negative, if 'min' is 0 or negative. 
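// Illustrative usage of the new ExponentialBucketsRange helper for the Buckets
// field of HistogramOpts; the metric name and bounds are hypothetical.
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// 12 buckets growing exponentially from 1ms to 10s (the +Inf bucket is implicit).
	buckets := prometheus.ExponentialBucketsRange(0.001, 10, 12)
	fmt.Println(buckets)

	_ = prometheus.NewHistogram(prometheus.HistogramOpts{
		Name:    "example_request_duration_seconds",
		Help:    "Request latency.",
		Buckets: buckets,
	})
}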
+func ExponentialBucketsRange(min, max float64, count int) []float64 { + if count < 1 { + panic("ExponentialBucketsRange count needs a positive count") + } + if min <= 0 { + panic("ExponentialBucketsRange min needs to be greater than 0") + } + + // Formula for exponential buckets. + // max = min*growthFactor^(bucketCount-1) + + // We know max/min and highest bucket. Solve for growthFactor. + growthFactor := math.Pow(max/min, 1.0/float64(count-1)) + + // Now that we know growthFactor, solve for each bucket. + buckets := make([]float64, count) + for i := 1; i <= count; i++ { + buckets[i-1] = min * math.Pow(growthFactor, float64(i-1)) + } + return buckets +} + // HistogramOpts bundles the options for creating a Histogram metric. It is // mandatory to set Name to a non-empty string. All other fields are optional // and can safely be left at their zero value, although it is strongly @@ -192,7 +225,7 @@ func newHistogram(desc *Desc, opts HistogramOpts, labelValues ...string) Histogr h := &histogram{ desc: desc, upperBounds: opts.Buckets, - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), counts: [2]*histogramCounts{{}, {}}, now: time.Now, } @@ -409,7 +442,7 @@ func (h *histogram) updateExemplar(v float64, bucket int, l Labels) { // (e.g. HTTP request latencies, partitioned by status code and method). Create // instances with NewHistogramVec. type HistogramVec struct { - *metricVec + *MetricVec } // NewHistogramVec creates a new HistogramVec based on the provided HistogramOpts and @@ -422,14 +455,14 @@ func NewHistogramVec(opts HistogramOpts, labelNames []string) *HistogramVec { opts.ConstLabels, ) return &HistogramVec{ - metricVec: newMetricVec(desc, func(lvs ...string) Metric { + MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { return newHistogram(desc, opts, lvs...) }), } } // GetMetricWithLabelValues returns the Histogram for the given slice of label -// values (same order as the VariableLabels in Desc). If that combination of +// values (same order as the variable labels in Desc). If that combination of // label values is accessed for the first time, a new Histogram is created. // // It is possible to call this method without using the returned Histogram to only @@ -444,7 +477,7 @@ func NewHistogramVec(opts HistogramOpts, labelNames []string) *HistogramVec { // example. // // An error is returned if the number of label values is not the same as the -// number of VariableLabels in Desc (minus any curried labels). +// number of variable labels in Desc (minus any curried labels). // // Note that for more than one label value, this method is prone to mistakes // caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as @@ -453,7 +486,7 @@ func NewHistogramVec(opts HistogramOpts, labelNames []string) *HistogramVec { // with a performance overhead (for creating and processing the Labels map). // See also the GaugeVec example. func (v *HistogramVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) { - metric, err := v.metricVec.getMetricWithLabelValues(lvs...) + metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) if metric != nil { return metric.(Observer), err } @@ -461,19 +494,19 @@ func (v *HistogramVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) } // GetMetricWith returns the Histogram for the given Labels map (the label names -// must match those of the VariableLabels in Desc). If that label map is +// must match those of the variable labels in Desc). 
If that label map is // accessed for the first time, a new Histogram is created. Implications of // creating a Histogram without using it and keeping the Histogram for later use // are the same as for GetMetricWithLabelValues. // // An error is returned if the number and names of the Labels are inconsistent -// with those of the VariableLabels in Desc (minus any curried labels). +// with those of the variable labels in Desc (minus any curried labels). // // This method is used for the same purpose as // GetMetricWithLabelValues(...string). See there for pros and cons of the two // methods. func (v *HistogramVec) GetMetricWith(labels Labels) (Observer, error) { - metric, err := v.metricVec.getMetricWith(labels) + metric, err := v.MetricVec.GetMetricWith(labels) if metric != nil { return metric.(Observer), err } @@ -517,7 +550,7 @@ func (v *HistogramVec) With(labels Labels) Observer { // registered with a given registry (usually the uncurried version). The Reset // method deletes all metrics, even if called on a curried vector. func (v *HistogramVec) CurryWith(labels Labels) (ObserverVec, error) { - vec, err := v.curryWith(labels) + vec, err := v.MetricVec.CurryWith(labels) if vec != nil { return &HistogramVec{vec}, err } @@ -602,7 +635,7 @@ func NewConstHistogram( count: count, sum: sum, buckets: buckets, - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), }, nil } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go new file mode 100644 index 000000000..fe0a52180 --- /dev/null +++ b/vendor/github.com/prometheus/client_golang/prometheus/internal/go_runtime_metrics.go @@ -0,0 +1,142 @@ +// Copyright 2021 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:build go1.17 +// +build go1.17 + +package internal + +import ( + "math" + "path" + "runtime/metrics" + "strings" + + "github.com/prometheus/common/model" +) + +// RuntimeMetricsToProm produces a Prometheus metric name from a runtime/metrics +// metric description and validates whether the metric is suitable for integration +// with Prometheus. +// +// Returns false if a name could not be produced, or if Prometheus does not understand +// the runtime/metrics Kind. +// +// Note that the main reason a name couldn't be produced is if the runtime/metrics +// package exports a name with characters outside the valid Prometheus metric name +// character set. This is theoretically possible, but should never happen in practice. +// Still, don't rely on it. +func RuntimeMetricsToProm(d *metrics.Description) (string, string, string, bool) { + namespace := "go" + + comp := strings.SplitN(d.Name, ":", 2) + key := comp[0] + unit := comp[1] + + // The last path element in the key is the name, + // the rest is the subsystem. 
+ subsystem := path.Dir(key[1:] /* remove leading / */) + name := path.Base(key) + + // subsystem is translated by replacing all / and - with _. + subsystem = strings.ReplaceAll(subsystem, "/", "_") + subsystem = strings.ReplaceAll(subsystem, "-", "_") + + // unit is translated assuming that the unit contains no + // non-ASCII characters. + unit = strings.ReplaceAll(unit, "-", "_") + unit = strings.ReplaceAll(unit, "*", "_") + unit = strings.ReplaceAll(unit, "/", "_per_") + + // name has - replaced with _ and is concatenated with the unit and + // other data. + name = strings.ReplaceAll(name, "-", "_") + name = name + "_" + unit + if d.Cumulative { + name = name + "_total" + } + + valid := model.IsValidMetricName(model.LabelValue(namespace + "_" + subsystem + "_" + name)) + switch d.Kind { + case metrics.KindUint64: + case metrics.KindFloat64: + case metrics.KindFloat64Histogram: + default: + valid = false + } + return namespace, subsystem, name, valid +} + +// RuntimeMetricsBucketsForUnit takes a set of buckets obtained for a runtime/metrics histogram +// type (so, lower-bound inclusive) and a unit from a runtime/metrics name, and produces +// a reduced set of buckets. This function always removes any -Inf bucket as it's represented +// as the bottom-most upper-bound inclusive bucket in Prometheus. +func RuntimeMetricsBucketsForUnit(buckets []float64, unit string) []float64 { + switch unit { + case "bytes": + // Rebucket as powers of 2. + return rebucketExp(buckets, 2) + case "seconds": + // Rebucket as powers of 10 and then merge all buckets greater + // than 1 second into the +Inf bucket. + b := rebucketExp(buckets, 10) + for i := range b { + if b[i] <= 1 { + continue + } + b[i] = math.Inf(1) + b = b[:i+1] + break + } + return b + } + return buckets +} + +// rebucketExp takes a list of bucket boundaries (lower bound inclusive) and +// downsamples the buckets to those a multiple of base apart. The end result +// is a roughly exponential (in many cases, perfectly exponential) bucketing +// scheme. +func rebucketExp(buckets []float64, base float64) []float64 { + bucket := buckets[0] + var newBuckets []float64 + // We may see a -Inf here, in which case, add it and skip it + // since we risk producing NaNs otherwise. + // + // We need to preserve -Inf values to maintain runtime/metrics + // conventions. We'll strip it out later. + if bucket == math.Inf(-1) { + newBuckets = append(newBuckets, bucket) + buckets = buckets[1:] + bucket = buckets[0] + } + // From now on, bucket should always have a non-Inf value because + // Infs are only ever at the ends of the bucket lists, so + // arithmetic operations on it are non-NaN. + for i := 1; i < len(buckets); i++ { + if bucket >= 0 && buckets[i] < bucket*base { + // The next bucket we want to include is at least bucket*base. + continue + } else if bucket < 0 && buckets[i] < bucket/base { + // In this case the bucket we're targeting is negative, and since + // we're ascending through buckets here, we need to divide to get + // closer to zero exponentially. 
+ continue + } + // The +Inf bucket will always be the last one, and we'll always + // end up including it here because bucket + newBuckets = append(newBuckets, bucket) + bucket = buckets[i] + } + return append(newBuckets, bucket) +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/metric.go b/vendor/github.com/prometheus/client_golang/prometheus/metric.go index 35bd8bde3..dc121910a 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/metric.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/metric.go @@ -17,7 +17,7 @@ import ( "strings" "time" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" "github.com/prometheus/common/model" @@ -58,7 +58,7 @@ type Metric interface { } // Opts bundles the options for creating most Metric types. Each metric -// implementation XXX has its own XXXOpts type, but in most cases, it is just be +// implementation XXX has its own XXXOpts type, but in most cases, it is just // an alias of this type (which might change when the requirement arises.) // // It is mandatory to set Name to a non-empty string. All other fields are @@ -89,7 +89,7 @@ type Opts struct { // better covered by target labels set by the scraping Prometheus // server, or by one specific metric (e.g. a build_info or a // machine_role metric). See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels,-not-static-scraped-labels + // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels ConstLabels Labels } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go index 9b8097942..5bfe0ff5b 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector.go @@ -15,7 +15,11 @@ package prometheus import ( "errors" + "fmt" + "io/ioutil" "os" + "strconv" + "strings" ) type processCollector struct { @@ -50,16 +54,10 @@ type ProcessCollectorOpts struct { ReportErrors bool } -// NewProcessCollector returns a collector which exports the current state of -// process metrics including CPU, memory and file descriptor usage as well as -// the process start time. The detailed behavior is defined by the provided -// ProcessCollectorOpts. The zero value of ProcessCollectorOpts creates a -// collector for the current process with an empty namespace string and no error -// reporting. +// NewProcessCollector is the obsolete version of collectors.NewProcessCollector. +// See there for documentation. // -// The collector only works on operating systems with a Linux-style proc -// filesystem and on Microsoft Windows. On other operating systems, it will not -// collect any metrics. +// Deprecated: Use collectors.NewProcessCollector instead. func NewProcessCollector(opts ProcessCollectorOpts) Collector { ns := "" if len(opts.Namespace) > 0 { @@ -149,3 +147,20 @@ func (c *processCollector) reportError(ch chan<- Metric, desc *Desc, err error) } ch <- NewInvalidMetric(desc, err) } + +// NewPidFileFn returns a function that retrieves a pid from the specified file. +// It is meant to be used for the PidFn field in ProcessCollectorOpts. 
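// Illustrative usage of the new NewPidFileFn helper with a process collector,
// going through the collectors package that the deprecation note above points
// to. The pid-file path and namespace are hypothetical.
package main

import (
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/collectors"
)

func main() {
	// Collect process metrics for another process identified by its pid file.
	pc := collectors.NewProcessCollector(prometheus.ProcessCollectorOpts{
		PidFn:     prometheus.NewPidFileFn("/var/run/sidecar.pid"),
		Namespace: "sidecar",
	})
	prometheus.MustRegister(pc)
}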
+func NewPidFileFn(pidFilePath string) func() (int, error) { + return func() (int, error) { + content, err := ioutil.ReadFile(pidFilePath) + if err != nil { + return 0, fmt.Errorf("can't read pid file %q: %+v", pidFilePath, err) + } + pid, err := strconv.Atoi(strings.TrimSpace(string(content))) + if err != nil { + return 0, fmt.Errorf("can't parse pid file %q: %+v", pidFilePath, err) + } + + return pid, nil + } +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go index 3117461cd..2dc3660da 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/process_collector_other.go @@ -11,6 +11,7 @@ // See the License for the specific language governing permissions and // limitations under the License. +//go:build !windows // +build !windows package prometheus diff --git a/vendor/github.com/prometheus/client_golang/prometheus/registry.go b/vendor/github.com/prometheus/client_golang/prometheus/registry.go index ba94405af..383a7f594 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/registry.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/registry.go @@ -26,7 +26,7 @@ import ( "unicode/utf8" "github.com/cespare/xxhash/v2" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" "github.com/prometheus/common/expfmt" @@ -215,6 +215,8 @@ func (err AlreadyRegisteredError) Error() string { // by a Gatherer to report multiple errors during MetricFamily gathering. type MultiError []error +// Error formats the contained errors as a bullet point list, preceded by the +// total number of errors. Note that this results in a multi-line string. func (errs MultiError) Error() string { if len(errs) == 0 { return "" diff --git a/vendor/github.com/prometheus/client_golang/prometheus/summary.go b/vendor/github.com/prometheus/client_golang/prometheus/summary.go index f3c1440d1..c5fa8ed7c 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/summary.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/summary.go @@ -23,7 +23,7 @@ import ( "time" "github.com/beorn7/perks/quantile" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" dto "github.com/prometheus/client_model/go" @@ -55,7 +55,12 @@ type Summary interface { Metric Collector - // Observe adds a single observation to the summary. + // Observe adds a single observation to the summary. Observations are + // usually positive or zero. Negative observations are accepted but + // prevent current versions of Prometheus from properly detecting + // counter resets in the sum of observations. See + // https://prometheus.io/docs/practices/histograms/#count-and-sum-of-observations + // for details. Observe(float64) } @@ -110,7 +115,7 @@ type SummaryOpts struct { // better covered by target labels set by the scraping Prometheus // server, or by one specific metric (e.g. a build_info or a // machine_role metric). 
See also - // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels,-not-static-scraped-labels + // https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels ConstLabels Labels // Objectives defines the quantile rank estimates with their respective @@ -121,7 +126,9 @@ type SummaryOpts struct { Objectives map[float64]float64 // MaxAge defines the duration for which an observation stays relevant - // for the summary. Must be positive. The default value is DefMaxAge. + // for the summary. Only applies to pre-calculated quantiles, does not + // apply to _sum and _count. Must be positive. The default value is + // DefMaxAge. MaxAge time.Duration // AgeBuckets is the number of buckets used to exclude observations that @@ -208,7 +215,7 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { // Use the lock-free implementation of a Summary without objectives. s := &noObjectivesSummary{ desc: desc, - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), counts: [2]*summaryCounts{{}, {}}, } s.init(s) // Init self-collection. @@ -221,7 +228,7 @@ func newSummary(desc *Desc, opts SummaryOpts, labelValues ...string) Summary { objectives: opts.Objectives, sortedObjectives: make([]float64, 0, len(opts.Objectives)), - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), hotBuf: make([]float64, 0, opts.BufCap), coldBuf: make([]float64, 0, opts.BufCap), @@ -513,7 +520,7 @@ func (s quantSort) Less(i, j int) bool { // (e.g. HTTP request latencies, partitioned by status code and method). Create // instances with NewSummaryVec. type SummaryVec struct { - *metricVec + *MetricVec } // NewSummaryVec creates a new SummaryVec based on the provided SummaryOpts and @@ -535,14 +542,14 @@ func NewSummaryVec(opts SummaryOpts, labelNames []string) *SummaryVec { opts.ConstLabels, ) return &SummaryVec{ - metricVec: newMetricVec(desc, func(lvs ...string) Metric { + MetricVec: NewMetricVec(desc, func(lvs ...string) Metric { return newSummary(desc, opts, lvs...) }), } } // GetMetricWithLabelValues returns the Summary for the given slice of label -// values (same order as the VariableLabels in Desc). If that combination of +// values (same order as the variable labels in Desc). If that combination of // label values is accessed for the first time, a new Summary is created. // // It is possible to call this method without using the returned Summary to only @@ -557,7 +564,7 @@ func NewSummaryVec(opts SummaryOpts, labelNames []string) *SummaryVec { // example. // // An error is returned if the number of label values is not the same as the -// number of VariableLabels in Desc (minus any curried labels). +// number of variable labels in Desc (minus any curried labels). // // Note that for more than one label value, this method is prone to mistakes // caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as @@ -566,7 +573,7 @@ func NewSummaryVec(opts SummaryOpts, labelNames []string) *SummaryVec { // with a performance overhead (for creating and processing the Labels map). // See also the GaugeVec example. func (v *SummaryVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) { - metric, err := v.metricVec.getMetricWithLabelValues(lvs...) + metric, err := v.MetricVec.GetMetricWithLabelValues(lvs...) 
if metric != nil { return metric.(Observer), err } @@ -574,19 +581,19 @@ func (v *SummaryVec) GetMetricWithLabelValues(lvs ...string) (Observer, error) { } // GetMetricWith returns the Summary for the given Labels map (the label names -// must match those of the VariableLabels in Desc). If that label map is +// must match those of the variable labels in Desc). If that label map is // accessed for the first time, a new Summary is created. Implications of // creating a Summary without using it and keeping the Summary for later use are // the same as for GetMetricWithLabelValues. // // An error is returned if the number and names of the Labels are inconsistent -// with those of the VariableLabels in Desc (minus any curried labels). +// with those of the variable labels in Desc (minus any curried labels). // // This method is used for the same purpose as // GetMetricWithLabelValues(...string). See there for pros and cons of the two // methods. func (v *SummaryVec) GetMetricWith(labels Labels) (Observer, error) { - metric, err := v.metricVec.getMetricWith(labels) + metric, err := v.MetricVec.GetMetricWith(labels) if metric != nil { return metric.(Observer), err } @@ -630,7 +637,7 @@ func (v *SummaryVec) With(labels Labels) Observer { // registered with a given registry (usually the uncurried version). The Reset // method deletes all metrics, even if called on a curried vector. func (v *SummaryVec) CurryWith(labels Labels) (ObserverVec, error) { - vec, err := v.curryWith(labels) + vec, err := v.MetricVec.CurryWith(labels) if vec != nil { return &SummaryVec{vec}, err } @@ -716,7 +723,7 @@ func NewConstSummary( count: count, sum: sum, quantiles: quantiles, - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), }, nil } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/value.go b/vendor/github.com/prometheus/client_golang/prometheus/value.go index 6206928cc..b4e0ae11c 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/value.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/value.go @@ -19,9 +19,9 @@ import ( "time" "unicode/utf8" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" - "github.com/golang/protobuf/ptypes" + "google.golang.org/protobuf/types/known/timestamppb" dto "github.com/prometheus/client_model/go" ) @@ -63,7 +63,7 @@ func newValueFunc(desc *Desc, valueType ValueType, function func() float64) *val desc: desc, valType: valueType, function: function, - labelPairs: makeLabelPairs(desc, nil), + labelPairs: MakeLabelPairs(desc, nil), } result.init(result) return result @@ -95,7 +95,7 @@ func NewConstMetric(desc *Desc, valueType ValueType, value float64, labelValues desc: desc, valType: valueType, val: value, - labelPairs: makeLabelPairs(desc, labelValues), + labelPairs: MakeLabelPairs(desc, labelValues), }, nil } @@ -145,7 +145,14 @@ func populateMetric( return nil } -func makeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair { +// MakeLabelPairs is a helper function to create protobuf LabelPairs from the +// variable and constant labels in the provided Desc. The values for the +// variable labels are defined by the labelValues slice, which must be in the +// same order as the corresponding variable labels in the Desc. 
+// +// This function is only needed for custom Metric implementations. See MetricVec +// example. +func MakeLabelPairs(desc *Desc, labelValues []string) []*dto.LabelPair { totalLen := len(desc.variableLabels) + len(desc.constLabelPairs) if totalLen == 0 { // Super fast path. @@ -176,8 +183,8 @@ const ExemplarMaxRunes = 64 func newExemplar(value float64, ts time.Time, l Labels) (*dto.Exemplar, error) { e := &dto.Exemplar{} e.Value = proto.Float64(value) - tsProto, err := ptypes.TimestampProto(ts) - if err != nil { + tsProto := timestamppb.New(ts) + if err := tsProto.CheckValid(); err != nil { return nil, err } e.Timestamp = tsProto diff --git a/vendor/github.com/prometheus/client_golang/prometheus/vec.go b/vendor/github.com/prometheus/client_golang/prometheus/vec.go index d53848dc4..4ababe6c9 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/vec.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/vec.go @@ -20,12 +20,20 @@ import ( "github.com/prometheus/common/model" ) -// metricVec is a Collector to bundle metrics of the same name that differ in -// their label values. metricVec is not used directly (and therefore -// unexported). It is used as a building block for implementations of vectors of -// a given metric type, like GaugeVec, CounterVec, SummaryVec, and HistogramVec. -// It also handles label currying. -type metricVec struct { +// MetricVec is a Collector to bundle metrics of the same name that differ in +// their label values. MetricVec is not used directly but as a building block +// for implementations of vectors of a given metric type, like GaugeVec, +// CounterVec, SummaryVec, and HistogramVec. It is exported so that it can be +// used for custom Metric implementations. +// +// To create a FooVec for custom Metric Foo, embed a pointer to MetricVec in +// FooVec and initialize it with NewMetricVec. Implement wrappers for +// GetMetricWithLabelValues and GetMetricWith that return (Foo, error) rather +// than (Metric, error). Similarly, create a wrapper for CurryWith that returns +// (*FooVec, error) rather than (*MetricVec, error). It is recommended to also +// add the convenience methods WithLabelValues, With, and MustCurryWith, which +// panic instead of returning errors. See also the MetricVec example. +type MetricVec struct { *metricMap curry []curriedLabelValue @@ -35,9 +43,9 @@ type metricVec struct { hashAddByte func(h uint64, b byte) uint64 } -// newMetricVec returns an initialized metricVec. -func newMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *metricVec { - return &metricVec{ +// NewMetricVec returns an initialized metricVec. +func NewMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *MetricVec { + return &MetricVec{ metricMap: &metricMap{ metrics: map[uint64][]metricWithLabelValues{}, desc: desc, @@ -63,7 +71,7 @@ func newMetricVec(desc *Desc, newMetric func(lvs ...string) Metric) *metricVec { // latter has a much more readable (albeit more verbose) syntax, but it comes // with a performance overhead (for creating and processing the Labels map). // See also the CounterVec example. -func (m *metricVec) DeleteLabelValues(lvs ...string) bool { +func (m *MetricVec) DeleteLabelValues(lvs ...string) bool { h, err := m.hashLabelValues(lvs) if err != nil { return false @@ -82,7 +90,7 @@ func (m *metricVec) DeleteLabelValues(lvs ...string) bool { // // This method is used for the same purpose as DeleteLabelValues(...string). See // there for pros and cons of the two methods. 
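// Sketch of a custom vector built on the now-exported MetricVec, along the
// lines the FooVec comment above suggests. InfoVec, NewInfoVec, and the metric
// name are hypothetical; only the client_golang calls are real API.
package main

import "github.com/prometheus/client_golang/prometheus"

type InfoVec struct {
	*prometheus.MetricVec
}

func NewInfoVec(name, help string, labelNames []string) *InfoVec {
	desc := prometheus.NewDesc(name, help, labelNames, nil)
	return &InfoVec{
		MetricVec: prometheus.NewMetricVec(desc, func(lvs ...string) prometheus.Metric {
			// A constant "1" gauge per label combination; MakeLabelPairs would only
			// be needed when implementing the Metric interface from scratch.
			m, err := prometheus.NewConstMetric(desc, prometheus.GaugeValue, 1, lvs...)
			if err != nil {
				panic(err)
			}
			return m
		}),
	}
}

func (v *InfoVec) WithLabelValues(lvs ...string) prometheus.Metric {
	m, err := v.MetricVec.GetMetricWithLabelValues(lvs...)
	if err != nil {
		panic(err)
	}
	return m
}

func main() {
	v := NewInfoVec("example_build_info", "Build information.", []string{"version", "goversion"})
	prometheus.MustRegister(v)
	v.WithLabelValues("1.2.3", "go1.17")
}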
-func (m *metricVec) Delete(labels Labels) bool { +func (m *MetricVec) Delete(labels Labels) bool { h, err := m.hashLabels(labels) if err != nil { return false @@ -95,15 +103,32 @@ func (m *metricVec) Delete(labels Labels) bool { // show up in GoDoc. // Describe implements Collector. -func (m *metricVec) Describe(ch chan<- *Desc) { m.metricMap.Describe(ch) } +func (m *MetricVec) Describe(ch chan<- *Desc) { m.metricMap.Describe(ch) } // Collect implements Collector. -func (m *metricVec) Collect(ch chan<- Metric) { m.metricMap.Collect(ch) } +func (m *MetricVec) Collect(ch chan<- Metric) { m.metricMap.Collect(ch) } // Reset deletes all metrics in this vector. -func (m *metricVec) Reset() { m.metricMap.Reset() } - -func (m *metricVec) curryWith(labels Labels) (*metricVec, error) { +func (m *MetricVec) Reset() { m.metricMap.Reset() } + +// CurryWith returns a vector curried with the provided labels, i.e. the +// returned vector has those labels pre-set for all labeled operations performed +// on it. The cardinality of the curried vector is reduced accordingly. The +// order of the remaining labels stays the same (just with the curried labels +// taken out of the sequence – which is relevant for the +// (GetMetric)WithLabelValues methods). It is possible to curry a curried +// vector, but only with labels not yet used for currying before. +// +// The metrics contained in the MetricVec are shared between the curried and +// uncurried vectors. They are just accessed differently. Curried and uncurried +// vectors behave identically in terms of collection. Only one must be +// registered with a given registry (usually the uncurried version). The Reset +// method deletes all metrics, even if called on a curried vector. +// +// Note that CurryWith is usually not called directly but through a wrapper +// around MetricVec, implementing a vector for a specific Metric +// implementation, for example GaugeVec. +func (m *MetricVec) CurryWith(labels Labels) (*MetricVec, error) { var ( newCurry []curriedLabelValue oldCurry = m.curry @@ -128,7 +153,7 @@ func (m *metricVec) curryWith(labels Labels) (*metricVec, error) { return nil, fmt.Errorf("%d unknown label(s) found during currying", l) } - return &metricVec{ + return &MetricVec{ metricMap: m.metricMap, curry: newCurry, hashAdd: m.hashAdd, @@ -136,7 +161,34 @@ func (m *metricVec) curryWith(labels Labels) (*metricVec, error) { }, nil } -func (m *metricVec) getMetricWithLabelValues(lvs ...string) (Metric, error) { +// GetMetricWithLabelValues returns the Metric for the given slice of label +// values (same order as the variable labels in Desc). If that combination of +// label values is accessed for the first time, a new Metric is created (by +// calling the newMetric function provided during construction of the +// MetricVec). +// +// It is possible to call this method without using the returned Metric to only +// create the new Metric but leave it in its initial state. +// +// Keeping the Metric for later use is possible (and should be considered if +// performance is critical), but keep in mind that Reset, DeleteLabelValues and +// Delete can be used to delete the Metric from the MetricVec. In that case, the +// Metric will still exist, but it will not be exported anymore, even if a +// Metric with the same label values is created later. +// +// An error is returned if the number of label values is not the same as the +// number of variable labels in Desc (minus any curried labels). 
+// +// Note that for more than one label value, this method is prone to mistakes +// caused by an incorrect order of arguments. Consider GetMetricWith(Labels) as +// an alternative to avoid that type of mistake. For higher label numbers, the +// latter has a much more readable (albeit more verbose) syntax, but it comes +// with a performance overhead (for creating and processing the Labels map). +// +// Note that GetMetricWithLabelValues is usually not called directly but through +// a wrapper around MetricVec, implementing a vector for a specific Metric +// implementation, for example GaugeVec. +func (m *MetricVec) GetMetricWithLabelValues(lvs ...string) (Metric, error) { h, err := m.hashLabelValues(lvs) if err != nil { return nil, err @@ -145,7 +197,23 @@ func (m *metricVec) getMetricWithLabelValues(lvs ...string) (Metric, error) { return m.metricMap.getOrCreateMetricWithLabelValues(h, lvs, m.curry), nil } -func (m *metricVec) getMetricWith(labels Labels) (Metric, error) { +// GetMetricWith returns the Metric for the given Labels map (the label names +// must match those of the variable labels in Desc). If that label map is +// accessed for the first time, a new Metric is created. Implications of +// creating a Metric without using it and keeping the Metric for later use +// are the same as for GetMetricWithLabelValues. +// +// An error is returned if the number and names of the Labels are inconsistent +// with those of the variable labels in Desc (minus any curried labels). +// +// This method is used for the same purpose as +// GetMetricWithLabelValues(...string). See there for pros and cons of the two +// methods. +// +// Note that GetMetricWith is usually not called directly but through a wrapper +// around MetricVec, implementing a vector for a specific Metric implementation, +// for example GaugeVec. +func (m *MetricVec) GetMetricWith(labels Labels) (Metric, error) { h, err := m.hashLabels(labels) if err != nil { return nil, err @@ -154,7 +222,7 @@ func (m *metricVec) getMetricWith(labels Labels) (Metric, error) { return m.metricMap.getOrCreateMetricWithLabels(h, labels, m.curry), nil } -func (m *metricVec) hashLabelValues(vals []string) (uint64, error) { +func (m *MetricVec) hashLabelValues(vals []string) (uint64, error) { if err := validateLabelValues(vals, len(m.desc.variableLabels)-len(m.curry)); err != nil { return 0, err } @@ -177,7 +245,7 @@ func (m *metricVec) hashLabelValues(vals []string) (uint64, error) { return h, nil } -func (m *metricVec) hashLabels(labels Labels) (uint64, error) { +func (m *MetricVec) hashLabels(labels Labels) (uint64, error) { if err := validateValuesInLabels(labels, len(m.desc.variableLabels)-len(m.curry)); err != nil { return 0, err } @@ -276,7 +344,9 @@ func (m *metricMap) deleteByHashWithLabelValues( } if len(metrics) > 1 { + old := metrics m.metrics[h] = append(metrics[:i], metrics[i+1:]...) + old[len(old)-1] = metricWithLabelValues{} } else { delete(m.metrics, h) } @@ -302,7 +372,9 @@ func (m *metricMap) deleteByHashWithLabels( } if len(metrics) > 1 { + old := metrics m.metrics[h] = append(metrics[:i], metrics[i+1:]...) 
+ old[len(old)-1] = metricWithLabelValues{} } else { delete(m.metrics, h) } diff --git a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go index 438aa5e92..74ee93280 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/wrap.go +++ b/vendor/github.com/prometheus/client_golang/prometheus/wrap.go @@ -17,7 +17,7 @@ import ( "fmt" "sort" - //lint:ignore SA1019 Need to keep deprecated package for compatibility. + //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/golang/protobuf/proto" dto "github.com/prometheus/client_model/go" @@ -32,7 +32,9 @@ import ( // in a no-op Registerer. // // WrapRegistererWith provides a way to add fixed labels to a subset of -// Collectors. It should not be used to add fixed labels to all metrics exposed. +// Collectors. It should not be used to add fixed labels to all metrics +// exposed. See also +// https://prometheus.io/docs/instrumenting/writing_exporters/#target-labels-not-static-scraped-labels // // Conflicts between Collectors registered through the original Registerer with // Collectors registered through the wrapping Registerer will still be diff --git a/vendor/github.com/prometheus/common/expfmt/decode.go b/vendor/github.com/prometheus/common/expfmt/decode.go index c092723e8..7657f841d 100644 --- a/vendor/github.com/prometheus/common/expfmt/decode.go +++ b/vendor/github.com/prometheus/common/expfmt/decode.go @@ -164,7 +164,7 @@ func (sd *SampleDecoder) Decode(s *model.Vector) error { } // ExtractSamples builds a slice of samples from the provided metric -// families. If an error occurrs during sample extraction, it continues to +// families. If an error occurs during sample extraction, it continues to // extract from the remaining metric families. The returned error is the last // error that has occurred. func ExtractSamples(o *DecodeOptions, fams ...*dto.MetricFamily) (model.Vector, error) { diff --git a/vendor/github.com/prometheus/common/expfmt/encode.go b/vendor/github.com/prometheus/common/expfmt/encode.go index bd4e34745..64dc0eb40 100644 --- a/vendor/github.com/prometheus/common/expfmt/encode.go +++ b/vendor/github.com/prometheus/common/expfmt/encode.go @@ -18,7 +18,7 @@ import ( "io" "net/http" - "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/proto" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. "github.com/matttproud/golang_protobuf_extensions/pbutil" "github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg" diff --git a/vendor/github.com/prometheus/common/expfmt/text_parse.go b/vendor/github.com/prometheus/common/expfmt/text_parse.go index 342e5940d..84be0643e 100644 --- a/vendor/github.com/prometheus/common/expfmt/text_parse.go +++ b/vendor/github.com/prometheus/common/expfmt/text_parse.go @@ -24,7 +24,7 @@ import ( dto "github.com/prometheus/client_model/go" - "github.com/golang/protobuf/proto" + "github.com/golang/protobuf/proto" //nolint:staticcheck // Ignore SA1019. Need to keep deprecated package for compatibility. 
"github.com/prometheus/common/model" ) @@ -299,6 +299,17 @@ func (p *TextParser) startLabelName() stateFn { p.parseError(fmt.Sprintf("expected '=' after label name, found %q", p.currentByte)) return nil } + // Check for duplicate label names. + labels := make(map[string]struct{}) + for _, l := range p.currentMetric.Label { + lName := l.GetName() + if _, exists := labels[lName]; !exists { + labels[lName] = struct{}{} + } else { + p.parseError(fmt.Sprintf("duplicate label names for metric %q", p.currentMF.GetName())) + return nil + } + } return p.startLabelValue } diff --git a/vendor/github.com/prometheus/common/model/fnv.go b/vendor/github.com/prometheus/common/model/fnv.go index 038fc1c90..367afecd3 100644 --- a/vendor/github.com/prometheus/common/model/fnv.go +++ b/vendor/github.com/prometheus/common/model/fnv.go @@ -20,7 +20,7 @@ const ( prime64 = 1099511628211 ) -// hashNew initializies a new fnv64a hash value. +// hashNew initializes a new fnv64a hash value. func hashNew() uint64 { return offset64 } diff --git a/vendor/github.com/prometheus/common/model/labels.go b/vendor/github.com/prometheus/common/model/labels.go index 41051a01a..ef8956335 100644 --- a/vendor/github.com/prometheus/common/model/labels.go +++ b/vendor/github.com/prometheus/common/model/labels.go @@ -45,6 +45,14 @@ const ( // scrape a target. MetricsPathLabel = "__metrics_path__" + // ScrapeIntervalLabel is the name of the label that holds the scrape interval + // used to scrape a target. + ScrapeIntervalLabel = "__scrape_interval__" + + // ScrapeTimeoutLabel is the name of the label that holds the scrape + // timeout used to scrape a target. + ScrapeTimeoutLabel = "__scrape_timeout__" + // ReservedLabelPrefix is a prefix which is not legal in user-supplied // label names. ReservedLabelPrefix = "__" diff --git a/vendor/github.com/prometheus/common/model/time.go b/vendor/github.com/prometheus/common/model/time.go index 490a0240c..7f67b16e4 100644 --- a/vendor/github.com/prometheus/common/model/time.go +++ b/vendor/github.com/prometheus/common/model/time.go @@ -14,6 +14,8 @@ package model import ( + "encoding/json" + "errors" "fmt" "math" "regexp" @@ -181,77 +183,118 @@ func (d *Duration) Type() string { return "duration" } -var durationRE = regexp.MustCompile("^([0-9]+)(y|w|d|h|m|s|ms)$") +var durationRE = regexp.MustCompile("^(([0-9]+)y)?(([0-9]+)w)?(([0-9]+)d)?(([0-9]+)h)?(([0-9]+)m)?(([0-9]+)s)?(([0-9]+)ms)?$") // ParseDuration parses a string into a time.Duration, assuming that a year // always has 365d, a week always has 7d, and a day always has 24h. func ParseDuration(durationStr string) (Duration, error) { - // Allow 0 without a unit. - if durationStr == "0" { + switch durationStr { + case "0": + // Allow 0 without a unit. 
return 0, nil + case "": + return 0, fmt.Errorf("empty duration string") } matches := durationRE.FindStringSubmatch(durationStr) - if len(matches) != 3 { + if matches == nil { return 0, fmt.Errorf("not a valid duration string: %q", durationStr) } - var ( - n, _ = strconv.Atoi(matches[1]) - dur = time.Duration(n) * time.Millisecond - ) - switch unit := matches[2]; unit { - case "y": - dur *= 1000 * 60 * 60 * 24 * 365 - case "w": - dur *= 1000 * 60 * 60 * 24 * 7 - case "d": - dur *= 1000 * 60 * 60 * 24 - case "h": - dur *= 1000 * 60 * 60 - case "m": - dur *= 1000 * 60 - case "s": - dur *= 1000 - case "ms": - // Value already correct - default: - return 0, fmt.Errorf("invalid time unit in duration string: %q", unit) + var dur time.Duration + + // Parse the match at pos `pos` in the regex and use `mult` to turn that + // into ms, then add that value to the total parsed duration. + var overflowErr error + m := func(pos int, mult time.Duration) { + if matches[pos] == "" { + return + } + n, _ := strconv.Atoi(matches[pos]) + + // Check if the provided duration overflows time.Duration (> ~ 290years). + if n > int((1<<63-1)/mult/time.Millisecond) { + overflowErr = errors.New("duration out of range") + } + d := time.Duration(n) * time.Millisecond + dur += d * mult + + if dur < 0 { + overflowErr = errors.New("duration out of range") + } } - return Duration(dur), nil + + m(2, 1000*60*60*24*365) // y + m(4, 1000*60*60*24*7) // w + m(6, 1000*60*60*24) // d + m(8, 1000*60*60) // h + m(10, 1000*60) // m + m(12, 1000) // s + m(14, 1) // ms + + return Duration(dur), overflowErr } func (d Duration) String() string { var ( - ms = int64(time.Duration(d) / time.Millisecond) - unit = "ms" + ms = int64(time.Duration(d) / time.Millisecond) + r = "" ) if ms == 0 { return "0s" } - factors := map[string]int64{ - "y": 1000 * 60 * 60 * 24 * 365, - "w": 1000 * 60 * 60 * 24 * 7, - "d": 1000 * 60 * 60 * 24, - "h": 1000 * 60 * 60, - "m": 1000 * 60, - "s": 1000, - "ms": 1, + + f := func(unit string, mult int64, exact bool) { + if exact && ms%mult != 0 { + return + } + if v := ms / mult; v > 0 { + r += fmt.Sprintf("%d%s", v, unit) + ms -= v * mult + } } - switch int64(0) { - case ms % factors["y"]: - unit = "y" - case ms % factors["w"]: - unit = "w" - case ms % factors["d"]: - unit = "d" - case ms % factors["h"]: - unit = "h" - case ms % factors["m"]: - unit = "m" - case ms % factors["s"]: - unit = "s" + // Only format years and weeks if the remainder is zero, as it is often + // easier to read 90d than 12w6d. + f("y", 1000*60*60*24*365, true) + f("w", 1000*60*60*24*7, true) + + f("d", 1000*60*60*24, false) + f("h", 1000*60*60, false) + f("m", 1000*60, false) + f("s", 1000, false) + f("ms", 1, false) + + return r +} + +// MarshalJSON implements the json.Marshaler interface. +func (d Duration) MarshalJSON() ([]byte, error) { + return json.Marshal(d.String()) +} + +// UnmarshalJSON implements the json.Unmarshaler interface. +func (d *Duration) UnmarshalJSON(bytes []byte) error { + var s string + if err := json.Unmarshal(bytes, &s); err != nil { + return err + } + dur, err := ParseDuration(s) + if err != nil { + return err } - return fmt.Sprintf("%v%v", ms/factors[unit], unit) + *d = dur + return nil +} + +// MarshalText implements the encoding.TextMarshaler interface. +func (d *Duration) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface. 
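// Quick illustration of the new multi-unit duration parsing and the compound
// String form; the input value is arbitrary.
package main

import (
	"fmt"

	"github.com/prometheus/common/model"
)

func main() {
	d, err := model.ParseDuration("1w2d3h") // multi-unit strings are now accepted
	if err != nil {
		panic(err)
	}
	fmt.Println(d.String()) // "9d3h": weeks are only emitted when the value divides evenly
}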
+func (d *Duration) UnmarshalText(text []byte) error { + var err error + *d, err = ParseDuration(string(text)) + return err } // MarshalYAML implements the yaml.Marshaler interface. diff --git a/vendor/github.com/prometheus/procfs/Makefile b/vendor/github.com/prometheus/procfs/Makefile index 616a0d25e..fa2bd5b52 100644 --- a/vendor/github.com/prometheus/procfs/Makefile +++ b/vendor/github.com/prometheus/procfs/Makefile @@ -18,6 +18,8 @@ include Makefile.common ./ttar -C $(dir $*) -x -f $*.ttar touch $@ +fixtures: fixtures/.unpacked + update_fixtures: rm -vf fixtures/.unpacked ./ttar -c -f fixtures.ttar fixtures/ diff --git a/vendor/github.com/prometheus/procfs/Makefile.common b/vendor/github.com/prometheus/procfs/Makefile.common index 3ac29c636..a1b1ca40f 100644 --- a/vendor/github.com/prometheus/procfs/Makefile.common +++ b/vendor/github.com/prometheus/procfs/Makefile.common @@ -78,12 +78,12 @@ ifneq ($(shell which gotestsum),) endif endif -PROMU_VERSION ?= 0.7.0 +PROMU_VERSION ?= 0.12.0 PROMU_URL := https://github.com/prometheus/promu/releases/download/v$(PROMU_VERSION)/promu-$(PROMU_VERSION).$(GO_BUILD_PLATFORM).tar.gz GOLANGCI_LINT := GOLANGCI_LINT_OPTS ?= -GOLANGCI_LINT_VERSION ?= v1.18.0 +GOLANGCI_LINT_VERSION ?= v1.39.0 # golangci-lint only supports linux, darwin and windows platforms on i386/amd64. # windows isn't included here because of the path separator being different. ifeq ($(GOHOSTOS),$(filter $(GOHOSTOS),linux darwin)) @@ -118,7 +118,7 @@ endif %: common-% ; .PHONY: common-all -common-all: precheck style check_license lint unused build test +common-all: precheck style check_license lint yamllint unused build test .PHONY: common-style common-style: @@ -198,6 +198,15 @@ else endif endif +.PHONY: common-yamllint +common-yamllint: + @echo ">> running yamllint on all YAML files in the repository" +ifeq (, $(shell which yamllint)) + @echo "yamllint not installed so skipping" +else + yamllint . +endif + # For backward-compatibility. .PHONY: common-staticcheck common-staticcheck: lint diff --git a/vendor/github.com/prometheus/procfs/README.md b/vendor/github.com/prometheus/procfs/README.md index 55d1e3261..43c37735a 100644 --- a/vendor/github.com/prometheus/procfs/README.md +++ b/vendor/github.com/prometheus/procfs/README.md @@ -6,8 +6,8 @@ metrics from the pseudo-filesystems /proc and /sys. *WARNING*: This package is a work in progress. Its API may still break in backwards-incompatible ways without warnings. Use it at your own risk. 
-[![GoDoc](https://godoc.org/github.com/prometheus/procfs?status.png)](https://godoc.org/github.com/prometheus/procfs) -[![Build Status](https://travis-ci.org/prometheus/procfs.svg?branch=master)](https://travis-ci.org/prometheus/procfs) +[![Go Reference](https://pkg.go.dev/badge/github.com/prometheus/procfs.svg)](https://pkg.go.dev/github.com/prometheus/procfs) +[![CircleCI](https://circleci.com/gh/prometheus/procfs/tree/master.svg?style=svg)](https://circleci.com/gh/prometheus/procfs/tree/master) [![Go Report Card](https://goreportcard.com/badge/github.com/prometheus/procfs)](https://goreportcard.com/report/github.com/prometheus/procfs) ## Usage diff --git a/vendor/github.com/prometheus/client_golang/prometheus/build_info_pre_1.12.go b/vendor/github.com/prometheus/procfs/cmdline.go similarity index 60% rename from vendor/github.com/prometheus/client_golang/prometheus/build_info_pre_1.12.go rename to vendor/github.com/prometheus/procfs/cmdline.go index 6609e2877..bf4f3b48c 100644 --- a/vendor/github.com/prometheus/client_golang/prometheus/build_info_pre_1.12.go +++ b/vendor/github.com/prometheus/procfs/cmdline.go @@ -1,4 +1,4 @@ -// Copyright 2019 The Prometheus Authors +// Copyright 2021 The Prometheus Authors // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at @@ -11,12 +11,20 @@ // See the License for the specific language governing permissions and // limitations under the License. -// +build !go1.12 +package procfs -package prometheus +import ( + "strings" -// readBuildInfo is a wrapper around debug.ReadBuildInfo for Go versions before -// 1.12. Remove this whole file once the minimum supported Go version is 1.12. -func readBuildInfo() (path, version, sum string) { - return "unknown", "unknown", "unknown" + "github.com/prometheus/procfs/internal/util" +) + +// CmdLine returns the command line of the kernel. 
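// Illustrative usage of the new kernel command line accessor added to procfs;
// assumes /proc is mounted at the default location.
package main

import (
	"fmt"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS(procfs.DefaultMountPoint)
	if err != nil {
		panic(err)
	}
	args, err := fs.CmdLine()
	if err != nil {
		panic(err)
	}
	// e.g. [BOOT_IMAGE=/vmlinuz-5.11.0-22-generic root=UUID=... ro quiet splash]
	fmt.Println(args)
}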
+func (fs FS) CmdLine() ([]string, error) { + data, err := util.ReadFileNoStat(fs.proc.Path("cmdline")) + if err != nil { + return nil, err + } + + return strings.Fields(string(data)), nil } diff --git a/vendor/github.com/prometheus/procfs/doc.go b/vendor/github.com/prometheus/procfs/doc.go index e2acd6d40..d31a82600 100644 --- a/vendor/github.com/prometheus/procfs/doc.go +++ b/vendor/github.com/prometheus/procfs/doc.go @@ -31,7 +31,7 @@ // log.Fatalf("could not get process: %s", err) // } // -// stat, err := p.NewStat() +// stat, err := p.Stat() // if err != nil { // log.Fatalf("could not get process stat: %s", err) // } diff --git a/vendor/github.com/prometheus/procfs/fixtures.ttar b/vendor/github.com/prometheus/procfs/fixtures.ttar index 1e76173da..5e7eeef4a 100644 --- a/vendor/github.com/prometheus/procfs/fixtures.ttar +++ b/vendor/github.com/prometheus/procfs/fixtures.ttar @@ -644,6 +644,11 @@ Node 0, zone DMA32 759 572 791 475 194 45 12 0 Node 0, zone Normal 4381 1093 185 1530 567 102 4 0 0 0 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/proc/cmdline +Lines: 1 +BOOT_IMAGE=/vmlinuz-5.11.0-22-generic root=UUID=456a0345-450d-4f7b-b7c9-43e3241d99ad ro quiet splash vt.handoff=7 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Path: fixtures/proc/cpuinfo Lines: 216 processor : 0 @@ -2204,6 +2209,23 @@ Lines: 1 00015c73 00020e76 F0000769 00000000 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/proc/net/stat +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/proc/net/stat/arp_cache +Lines: 3 +entries allocs destroys hash_grows lookups hits res_failed rcv_probes_mcast rcv_probes_ucast periodic_gc_runs forced_gc_runs unresolved_discards table_fulls +00000014 00000001 00000002 00000003 00000004 00000005 00000006 00000007 00000008 00000009 0000000a 0000000b 0000000c +00000014 0000000d 0000000e 0000000f 00000010 00000011 00000012 00000013 00000014 00000015 00000016 00000017 00000018 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/proc/net/stat/ndisc_cache +Lines: 3 +entries allocs destroys hash_grows lookups hits res_failed rcv_probes_mcast rcv_probes_ucast periodic_gc_runs forced_gc_runs unresolved_discards table_fulls +00000024 000000f0 000000f1 000000f2 000000f3 000000f4 000000f5 000000f6 000000f7 000000f8 000000f9 000000fa 000000fb +00000024 000000fc 000000fd 000000fe 000000ff 00000100 00000101 00000102 00000103 00000104 00000105 00000106 00000107 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Path: fixtures/proc/net/tcp Lines: 4 sl local_address rem_address st tx_queue rx_queue tr tm->when retrnsmt uid timeout inode @@ -3455,6 +3477,460 @@ Mode: 664 Directory: fixtures/sys/class Mode: 775 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/drm +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/drm/card0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/drm/card0/device +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/class/drm/card0/device/aer_dev_correctable +Lines: 9 +RxErr 0 +BadTLP 0 +BadDLLP 0 +Rollover 0 +Timeout 0 +NonFatalErr 0 +CorrIntErr 0 +HeaderOF 0 +TOTAL_ERR_COR 0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/aer_dev_fatal +Lines: 19 +Undefined 0 +DLP 0 +SDES 0 +TLP 0 +FCP 0 +CmpltTO 0 +CmpltAbrt 0 +UnxCmplt 0 +RxOF 0 +MalfTLP 0 +ECRC 0 +UnsupReq 0 +ACSViol 0 +UncorrIntErr 0 +BlockedTLP 0 +AtomicOpBlocked 0 +TLPBlockedErr 0 +PoisonTLPBlocked 0 +TOTAL_ERR_FATAL 0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/aer_dev_nonfatal +Lines: 19 +Undefined 0 +DLP 0 +SDES 0 +TLP 0 +FCP 0 +CmpltTO 0 +CmpltAbrt 0 +UnxCmplt 0 +RxOF 0 +MalfTLP 0 +ECRC 0 +UnsupReq 0 +ACSViol 0 +UncorrIntErr 0 +BlockedTLP 0 +AtomicOpBlocked 0 +TLPBlockedErr 0 +PoisonTLPBlocked 0 +TOTAL_ERR_NONFATAL 0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/ari_enabled +Lines: 1 +0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/boot_vga +Lines: 1 +1 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/broken_parity_status +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/class +Lines: 1 +0x030000 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/consistent_dma_mask_bits +Lines: 1 +44 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/current_link_speed +Lines: 1 +8.0 GT/s PCIe +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/current_link_width +Lines: 1 +16 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/d3cold_allowed +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/device +Lines: 1 +0x687f +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/dma_mask_bits +Lines: 1 +44 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/driver_override +Lines: 1 +(null) +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/enable +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/gpu_busy_percent +Lines: 1 +4 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/irq +Lines: 1 +95 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/local_cpulist +Lines: 1 +0-15 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/class/drm/card0/device/local_cpus +Lines: 1 +0000ffff +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/max_link_speed +Lines: 1 +8.0 GT/s PCIe +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/max_link_width +Lines: 1 +16 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_gtt_total +Lines: 1 +8573157376 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_gtt_used +Lines: 1 +144560128 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_vis_vram_total +Lines: 1 +8573157376 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_vis_vram_used +Lines: 1 +1490378752 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_vram_total +Lines: 1 +8573157376 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_vram_used +Lines: 1 +1490378752 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/mem_info_vram_vendor +Lines: 1 +samsung +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/modalias +Lines: 1 +pci:v00001002d0000687Fsv00001043sd000004C4bc03sc00i00 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/msi_bus +Lines: 1 +1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/numa_node +Lines: 1 +-1 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pcie_bw +Lines: 1 +6641 815 256 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pcie_replay_count +Lines: 1 +0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/power_dpm_force_performance_level +Lines: 1 +manual +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/power_dpm_state +Lines: 1 +performance +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/power_state +Lines: 1 +D0 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_cur_state +Lines: 1 +1 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_dpm_dcefclk +Lines: 5 +0: 600Mhz * +1: 720Mhz +2: 800Mhz +3: 847Mhz +4: 900Mhz +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_dpm_mclk +Lines: 4 +0: 167Mhz * 
+1: 500Mhz +2: 800Mhz +3: 945Mhz +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_dpm_pcie +Lines: 2 +0: 8.0GT/s, x16 +1: 8.0GT/s, x16 * +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_dpm_sclk +Lines: 8 +0: 852Mhz * +1: 991Mhz +2: 1084Mhz +3: 1138Mhz +4: 1200Mhz +5: 1401Mhz +6: 1536Mhz +7: 1630Mhz +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_dpm_socclk +Lines: 8 +0: 600Mhz +1: 720Mhz * +2: 800Mhz +3: 847Mhz +4: 900Mhz +5: 960Mhz +6: 1028Mhz +7: 1107Mhz +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_features +Lines: 32 +Current ppfeatures: 0x0000000019a1ff4f +FEATURES BITMASK ENABLEMENT +DPM_PREFETCHER 0x0000000000000001 Y +GFXCLK_DPM 0x0000000000000002 Y +UCLK_DPM 0x0000000000000004 Y +SOCCLK_DPM 0x0000000000000008 Y +UVD_DPM 0x0000000000000010 N +VCE_DPM 0x0000000000000020 N +ULV 0x0000000000000040 Y +MP0CLK_DPM 0x0000000000000080 N +LINK_DPM 0x0000000000000100 Y +DCEFCLK_DPM 0x0000000000000200 Y +AVFS 0x0000000000000400 Y +GFXCLK_DS 0x0000000000000800 Y +SOCCLK_DS 0x0000000000001000 Y +LCLK_DS 0x0000000000002000 Y +PPT 0x0000000000004000 Y +TDC 0x0000000000008000 Y +THERMAL 0x0000000000010000 Y +GFX_PER_CU_CG 0x0000000000020000 N +RM 0x0000000000040000 N +DCEFCLK_DS 0x0000000000080000 N +ACDC 0x0000000000100000 N +VR0HOT 0x0000000000200000 Y +VR1HOT 0x0000000000400000 N +FW_CTF 0x0000000000800000 Y +LED_DISPLAY 0x0000000001000000 Y +FAN_CONTROL 0x0000000002000000 N +FAST_PPT 0x0000000004000000 N +DIDT 0x0000000008000000 Y +ACG 0x0000000010000000 Y +PCC_LIMIT 0x0000000020000000 N +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_force_state +Lines: 1 + +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_mclk_od +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_num_states +Lines: 3 +states: 2 +0 boot +1 performance +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_od_clk_voltage +Lines: 18 +OD_SCLK: +0: 852Mhz 800mV +1: 991Mhz 900mV +2: 1084Mhz 950mV +3: 1138Mhz 1000mV +4: 1200Mhz 1050mV +5: 1401Mhz 1100mV +6: 1536Mhz 1150mV +7: 1630Mhz 1200mV +OD_MCLK: +0: 167Mhz 800mV +1: 500Mhz 800mV +2: 800Mhz 950mV +3: 945Mhz 1100mV +OD_RANGE: +SCLK: 852MHz 2400MHz +MCLK: 167MHz 1500MHz +VDDC: 800mV 1200mV +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_power_profile_mode +Lines: 8 +NUM MODE_NAME BUSY_SET_POINT FPS USE_RLC_BUSY MIN_ACTIVE_LEVEL + 0 BOOTUP_DEFAULT : 70 60 0 0 + 1 3D_FULL_SCREEN*: 70 60 1 3 + 2 POWER_SAVING : 90 60 0 0 + 3 VIDEO : 70 60 0 0 + 4 VR : 70 90 0 0 + 5 COMPUTE : 30 60 0 6 + 6 CUSTOM : 0 0 0 0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/pp_sclk_od +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/class/drm/card0/device/product_name +Lines: 1 + +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/product_number +Lines: 1 + +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/resource +Lines: 13 +0x0000007c00000000 0x0000007dffffffff 0x000000000014220c +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000007e00000000 0x0000007e0fffffff 0x000000000014220c +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x000000000000d000 0x000000000000d0ff 0x0000000000040101 +0x00000000fcd00000 0x00000000fcd7ffff 0x0000000000040200 +0x00000000fcd80000 0x00000000fcd9ffff 0x0000000000046200 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +0x0000000000000000 0x0000000000000000 0x0000000000000000 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/revision +Lines: 1 +0xc1 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/serial_number +Lines: 1 + +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/subsystem_device +Lines: 1 +0x04c4 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/subsystem_vendor +Lines: 1 +0x1043 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/thermal_throttling_logging +Lines: 1 +0000:09:00.0: thermal throttling logging enabled, with interval 60 seconds +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/uevent +Lines: 6 +DRIVER=amdgpu +PCI_CLASS=30000 +PCI_ID=1002:687F +PCI_SUBSYS_ID=1043:04C4 +PCI_SLOT_NAME=0000:09:00.0 +MODALIAS=pci:v00001002d0000687Fsv00001043sd000004C4bc03sc00i00 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/unique_id +Lines: 1 +0123456789abcdef +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/vbios_version +Lines: 1 +115-D050PIL-100 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/drm/card0/device/vendor +Lines: 1 +0x1002 +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/class/fc_host Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -3592,140 +4068,262 @@ Mode: 644 Directory: fixtures/sys/class/infiniband Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0 +Directory: fixtures/sys/class/infiniband/hfi1_0 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/board_id -Lines: 1 -SM_1141000001000 -Mode: 644 -# ttar - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/fw_ver +Path: fixtures/sys/class/infiniband/hfi1_0/board_id Lines: 1 -2.31.5050 +HPE 100Gb 1-port OP101 QSFP28 x16 PCIe Gen3 with Intel Omni-Path Adapter Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/hca_type +Path: fixtures/sys/class/infiniband/hfi1_0/fw_ver Lines: 1 -MT4099 +1.27.0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0/ports +Directory: fixtures/sys/class/infiniband/hfi1_0/ports Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0/ports/1 +Directory: fixtures/sys/class/infiniband/hfi1_0/ports/1 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters +Directory: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/VL15_dropped +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/VL15_dropped Lines: 1 0 -Mode: 664 +Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/excessive_buffer_overrun_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/excessive_buffer_overrun_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/link_downed +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/link_downed Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/link_error_recovery +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/link_error_recovery Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/local_link_integrity_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/local_link_integrity_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_constraint_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_constraint_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_data +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_data Lines: 1 -2221223609 +345091702026 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_packets +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_packets Lines: 1 -87169372 +638036947 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_remote_physical_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_remote_physical_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_switch_relay_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_rcv_switch_relay_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_constraint_errors +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_xmit_constraint_errors Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_data +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_xmit_data Lines: 1 -26509113295 +273558326543 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_discards +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_xmit_discards Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_packets +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_xmit_packets Lines: 1 -85734114 +568318856 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_wait +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/port_xmit_wait Lines: 1 -3599 +0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/symbol_error +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/counters/symbol_error Lines: 1 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/phys_state +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/phys_state Lines: 1 5: LinkUp Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/rate +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/rate Lines: 1 -40 Gb/sec (4X QDR) +100 Gb/sec (4X EDR) Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/state +Path: fixtures/sys/class/infiniband/hfi1_0/ports/1/state Lines: 1 4: ACTIVE Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0/ports/2 +Directory: fixtures/sys/class/infiniband/mlx4_0 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/class/infiniband/mlx4_0/ports/2/counters -Mode: 755 +Path: fixtures/sys/class/infiniband/mlx4_0/board_id +Lines: 1 +SM_1141000001000 +Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/class/infiniband/mlx4_0/ports/2/counters/VL15_dropped +Path: fixtures/sys/class/infiniband/mlx4_0/fw_ver +Lines: 1 +2.31.5050 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/hca_type +Lines: 1 +MT4099 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/infiniband/mlx4_0/ports +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/infiniband/mlx4_0/ports/1 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/VL15_dropped +Lines: 1 +0 +Mode: 664 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/excessive_buffer_overrun_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/link_downed +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/link_error_recovery +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/local_link_integrity_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_constraint_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_data +Lines: 1 +2221223609 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_packets +Lines: 1 +87169372 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_remote_physical_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_rcv_switch_relay_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_constraint_errors +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_data +Lines: 1 +26509113295 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_discards +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_packets +Lines: 1 +85734114 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/port_xmit_wait 
+Lines: 1 +3599 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/counters/symbol_error +Lines: 1 +0 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/phys_state +Lines: 1 +5: LinkUp +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/rate +Lines: 1 +40 Gb/sec (4X QDR) +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/1/state +Lines: 1 +4: ACTIVE +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/infiniband/mlx4_0/ports/2 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/infiniband/mlx4_0/ports/2/counters +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/infiniband/mlx4_0/ports/2/counters/VL15_dropped Lines: 1 0 Mode: 664 @@ -3960,6 +4558,32 @@ Lines: 1 1 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/nvme +Mode: 775 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/nvme/nvme0 +Mode: 775 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/nvme/nvme0/firmware_rev +Lines: 1 +1B2QEXP7 +Mode: 664 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/nvme/nvme0/model +Lines: 1 +Samsung SSD 970 PRO 512GB +Mode: 664 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/nvme/nvme0/serial +Lines: 1 +S680HF8N190894I +Mode: 664 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/nvme/nvme0/state +Lines: 1 +live +Mode: 664 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/class/power_supply Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -4164,6 +4788,33 @@ Path: fixtures/sys/class/powercap/intel-rapl:a/uevent Lines: 0 Mode: 644 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/class/scsi_tape +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/nst0 +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/nst0a +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/nst0l +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/nst0m +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m +# ttar - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/st0 +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/st0a +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/st0l +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/class/scsi_tape/st0m +SymlinkTo: ../../devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/class/thermal Mode: 775 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -4575,6 +5226,475 @@ Mode: 444 Directory: fixtures/sys/devices/pci0000:00 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/read_cnt +Lines: 1 +3741EOF +Mode: 
444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0a/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0l/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/nst0m/stats/write_ns +Lines: 1 
+5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0a/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0l/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/in_flight +Lines: 1 +1EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/io_ns +Lines: 1 +9247011087720EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/other_cnt +Lines: 1 +1409EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/read_byte_cnt +Lines: 1 +979383912EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/read_cnt +Lines: 1 +3741EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: 
fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/read_ns +Lines: 1 +33788355744EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/resid_cnt +Lines: 1 +19EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/write_byte_cnt +Lines: 1 +1496246784000EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/write_cnt +Lines: 1 +53772916EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/pci0000:00/0000:00:00.0/host0/port-0:0/end_device-0:0/target0:0:0/0:0:0:0/scsi_tape/st0m/stats/write_ns +Lines: 1 +5233597394395EOF +Mode: 444 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/devices/pci0000:00/0000:00:0d.0 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -4978,35 +6098,6 @@ Mode: 644 Directory: fixtures/sys/devices/system Mode: 775 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/devices/system/node -Mode: 775 -# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/devices/system/node/node1 -Mode: 755 -# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/devices/system/node/node1/vmstat -Lines: 6 -nr_free_pages 1 -nr_zone_inactive_anon 2 -nr_zone_active_anon 3 -nr_zone_inactive_file 4 -nr_zone_active_file 5 -nr_zone_unevictable 6 -Mode: 644 -# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Directory: fixtures/sys/devices/system/node/node2 -Mode: 755 -# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -Path: fixtures/sys/devices/system/node/node2/vmstat -Lines: 6 -nr_free_pages 7 -nr_zone_inactive_anon 8 -nr_zone_active_anon 9 -nr_zone_inactive_file 10 -nr_zone_active_file 11 -nr_zone_unevictable 12 -Mode: 644 -# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/devices/system/clocksource Mode: 775 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -5254,6 +6345,35 @@ Mode: 644 Directory: fixtures/sys/devices/system/cpu/cpufreq/policy1 Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/system/node +Mode: 775 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/system/node/node1 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Path: fixtures/sys/devices/system/node/node1/vmstat +Lines: 6 +nr_free_pages 1 +nr_zone_inactive_anon 2 +nr_zone_active_anon 3 +nr_zone_inactive_file 4 +nr_zone_active_file 5 +nr_zone_unevictable 6 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - +Directory: fixtures/sys/devices/system/node/node2 +Mode: 755 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - +Path: fixtures/sys/devices/system/node/node2/vmstat +Lines: 6 +nr_free_pages 7 +nr_zone_inactive_anon 8 +nr_zone_active_anon 9 +nr_zone_inactive_file 10 +nr_zone_active_file 11 +nr_zone_unevictable 12 +Mode: 644 +# ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Directory: fixtures/sys/fs Mode: 755 # ttar - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/vendor/github.com/prometheus/procfs/mdstat.go b/vendor/github.com/prometheus/procfs/mdstat.go index 4c4493bfa..f0b9e5f75 100644 --- a/vendor/github.com/prometheus/procfs/mdstat.go +++ b/vendor/github.com/prometheus/procfs/mdstat.go @@ -22,9 +22,12 @@ import ( ) var ( - statusLineRE = regexp.MustCompile(`(\d+) blocks .*\[(\d+)/(\d+)\] \[[U_]+\]`) - recoveryLineRE = regexp.MustCompile(`\((\d+)/\d+\)`) - componentDeviceRE = regexp.MustCompile(`(.*)\[\d+\]`) + statusLineRE = regexp.MustCompile(`(\d+) blocks .*\[(\d+)/(\d+)\] \[([U_]+)\]`) + recoveryLineBlocksRE = regexp.MustCompile(`\((\d+)/\d+\)`) + recoveryLinePctRE = regexp.MustCompile(`= (.+)%`) + recoveryLineFinishRE = regexp.MustCompile(`finish=(.+)min`) + recoveryLineSpeedRE = regexp.MustCompile(`speed=(.+)[A-Z]`) + componentDeviceRE = regexp.MustCompile(`(.*)\[\d+\]`) ) // MDStat holds info parsed from /proc/mdstat. @@ -39,12 +42,20 @@ type MDStat struct { DisksTotal int64 // Number of failed disks. DisksFailed int64 + // Number of "down" disks. (the _ indicator in the status line) + DisksDown int64 // Spare disks in the device. DisksSpare int64 // Number of blocks the device holds. BlocksTotal int64 // Number of blocks on the device that are in sync. BlocksSynced int64 + // progress percentage of current sync + BlocksSyncedPct float64 + // estimated finishing time for current sync (in minutes) + BlocksSyncedFinishTime float64 + // current sync speed (in Kilobytes/sec) + BlocksSyncedSpeed float64 // Name of md component devices Devices []string } @@ -91,7 +102,7 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) { // Failed disks have the suffix (F) & Spare disks have the suffix (S). fail := int64(strings.Count(line, "(F)")) spare := int64(strings.Count(line, "(S)")) - active, total, size, err := evalStatusLine(lines[i], lines[i+1]) + active, total, down, size, err := evalStatusLine(lines[i], lines[i+1]) if err != nil { return nil, fmt.Errorf("error parsing md device lines: %w", err) @@ -105,6 +116,9 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) { // If device is syncing at the moment, get the number of currently // synced bytes, otherwise that number equals the size of the device. 
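For context on the MDStat fields introduced above: they are populated from the progress line of /proc/mdstat, e.g. "[=>...]  recovery = 12.6% (66519572/526958851) finish=57.3min speed=130866K/sec". A minimal consumer sketch, illustrative only and not part of this vendored patch, assuming the standard procfs entry points:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS("/proc")
	if err != nil {
		log.Fatal(err)
	}
	mdstats, err := fs.MDStat()
	if err != nil {
		log.Fatal(err)
	}
	for _, md := range mdstats {
		// DisksDown, BlocksSyncedPct, BlocksSyncedFinishTime and BlocksSyncedSpeed
		// are the fields added by this update.
		fmt.Printf("%s (%s): %d/%d disks, %d down, sync %.1f%% finish=%.1fmin speed=%.0fK/sec\n",
			md.Name, md.ActivityState, md.DisksActive, md.DisksTotal, md.DisksDown,
			md.BlocksSyncedPct, md.BlocksSyncedFinishTime, md.BlocksSyncedSpeed)
	}
}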
syncedBlocks := size + speed := float64(0) + finish := float64(0) + pct := float64(0) recovering := strings.Contains(lines[syncLineIdx], "recovery") resyncing := strings.Contains(lines[syncLineIdx], "resync") checking := strings.Contains(lines[syncLineIdx], "check") @@ -124,7 +138,7 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) { strings.Contains(lines[syncLineIdx], "DELAYED") { syncedBlocks = 0 } else { - syncedBlocks, err = evalRecoveryLine(lines[syncLineIdx]) + syncedBlocks, pct, finish, speed, err = evalRecoveryLine(lines[syncLineIdx]) if err != nil { return nil, fmt.Errorf("error parsing sync line in md device %q: %w", mdName, err) } @@ -132,69 +146,104 @@ func parseMDStat(mdStatData []byte) ([]MDStat, error) { } mdStats = append(mdStats, MDStat{ - Name: mdName, - ActivityState: state, - DisksActive: active, - DisksFailed: fail, - DisksSpare: spare, - DisksTotal: total, - BlocksTotal: size, - BlocksSynced: syncedBlocks, - Devices: evalComponentDevices(deviceFields), + Name: mdName, + ActivityState: state, + DisksActive: active, + DisksFailed: fail, + DisksDown: down, + DisksSpare: spare, + DisksTotal: total, + BlocksTotal: size, + BlocksSynced: syncedBlocks, + BlocksSyncedPct: pct, + BlocksSyncedFinishTime: finish, + BlocksSyncedSpeed: speed, + Devices: evalComponentDevices(deviceFields), }) } return mdStats, nil } -func evalStatusLine(deviceLine, statusLine string) (active, total, size int64, err error) { +func evalStatusLine(deviceLine, statusLine string) (active, total, down, size int64, err error) { sizeStr := strings.Fields(statusLine)[0] size, err = strconv.ParseInt(sizeStr, 10, 64) if err != nil { - return 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) + return 0, 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) } if strings.Contains(deviceLine, "raid0") || strings.Contains(deviceLine, "linear") { // In the device deviceLine, only disks have a number associated with them in []. 
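The widened statusLineRE above now also captures the [U_] health map, which is where DisksDown comes from. A standalone illustration of the capture groups, using an invented status line for a degraded two-disk raid1 array:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	statusLineRE := regexp.MustCompile(`(\d+) blocks .*\[(\d+)/(\d+)\] \[([U_]+)\]`)
	m := statusLineRE.FindStringSubmatch("6291456 blocks super 1.2 [2/1] [U_]")
	fmt.Println("blocks:", m[1])                   // 6291456
	fmt.Println("total:", m[2], "active:", m[3])   // 2 1
	fmt.Println("down:", strings.Count(m[4], "_")) // 1
}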
total = int64(strings.Count(deviceLine, "[")) - return total, total, size, nil + return total, total, 0, size, nil } if strings.Contains(deviceLine, "inactive") { - return 0, 0, size, nil + return 0, 0, 0, size, nil } matches := statusLineRE.FindStringSubmatch(statusLine) - if len(matches) != 4 { - return 0, 0, 0, fmt.Errorf("couldn't find all the substring matches: %s", statusLine) + if len(matches) != 5 { + return 0, 0, 0, 0, fmt.Errorf("couldn't find all the substring matches: %s", statusLine) } total, err = strconv.ParseInt(matches[2], 10, 64) if err != nil { - return 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) + return 0, 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) } active, err = strconv.ParseInt(matches[3], 10, 64) if err != nil { - return 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) + return 0, 0, 0, 0, fmt.Errorf("unexpected statusLine %q: %w", statusLine, err) } + down = int64(strings.Count(matches[4], "_")) - return active, total, size, nil + return active, total, down, size, nil } -func evalRecoveryLine(recoveryLine string) (syncedBlocks int64, err error) { - matches := recoveryLineRE.FindStringSubmatch(recoveryLine) +func evalRecoveryLine(recoveryLine string) (syncedBlocks int64, pct float64, finish float64, speed float64, err error) { + matches := recoveryLineBlocksRE.FindStringSubmatch(recoveryLine) if len(matches) != 2 { - return 0, fmt.Errorf("unexpected recoveryLine: %s", recoveryLine) + return 0, 0, 0, 0, fmt.Errorf("unexpected recoveryLine: %s", recoveryLine) } syncedBlocks, err = strconv.ParseInt(matches[1], 10, 64) if err != nil { - return 0, fmt.Errorf("error parsing int from recoveryLine %q: %w", recoveryLine, err) + return 0, 0, 0, 0, fmt.Errorf("error parsing int from recoveryLine %q: %w", recoveryLine, err) } - return syncedBlocks, nil + // Get percentage complete + matches = recoveryLinePctRE.FindStringSubmatch(recoveryLine) + if len(matches) != 2 { + return syncedBlocks, 0, 0, 0, fmt.Errorf("unexpected recoveryLine matching percentage: %s", recoveryLine) + } + pct, err = strconv.ParseFloat(strings.TrimSpace(matches[1]), 64) + if err != nil { + return syncedBlocks, 0, 0, 0, fmt.Errorf("error parsing float from recoveryLine %q: %w", recoveryLine, err) + } + + // Get time expected left to complete + matches = recoveryLineFinishRE.FindStringSubmatch(recoveryLine) + if len(matches) != 2 { + return syncedBlocks, pct, 0, 0, fmt.Errorf("unexpected recoveryLine matching est. 
finish time: %s", recoveryLine) + } + finish, err = strconv.ParseFloat(matches[1], 64) + if err != nil { + return syncedBlocks, pct, 0, 0, fmt.Errorf("error parsing float from recoveryLine %q: %w", recoveryLine, err) + } + + // Get recovery speed + matches = recoveryLineSpeedRE.FindStringSubmatch(recoveryLine) + if len(matches) != 2 { + return syncedBlocks, pct, finish, 0, fmt.Errorf("unexpected recoveryLine matching speed: %s", recoveryLine) + } + speed, err = strconv.ParseFloat(matches[1], 64) + if err != nil { + return syncedBlocks, pct, finish, 0, fmt.Errorf("error parsing float from recoveryLine %q: %w", recoveryLine, err) + } + + return syncedBlocks, pct, finish, speed, nil } func evalComponentDevices(deviceFields []string) []string { diff --git a/vendor/github.com/prometheus/procfs/net_ip_socket.go b/vendor/github.com/prometheus/procfs/net_ip_socket.go index ac01dd847..8c9ee3de8 100644 --- a/vendor/github.com/prometheus/procfs/net_ip_socket.go +++ b/vendor/github.com/prometheus/procfs/net_ip_socket.go @@ -65,6 +65,7 @@ type ( TxQueue uint64 RxQueue uint64 UID uint64 + Inode uint64 } ) @@ -150,9 +151,9 @@ func parseIP(hexIP string) (net.IP, error) { // parseNetIPSocketLine parses a single line, represented by a list of fields. func parseNetIPSocketLine(fields []string) (*netIPSocketLine, error) { line := &netIPSocketLine{} - if len(fields) < 8 { + if len(fields) < 10 { return nil, fmt.Errorf( - "cannot parse net socket line as it has less then 8 columns %q", + "cannot parse net socket line as it has less then 10 columns %q", strings.Join(fields, " "), ) } @@ -216,5 +217,10 @@ func parseNetIPSocketLine(fields []string) (*netIPSocketLine, error) { return nil, fmt.Errorf("cannot parse uid value in socket line: %w", err) } + // inode + if line.Inode, err = strconv.ParseUint(fields[9], 0, 64); err != nil { + return nil, fmt.Errorf("cannot parse inode value in socket line: %w", err) + } + return line, nil } diff --git a/vendor/github.com/prometheus/procfs/netstat.go b/vendor/github.com/prometheus/procfs/netstat.go new file mode 100644 index 000000000..94d892f11 --- /dev/null +++ b/vendor/github.com/prometheus/procfs/netstat.go @@ -0,0 +1,68 @@ +// Copyright 2020 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
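Regarding the Inode field added to netIPSocketLine above: in /proc/net/tcp (and udp, tcp6, udp6) the inode is the tenth whitespace-separated column, which is why the parser now requires at least 10 fields. A hedged sketch with an invented row:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

func main() {
	// Hypothetical /proc/net/tcp row (header line omitted, values invented).
	row := "0: 0100007F:0019 00000000:0000 0A 00000000:00000000 00:00000000 00000000 0 0 2740 1 ffff8f2d 100 0 0 10 0"
	fields := strings.Fields(row)
	inode, err := strconv.ParseUint(fields[9], 0, 64)
	if err != nil {
		panic(err)
	}
	fmt.Println("uid:", fields[7], "inode:", inode) // uid: 0 inode: 2740
}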
+ +package procfs + +import ( + "bufio" + "os" + "path/filepath" + "strconv" + "strings" +) + +// NetStat contains statistics for all the counters from one file +type NetStat struct { + Filename string + Stats map[string][]uint64 +} + +// NetStat retrieves stats from /proc/net/stat/ +func (fs FS) NetStat() ([]NetStat, error) { + statFiles, err := filepath.Glob(fs.proc.Path("net/stat/*")) + if err != nil { + return nil, err + } + + var netStatsTotal []NetStat + + for _, filePath := range statFiles { + file, err := os.Open(filePath) + if err != nil { + return nil, err + } + + netStatFile := NetStat{ + Filename: filepath.Base(filePath), + Stats: make(map[string][]uint64), + } + scanner := bufio.NewScanner(file) + scanner.Scan() + // First string is always a header for stats + var headers []string + headers = append(headers, strings.Fields(scanner.Text())...) + + // Other strings represent per-CPU counters + for scanner.Scan() { + for num, counter := range strings.Fields(scanner.Text()) { + value, err := strconv.ParseUint(counter, 16, 32) + if err != nil { + return nil, err + } + netStatFile.Stats[headers[num]] = append(netStatFile.Stats[headers[num]], value) + } + } + netStatsTotal = append(netStatsTotal, netStatFile) + } + return netStatsTotal, nil +} diff --git a/vendor/github.com/prometheus/procfs/proc_cgroup.go b/vendor/github.com/prometheus/procfs/proc_cgroup.go index 0094a13c0..be45b7987 100644 --- a/vendor/github.com/prometheus/procfs/proc_cgroup.go +++ b/vendor/github.com/prometheus/procfs/proc_cgroup.go @@ -90,7 +90,7 @@ func parseCgroups(data []byte) ([]Cgroup, error) { // control hierarchy running on this system. On every system (v1 and v2), all hierarchies contain all processes, // so the len of the returned struct is equal to the number of active hierarchies on this system func (p Proc) Cgroups() ([]Cgroup, error) { - data, err := util.ReadFileNoStat(fmt.Sprintf("/proc/%d/cgroup", p.PID)) + data, err := util.ReadFileNoStat(p.path("cgroup")) if err != nil { return nil, err } diff --git a/vendor/github.com/prometheus/procfs/proc_stat.go b/vendor/github.com/prometheus/procfs/proc_stat.go index 67ca0e9fb..8c7b6e80a 100644 --- a/vendor/github.com/prometheus/procfs/proc_stat.go +++ b/vendor/github.com/prometheus/procfs/proc_stat.go @@ -100,6 +100,15 @@ type ProcStat struct { VSize uint // Resident set size in pages. RSS int + // Soft limit in bytes on the rss of the process. + RSSLimit uint64 + // Real-time scheduling priority, a number in the range 1 to 99 for processes + // scheduled under a real-time policy, or 0, for non-real-time processes. + RTPriority uint + // Scheduling policy. + Policy uint + // Aggregated block I/O delays, measured in clock ticks (centiseconds). 
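The new netstat.go above adds FS.NetStat(), which reads every file under /proc/net/stat/ and exposes the per-CPU counters keyed by the header names. A minimal usage sketch, illustrative and not part of the diff:

package main

import (
	"fmt"
	"log"

	"github.com/prometheus/procfs"
)

func main() {
	fs, err := procfs.NewFS("/proc")
	if err != nil {
		log.Fatal(err)
	}
	stats, err := fs.NetStat()
	if err != nil {
		log.Fatal(err)
	}
	for _, s := range stats {
		// e.g. Filename "arp_cache" or "nf_conntrack"; one slice entry per CPU.
		fmt.Println(s.Filename, "entries per CPU:", s.Stats["entries"])
	}
}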
+ DelayAcctBlkIOTicks uint64 proc fs.FS } @@ -119,7 +128,8 @@ func (p Proc) Stat() (ProcStat, error) { } var ( - ignore int + ignoreInt64 int64 + ignoreUint64 uint64 s = ProcStat{PID: p.PID, proc: p.fs} l = bytes.Index(data, []byte("(")) @@ -151,10 +161,28 @@ func (p Proc) Stat() (ProcStat, error) { &s.Priority, &s.Nice, &s.NumThreads, - &ignore, + &ignoreInt64, &s.Starttime, &s.VSize, &s.RSS, + &s.RSSLimit, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreUint64, + &ignoreInt64, + &ignoreInt64, + &s.RTPriority, + &s.Policy, + &s.DelayAcctBlkIOTicks, ) if err != nil { return ProcStat{}, err diff --git a/vendor/github.com/prometheus/procfs/zoneinfo.go b/vendor/github.com/prometheus/procfs/zoneinfo.go index 0b9bb6796..209e2ac98 100644 --- a/vendor/github.com/prometheus/procfs/zoneinfo.go +++ b/vendor/github.com/prometheus/procfs/zoneinfo.go @@ -99,7 +99,6 @@ func parseZoneinfo(zoneinfoData []byte) ([]Zoneinfo, error) { continue } if strings.HasPrefix(strings.TrimSpace(line), "per-node stats") { - zoneinfoElement.Zone = "" continue } parts := strings.Fields(strings.TrimSpace(line)) diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/go-ruleguard/internal/stdinfo/stdinfo.go deleted file mode 100644 index f00d66d46..000000000 --- a/vendor/github.com/quasilyte/go-ruleguard/internal/stdinfo/stdinfo.go +++ /dev/null @@ -1,151 +0,0 @@ -package stdinfo - -var Packages = map[string]string{ - "adler32": "hash/adler32", - "aes": "crypto/aes", - "ascii85": "encoding/ascii85", - "asn1": "encoding/asn1", - "ast": "go/ast", - "atomic": "sync/atomic", - "base32": "encoding/base32", - "base64": "encoding/base64", - "big": "math/big", - "binary": "encoding/binary", - "bits": "math/bits", - "bufio": "bufio", - "build": "go/build", - "bytes": "bytes", - "bzip2": "compress/bzip2", - "cgi": "net/http/cgi", - "cgo": "runtime/cgo", - "cipher": "crypto/cipher", - "cmplx": "math/cmplx", - "color": "image/color", - "constant": "go/constant", - "constraint": "go/build/constraint", - "context": "context", - "cookiejar": "net/http/cookiejar", - "crc32": "hash/crc32", - "crc64": "hash/crc64", - "crypto": "crypto", - "csv": "encoding/csv", - "debug": "runtime/debug", - "des": "crypto/des", - "doc": "go/doc", - "draw": "image/draw", - "driver": "database/sql/driver", - "dsa": "crypto/dsa", - "dwarf": "debug/dwarf", - "ecdsa": "crypto/ecdsa", - "ed25519": "crypto/ed25519", - "elf": "debug/elf", - "elliptic": "crypto/elliptic", - "embed": "embed", - "encoding": "encoding", - "errors": "errors", - "exec": "os/exec", - "expvar": "expvar", - "fcgi": "net/http/fcgi", - "filepath": "path/filepath", - "flag": "flag", - "flate": "compress/flate", - "fmt": "fmt", - "fnv": "hash/fnv", - "format": "go/format", - "fs": "io/fs", - "fstest": "testing/fstest", - "gif": "image/gif", - "gob": "encoding/gob", - "gosym": "debug/gosym", - "gzip": "compress/gzip", - "hash": "hash", - "heap": "container/heap", - "hex": "encoding/hex", - "hmac": "crypto/hmac", - "html": "html", - "http": "net/http", - "httptest": "net/http/httptest", - "httptrace": "net/http/httptrace", - "httputil": "net/http/httputil", - "image": "image", - "importer": "go/importer", - "io": "io", - "iotest": "testing/iotest", - "ioutil": "io/ioutil", - "jpeg": "image/jpeg", - "json": "encoding/json", - "jsonrpc": 
"net/rpc/jsonrpc", - "list": "container/list", - "log": "log", - "lzw": "compress/lzw", - "macho": "debug/macho", - "mail": "net/mail", - "maphash": "hash/maphash", - "math": "math", - "md5": "crypto/md5", - "metrics": "runtime/metrics", - "mime": "mime", - "multipart": "mime/multipart", - "net": "net", - "os": "os", - "palette": "image/color/palette", - "parse": "text/template/parse", - "parser": "go/parser", - "path": "path", - "pe": "debug/pe", - "pem": "encoding/pem", - "pkix": "crypto/x509/pkix", - "plan9obj": "debug/plan9obj", - "plugin": "plugin", - "png": "image/png", - "pprof": "runtime/pprof", - "printer": "go/printer", - "quick": "testing/quick", - "quotedprintable": "mime/quotedprintable", - "race": "runtime/race", - "rand": "math/rand", - "rc4": "crypto/rc4", - "reflect": "reflect", - "regexp": "regexp", - "ring": "container/ring", - "rpc": "net/rpc", - "rsa": "crypto/rsa", - "runtime": "runtime", - "scanner": "text/scanner", - "sha1": "crypto/sha1", - "sha256": "crypto/sha256", - "sha512": "crypto/sha512", - "signal": "os/signal", - "smtp": "net/smtp", - "sort": "sort", - "sql": "database/sql", - "strconv": "strconv", - "strings": "strings", - "subtle": "crypto/subtle", - "suffixarray": "index/suffixarray", - "sync": "sync", - "syntax": "regexp/syntax", - "syscall": "syscall", - "syslog": "log/syslog", - "tabwriter": "text/tabwriter", - "tar": "archive/tar", - "template": "text/template", - "testing": "testing", - "textproto": "net/textproto", - "time": "time", - "tls": "crypto/tls", - "token": "go/token", - "trace": "runtime/trace", - "types": "go/types", - "tzdata": "time/tzdata", - "unicode": "unicode", - "unsafe": "unsafe", - "url": "net/url", - "user": "os/user", - "utf16": "unicode/utf16", - "utf8": "unicode/utf8", - "x509": "crypto/x509", - "xml": "encoding/xml", - "zip": "archive/zip", - "zlib": "compress/zlib", -} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go index e00706c34..52b85fdb5 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go @@ -15,10 +15,10 @@ import ( "sync" "github.com/quasilyte/go-ruleguard/internal/goenv" - "github.com/quasilyte/go-ruleguard/internal/stdinfo" "github.com/quasilyte/go-ruleguard/ruleguard/ir" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" "github.com/quasilyte/go-ruleguard/ruleguard/typematch" + "github.com/quasilyte/stdinfo" ) type engine struct { @@ -64,7 +64,7 @@ func (e *engine) Load(ctx *LoadContext, buildContext *build.Context, filename st pkg: pkg, ctx: ctx, importer: imp, - itab: typematch.NewImportsTab(stdinfo.Packages), + itab: typematch.NewImportsTab(stdinfo.PathByName), gogrepFset: token.NewFileSet(), } l := newIRLoader(config) @@ -97,7 +97,7 @@ func (e *engine) LoadFromIR(ctx *LoadContext, buildContext *build.Context, filen state: e.state, ctx: ctx, importer: imp, - itab: typematch.NewImportsTab(stdinfo.Packages), + itab: typematch.NewImportsTab(stdinfo.PathByName), gogrepFset: token.NewFileSet(), } l := newIRLoader(config) diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go index 848d2d93a..52bfa643e 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go @@ 
-7,12 +7,13 @@ import ( "go/types" "path/filepath" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" + "github.com/quasilyte/go-ruleguard/internal/xtypes" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" "github.com/quasilyte/go-ruleguard/ruleguard/textmatch" "github.com/quasilyte/go-ruleguard/ruleguard/typematch" - "github.com/quasilyte/gogrep" - "github.com/quasilyte/gogrep/nodetag" ) const filterSuccess = matchFilterResult("") @@ -159,6 +160,26 @@ func makeAddressableFilter(src, varname string) filterFunc { } } +func makeVarContainsFilter(src, varname string, pat *gogrep.Pattern) filterFunc { + return func(params *filterParams) matchFilterResult { + params.gogrepSubState.CapturePreset = params.match.CaptureList() + matched := false + gogrep.Walk(params.subNode(varname), func(n ast.Node) bool { + if matched { + return false + } + pat.MatchNode(params.gogrepSubState, n, func(m gogrep.MatchData) { + matched = true + }) + return true + }) + if matched { + return filterSuccess + } + return filterFailure(src) + } +} + func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc { return func(params *filterParams) matchFilterResult { // TODO(quasilyte): what if bytecode function panics due to the programming error? @@ -189,6 +210,16 @@ func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filte } } +func makeTypeHasMethodFilter(src, varname string, fn *types.Func) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subNode(varname)) + if typeHasMethod(typ, fn) { + return filterSuccess + } + return filterFailure(src) + } +} + func makeTypeHasPointersFilter(src, varname string) filterFunc { return func(params *filterParams) matchFilterResult { typ := params.typeofNode(params.subExpr(varname)) @@ -246,16 +277,27 @@ func makeTypeOfKindFilter(src, varname string, underlying bool, kind types.Basic } } +func makeTypesIdenticalFilter(src, lhsVarname, rhsVarname string) filterFunc { + return func(params *filterParams) matchFilterResult { + lhsType := params.typeofNode(params.subNode(lhsVarname)) + rhsType := params.typeofNode(params.subNode(rhsVarname)) + if xtypes.Identical(lhsType, rhsType) { + return filterSuccess + } + return filterFailure(src) + } +} + func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc { if underlying { return func(params *filterParams) matchFilterResult { if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { return exprListFilterApply(src, list, func(x ast.Expr) bool { - return pat.MatchIdentical(params.typeofNode(x).Underlying()) + return pat.MatchIdentical(params.typematchState, params.typeofNode(x).Underlying()) }) } typ := params.typeofNode(params.subNode(varname)).Underlying() - if pat.MatchIdentical(typ) { + if pat.MatchIdentical(params.typematchState, typ) { return filterSuccess } return filterFailure(src) @@ -265,11 +307,11 @@ func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Patte return func(params *filterParams) matchFilterResult { if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { return exprListFilterApply(src, list, func(x ast.Expr) bool { - return pat.MatchIdentical(params.typeofNode(x)) + return pat.MatchIdentical(params.typematchState, params.typeofNode(x)) }) } typ := params.typeofNode(params.subNode(varname)) - if pat.MatchIdentical(typ) { + if pat.MatchIdentical(params.typematchState, typ) { 
return filterSuccess } return filterFailure(src) @@ -322,6 +364,18 @@ func makeLineFilter(src, varname string, op token.Token, rhsVarname string) filt } } +func makeObjectIsGlobalFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + obj := params.ctx.Types.ObjectOf(identOf(params.subExpr(varname))) + globalScope := params.ctx.Pkg.Scope() + if obj.Parent() == globalScope { + return filterSuccess + } + + return filterFailure(src) + } +} + func makeGoVersionFilter(src string, op token.Token, version GoVersion) filterFunc { return func(params *filterParams) matchFilterResult { if params.ctx.GoVersion.IsAny() { @@ -365,6 +419,19 @@ func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue const } } +func makeTypeSizeFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { + return func(params *filterParams) matchFilterResult { + lhsTyp := params.typeofNode(params.subExpr(varname)) + lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(lhsTyp)) + rhsTyp := params.typeofNode(params.subExpr(rhsVarname)) + rhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(rhsTyp)) + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { return func(params *filterParams) matchFilterResult { if list, ok := params.subNode(varname).(gogrep.ExprSlice); ok { @@ -542,6 +609,15 @@ func nodeIs(n ast.Node, tag nodetag.Value) bool { return matched } +func typeHasMethod(typ types.Type, fn *types.Func) bool { + obj, _, _ := types.LookupFieldOrMethod(typ, true, fn.Pkg(), fn.Name()) + fn2, ok := obj.(*types.Func) + if !ok { + return false + } + return xtypes.Identical(fn.Type(), fn2.Type()) +} + func typeHasPointers(typ types.Type) bool { switch typ := typ.(type) { case *types.Basic: diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go index 011a82cec..2574ac90f 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go @@ -7,6 +7,7 @@ import ( "regexp" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" "github.com/quasilyte/gogrep" "github.com/quasilyte/gogrep/nodetag" ) @@ -58,7 +59,9 @@ type filterParams struct { imports map[string]struct{} env *quasigo.EvalEnv - importer *goImporter + importer *goImporter + gogrepSubState *gogrep.MatcherState + typematchState *typematch.MatcherState match matchData nodePath *nodePath diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go index c4a07bfc6..639724b0d 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/filter_op.gen.go @@ -80,87 +80,103 @@ const ( // $Value type: string FilterVarObjectIsOp FilterOp = 21 + // m[$Value].Object.IsGlobal() + // $Value type: string + FilterVarObjectIsGlobalOp FilterOp = 22 + // m[$Value].Type.Is($Args[0]) // $Value type: string - FilterVarTypeIsOp FilterOp = 22 + FilterVarTypeIsOp FilterOp = 23 + + // m[$Value].Type.IdenticalTo($Args[0]) + // $Value type: string + FilterVarTypeIdenticalToOp FilterOp = 
24 // m[$Value].Type.Underlying().Is($Args[0]) // $Value type: string - FilterVarTypeUnderlyingIsOp FilterOp = 23 + FilterVarTypeUnderlyingIsOp FilterOp = 25 // m[$Value].Type.OfKind($Args[0]) // $Value type: string - FilterVarTypeOfKindOp FilterOp = 24 + FilterVarTypeOfKindOp FilterOp = 26 // m[$Value].Type.Underlying().OfKind($Args[0]) // $Value type: string - FilterVarTypeUnderlyingOfKindOp FilterOp = 25 + FilterVarTypeUnderlyingOfKindOp FilterOp = 27 // m[$Value].Type.ConvertibleTo($Args[0]) // $Value type: string - FilterVarTypeConvertibleToOp FilterOp = 26 + FilterVarTypeConvertibleToOp FilterOp = 28 // m[$Value].Type.AssignableTo($Args[0]) // $Value type: string - FilterVarTypeAssignableToOp FilterOp = 27 + FilterVarTypeAssignableToOp FilterOp = 29 // m[$Value].Type.Implements($Args[0]) // $Value type: string - FilterVarTypeImplementsOp FilterOp = 28 + FilterVarTypeImplementsOp FilterOp = 30 + + // m[$Value].Type.HasMethod($Args[0]) + // $Value type: string + FilterVarTypeHasMethodOp FilterOp = 31 // m[$Value].Text.Matches($Args[0]) // $Value type: string - FilterVarTextMatchesOp FilterOp = 29 + FilterVarTextMatchesOp FilterOp = 32 + + // m[$Value].Contains($Args[0]) + // $Value type: string + FilterVarContainsOp FilterOp = 33 // m.Deadcode() - FilterDeadcodeOp FilterOp = 30 + FilterDeadcodeOp FilterOp = 34 // m.GoVersion().Eq($Value) // $Value type: string - FilterGoVersionEqOp FilterOp = 31 + FilterGoVersionEqOp FilterOp = 35 // m.GoVersion().LessThan($Value) // $Value type: string - FilterGoVersionLessThanOp FilterOp = 32 + FilterGoVersionLessThanOp FilterOp = 36 // m.GoVersion().GreaterThan($Value) // $Value type: string - FilterGoVersionGreaterThanOp FilterOp = 33 + FilterGoVersionGreaterThanOp FilterOp = 37 // m.GoVersion().LessEqThan($Value) // $Value type: string - FilterGoVersionLessEqThanOp FilterOp = 34 + FilterGoVersionLessEqThanOp FilterOp = 38 // m.GoVersion().GreaterEqThan($Value) // $Value type: string - FilterGoVersionGreaterEqThanOp FilterOp = 35 + FilterGoVersionGreaterEqThanOp FilterOp = 39 // m.File.Imports($Value) // $Value type: string - FilterFileImportsOp FilterOp = 36 + FilterFileImportsOp FilterOp = 40 // m.File.PkgPath.Matches($Value) // $Value type: string - FilterFilePkgPathMatchesOp FilterOp = 37 + FilterFilePkgPathMatchesOp FilterOp = 41 // m.File.Name.Matches($Value) // $Value type: string - FilterFileNameMatchesOp FilterOp = 38 + FilterFileNameMatchesOp FilterOp = 42 // $Value holds a function name // $Value type: string - FilterFilterFuncRefOp FilterOp = 39 + FilterFilterFuncRefOp FilterOp = 43 // $Value holds a string constant // $Value type: string - FilterStringOp FilterOp = 40 + FilterStringOp FilterOp = 44 // $Value holds an int64 constant // $Value type: int64 - FilterIntOp FilterOp = 41 + FilterIntOp FilterOp = 45 // m[`$$`].Node.Parent().Is($Args[0]) - FilterRootNodeParentIsOp FilterOp = 42 + FilterRootNodeParentIsOp FilterOp = 46 ) var filterOpNames = map[FilterOp]string{ @@ -186,14 +202,18 @@ var filterOpNames = map[FilterOp]string{ FilterVarFilterOp: `VarFilter`, FilterVarNodeIsOp: `VarNodeIs`, FilterVarObjectIsOp: `VarObjectIs`, + FilterVarObjectIsGlobalOp: `VarObjectIsGlobal`, FilterVarTypeIsOp: `VarTypeIs`, + FilterVarTypeIdenticalToOp: `VarTypeIdenticalTo`, FilterVarTypeUnderlyingIsOp: `VarTypeUnderlyingIs`, FilterVarTypeOfKindOp: `VarTypeOfKind`, FilterVarTypeUnderlyingOfKindOp: `VarTypeUnderlyingOfKind`, FilterVarTypeConvertibleToOp: `VarTypeConvertibleTo`, FilterVarTypeAssignableToOp: `VarTypeAssignableTo`, 
FilterVarTypeImplementsOp: `VarTypeImplements`, + FilterVarTypeHasMethodOp: `VarTypeHasMethod`, FilterVarTextMatchesOp: `VarTextMatches`, + FilterVarContainsOp: `VarContains`, FilterDeadcodeOp: `Deadcode`, FilterGoVersionEqOp: `GoVersionEq`, FilterGoVersionLessThanOp: `GoVersionLessThan`, @@ -229,14 +249,18 @@ var filterOpFlags = map[FilterOp]uint64{ FilterVarFilterOp: flagHasVar, FilterVarNodeIsOp: flagHasVar, FilterVarObjectIsOp: flagHasVar, + FilterVarObjectIsGlobalOp: flagHasVar, FilterVarTypeIsOp: flagHasVar, + FilterVarTypeIdenticalToOp: flagHasVar, FilterVarTypeUnderlyingIsOp: flagHasVar, FilterVarTypeOfKindOp: flagHasVar, FilterVarTypeUnderlyingOfKindOp: flagHasVar, FilterVarTypeConvertibleToOp: flagHasVar, FilterVarTypeAssignableToOp: flagHasVar, FilterVarTypeImplementsOp: flagHasVar, + FilterVarTypeHasMethodOp: flagHasVar, FilterVarTextMatchesOp: flagHasVar, + FilterVarContainsOp: flagHasVar, FilterStringOp: flagIsBasicLit, FilterIntOp: flagIsBasicLit, } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go index 48b719d94..3e9806d8c 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir/gen_filter_op.go @@ -1,3 +1,4 @@ +//go:build generate // +build generate package main @@ -52,15 +53,20 @@ func main() { {name: "VarFilter", comment: "m[$Value].Filter($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarNodeIs", comment: "m[$Value].Node.Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarObjectIs", comment: "m[$Value].Object.Is($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "VarObjectIsGlobal", comment: "m[$Value].Object.IsGlobal()", valueType: "string", flags: flagHasVar}, {name: "VarTypeIs", comment: "m[$Value].Type.Is($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "VarTypeIdenticalTo", comment: "m[$Value].Type.IdenticalTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeUnderlyingIs", comment: "m[$Value].Type.Underlying().Is($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeOfKind", comment: "m[$Value].Type.OfKind($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeUnderlyingOfKind", comment: "m[$Value].Type.Underlying().OfKind($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeConvertibleTo", comment: "m[$Value].Type.ConvertibleTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeAssignableTo", comment: "m[$Value].Type.AssignableTo($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTypeImplements", comment: "m[$Value].Type.Implements($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "VarTypeHasMethod", comment: "m[$Value].Type.HasMethod($Args[0])", valueType: "string", flags: flagHasVar}, {name: "VarTextMatches", comment: "m[$Value].Text.Matches($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "VarContains", comment: "m[$Value].Contains($Args[0])", valueType: "string", flags: flagHasVar}, + {name: "Deadcode", comment: "m.Deadcode()"}, {name: "GoVersionEq", comment: "m.GoVersion().Eq($Value)", valueType: "string"}, diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go index 7e29c81b0..ff107f978 100644 --- 
a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ir_loader.go @@ -11,13 +11,14 @@ import ( "io/ioutil" "regexp" + "github.com/quasilyte/gogrep" + "github.com/quasilyte/gogrep/nodetag" + "github.com/quasilyte/go-ruleguard/ruleguard/goutil" "github.com/quasilyte/go-ruleguard/ruleguard/ir" "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" "github.com/quasilyte/go-ruleguard/ruleguard/textmatch" "github.com/quasilyte/go-ruleguard/ruleguard/typematch" - "github.com/quasilyte/gogrep" - "github.com/quasilyte/gogrep/nodetag" ) type irLoaderConfig struct { @@ -237,7 +238,7 @@ func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { l.group.Name = l.prefix + "/" + l.group.Name } - if l.ctx.GroupFilter != nil && !l.ctx.GroupFilter(l.group.Name) { + if l.ctx.GroupFilter != nil && !l.ctx.GroupFilter(l.group) { return nil // Skip this group } if _, ok := l.res.groups[l.group.Name]; ok { @@ -252,7 +253,8 @@ func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { l.itab.Load(imported.Name, imported.Path) } - for _, rule := range group.Rules { + for i := range group.Rules { + rule := &group.Rules[i] if err := l.loadRule(group, rule); err != nil { return err } @@ -261,7 +263,7 @@ func (l *irLoader) loadRuleGroup(group *ir.RuleGroup) error { return nil } -func (l *irLoader) loadRule(group *ir.RuleGroup, rule ir.Rule) error { +func (l *irLoader) loadRule(group *ir.RuleGroup, rule *ir.Rule) error { proto := goRule{ line: rule.Line, group: l.group, @@ -271,7 +273,8 @@ func (l *irLoader) loadRule(group *ir.RuleGroup, rule ir.Rule) error { } info := filterInfo{ - Vars: make(map[string]struct{}), + Vars: make(map[string]struct{}), + group: group, } if rule.WhereExpr.IsValid() { filter, err := l.newFilter(rule.WhereExpr, &info) @@ -294,7 +297,7 @@ func (l *irLoader) loadRule(group *ir.RuleGroup, rule ir.Rule) error { return nil } -func (l *irLoader) loadCommentRule(resultProto goRule, rule ir.Rule, src string, line int) error { +func (l *irLoader) loadCommentRule(resultProto goRule, rule *ir.Rule, src string, line int) error { dst := l.res.universal pat, err := regexp.Compile(src) if err != nil { @@ -312,10 +315,7 @@ func (l *irLoader) loadCommentRule(resultProto goRule, rule ir.Rule, src string, return nil } -func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filterInfo filterInfo, rule ir.Rule, src string, line int) error { - result := resultProto - result.line = line - +func (l *irLoader) gogrepCompile(group *ir.RuleGroup, src string) (*gogrep.Pattern, gogrep.PatternInfo, error) { var imports map[string]string if len(group.Imports) != 0 { imports = make(map[string]string) @@ -331,7 +331,14 @@ func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filte WithTypes: true, Imports: imports, } - pat, info, err := gogrep.Compile(gogrepConfig) + return gogrep.Compile(gogrepConfig) +} + +func (l *irLoader) loadSyntaxRule(group *ir.RuleGroup, resultProto goRule, filterInfo filterInfo, rule *ir.Rule, src string, line int) error { + result := resultProto + result.line = line + + pat, info, err := l.gogrepCompile(group, src) if err != nil { return l.errorf(rule.Line, err, "parse match pattern") } @@ -392,6 +399,57 @@ func (l *irLoader) unwrapTypeExpr(filter ir.FilterExpr) (types.Type, error) { return typ, nil } +func (l *irLoader) unwrapFuncRefExpr(filter ir.FilterExpr) (*types.Func, error) { + s := 
l.unwrapStringExpr(filter) + if s == "" { + return nil, l.errorf(filter.Line, nil, "expected a non-empty func ref string") + } + + n, err := parser.ParseExpr(s) + if err != nil { + return nil, err + } + + switch n := n.(type) { + case *ast.CallExpr: + // TODO: implement this. + return nil, l.errorf(filter.Line, nil, "inline func signatures are not supported yet") + case *ast.SelectorExpr: + funcName := n.Sel.Name + pkgAndType, ok := n.X.(*ast.SelectorExpr) + if !ok { + return nil, l.errorf(filter.Line, nil, "invalid selector expression") + } + pkgID, ok := pkgAndType.X.(*ast.Ident) + if !ok { + return nil, l.errorf(filter.Line, nil, "invalid package name selector part") + } + pkgName := pkgID.Name + typeName := pkgAndType.Sel.Name + fqn := pkgName + "." + typeName + typ, err := l.state.FindType(l.importer, l.pkg, fqn) + if err != nil { + return nil, l.errorf(filter.Line, nil, "can't find %s type", fqn) + } + switch typ := typ.Underlying().(type) { + case *types.Interface: + for i := 0; i < typ.NumMethods(); i++ { + fn := typ.Method(i) + if fn.Name() == funcName { + return fn, nil + } + } + default: + return nil, l.errorf(filter.Line, nil, "only interfaces are supported, but %s is %T", fqn, typ) + } + + default: + return nil, l.errorf(filter.Line, nil, "unexpected %T node", n) + } + + return nil, nil +} + func (l *irLoader) unwrapInterfaceExpr(filter ir.FilterExpr) (*types.Interface, error) { typeString := l.unwrapStringExpr(filter) if typeString == "" { @@ -565,6 +623,11 @@ func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilte result.fn = makeTypeOfKindFilter(result.src, filter.Value.(string), underlying, kind) } + case ir.FilterVarTypeIdenticalToOp: + lhsVarname := filter.Value.(string) + rhsVarname := filter.Args[0].Value.(string) + result.fn = makeTypesIdenticalFilter(result.src, lhsVarname, rhsVarname) + case ir.FilterVarTypeIsOp, ir.FilterVarTypeUnderlyingIsOp: typeString := l.unwrapStringExpr(filter.Args[0]) if typeString == "" { @@ -599,10 +662,22 @@ func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilte } result.fn = makeTypeImplementsFilter(result.src, filter.Value.(string), iface) + case ir.FilterVarTypeHasMethodOp: + fn, err := l.unwrapFuncRefExpr(filter.Args[0]) + if err != nil { + return result, err + } + if fn == nil { + return result, l.errorf(filter.Line, nil, "can't resolve HasMethod() argument") + } + result.fn = makeTypeHasMethodFilter(result.src, filter.Value.(string), fn) + case ir.FilterVarPureOp: result.fn = makePureFilter(result.src, filter.Value.(string)) case ir.FilterVarConstOp: result.fn = makeConstFilter(result.src, filter.Value.(string)) + case ir.FilterVarObjectIsGlobalOp: + result.fn = makeObjectIsGlobalFilter(result.src, filter.Value.(string)) case ir.FilterVarConstSliceOp: result.fn = makeConstSliceFilter(result.src, filter.Value.(string)) case ir.FilterVarAddressableOp: @@ -659,6 +734,14 @@ func (l *irLoader) newFilter(filter ir.FilterExpr, info *filterInfo) (matchFilte } result.fn = makeFileNameMatchesFilter(result.src, re) + case ir.FilterVarContainsOp: + src := filter.Args[0].Value.(string) + pat, _, err := l.gogrepCompile(info.group, src) + if err != nil { + return result, l.errorf(filter.Line, err, "parse contains pattern") + } + result.fn = makeVarContainsFilter(result.src, filter.Value.(string), pat) + case ir.FilterVarFilterOp: funcName := filter.Args[0].Value.(string) userFn := l.state.env.GetFunc(l.file.PkgPath, funcName) @@ -751,6 +834,8 @@ func (l *irLoader) newBinaryExprFilter(filter 
ir.FilterExpr, info *filterInfo) ( case ir.FilterVarTypeSizeOp: if rhsValue != nil { result.fn = makeTypeSizeConstFilter(result.src, lhs.Value.(string), tok, rhsValue) + } else { + result.fn = makeTypeSizeFilter(result.src, lhs.Value.(string), tok, rhs.Value.(string)) } case ir.FilterVarValueIntOp: if rhsValue != nil { @@ -775,4 +860,6 @@ func (l *irLoader) newBinaryExprFilter(filter ir.FilterExpr, info *filterInfo) ( type filterInfo struct { Vars map[string]struct{} + + group *ir.RuleGroup } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go index 7e577a436..7ebd1fe32 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/irconv/irconv.go @@ -11,9 +11,10 @@ import ( "strings" "github.com/go-toolsmith/astcopy" + "golang.org/x/tools/go/ast/astutil" + "github.com/quasilyte/go-ruleguard/ruleguard/goutil" "github.com/quasilyte/go-ruleguard/ruleguard/ir" - "golang.org/x/tools/go/ast/astutil" ) type Context struct { @@ -661,6 +662,28 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { case "File.Name.Matches": return ir.FilterExpr{Op: ir.FilterFileNameMatchesOp, Value: conv.parseStringArg(e.Args[0])} + case "Contains": + pat := conv.parseStringArg(e.Args[0]) + return ir.FilterExpr{ + Op: ir.FilterVarContainsOp, + Value: op.varName, + Args: []ir.FilterExpr{ + {Op: ir.FilterStringOp, Value: pat}, + }, + } + + case "Type.IdenticalTo": + // TODO: reuse the code with parsing At() args? + index, ok := e.Args[0].(*ast.IndexExpr) + if !ok { + panic(conv.errorf(e.Args[0], "expected %s[`varname`] expression", conv.group.MatcherName)) + } + rhsVarname := conv.parseStringArg(index.Index) + args := []ir.FilterExpr{ + {Op: ir.FilterStringOp, Value: rhsVarname}, + } + return ir.FilterExpr{Op: ir.FilterVarTypeIdenticalToOp, Value: op.varName, Args: args} + case "Filter": funcName, ok := e.Args[0].(*ast.Ident) if !ok { @@ -692,6 +715,8 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { return ir.FilterExpr{Op: ir.FilterRootNodeParentIsOp, Args: args} case "Object.Is": return ir.FilterExpr{Op: ir.FilterVarObjectIsOp, Value: op.varName, Args: args} + case "Object.IsGlobal": + return ir.FilterExpr{Op: ir.FilterVarObjectIsGlobalOp, Value: op.varName} case "Type.HasPointers": return ir.FilterExpr{Op: ir.FilterVarTypeHasPointersOp, Value: op.varName} case "Type.Is": @@ -708,6 +733,8 @@ func (conv *converter) convertFilterExprImpl(e ast.Expr) ir.FilterExpr { return ir.FilterExpr{Op: ir.FilterVarTypeAssignableToOp, Value: op.varName, Args: args} case "Type.Implements": return ir.FilterExpr{Op: ir.FilterVarTypeImplementsOp, Value: op.varName, Args: args} + case "Type.HasMethod": + return ir.FilterExpr{Op: ir.FilterVarTypeHasMethodOp, Value: op.varName, Args: args} } } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go index 2642bb7b9..896fedeaf 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go @@ -83,7 +83,7 @@ type LoadContext struct { // If function returns false, that group will not be included // in the resulting rules set. // Nil filter accepts all rule groups. 
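Taken together, the irconv cases above expose several new Where() predicates to rule authors. A hedged sketch of a rules file that exercises them; the rule names, patterns, and report texts are invented, and only the DSL shapes follow the converter cases shown above:

package gorules

import "github.com/quasilyte/go-ruleguard/dsl"

func newFilters(m dsl.Matcher) {
	// Contains requires the given sub-pattern to occur somewhere inside the
	// captured node (FilterVarContainsOp above).
	m.Match(`if $cond { $body }`).
		Where(m["body"].Contains(`return $_`)).
		Report(`example: if body is a single return`)

	// Object.IsGlobal and Type.IdenticalTo map to FilterVarObjectIsGlobalOp and
	// FilterVarTypeIdenticalToOp; Type.HasMethod("fmt.Stringer.String") would
	// map to FilterVarTypeHasMethodOp.
	m.Match(`$dst = $src`).
		Where(m["dst"].Object.IsGlobal() && m["dst"].Type.IdenticalTo(m["src"])).
		Report(`example: assignment to a package-level variable of the same type`)
}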
- GroupFilter func(string) bool + GroupFilter func(*GoRuleGroup) bool Fset *token.FileSet } @@ -104,6 +104,17 @@ type RunContext struct { Report func(*ReportData) GoVersion GoVersion + + // TruncateLen is a length threshold (in bytes) for interpolated vars in Report() templates. + // + // Truncation removes the part of the string in the middle and replaces it with <...> + // so it meets the max length constraint. + // + // The default value is 60 (implied if value is 0). + // + // Note that this value is ignored for Suggest templates. + // Ruleguard doesn't truncate suggested replacement candidates. + TruncateLen int } type ReportData struct { diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go index 9adeb2b06..8ad239337 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go @@ -7,6 +7,7 @@ import ( "go/ast" "go/build" "go/printer" + "go/token" "io/ioutil" "path/filepath" "reflect" @@ -16,6 +17,7 @@ import ( "github.com/quasilyte/go-ruleguard/ruleguard/goutil" "github.com/quasilyte/go-ruleguard/ruleguard/profiling" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" "github.com/quasilyte/gogrep" "github.com/quasilyte/gogrep/nodetag" ) @@ -28,9 +30,12 @@ type rulesRunner struct { ctx *RunContext rules *goRuleSet + truncateLen int + reportData ReportData - gogrepState gogrep.MatcherState + gogrepState gogrep.MatcherState + gogrepSubState gogrep.MatcherState importer *goImporter @@ -64,21 +69,30 @@ func newRulesRunner(ctx *RunContext, buildContext *build.Context, state *engineS }) gogrepState := gogrep.NewMatcherState() gogrepState.Types = ctx.Types + gogrepSubState := gogrep.NewMatcherState() + gogrepSubState.Types = ctx.Types rr := &rulesRunner{ - bgContext: context.Background(), - ctx: ctx, - importer: importer, - rules: rules, - gogrepState: gogrepState, - nodePath: newNodePath(), + bgContext: context.Background(), + ctx: ctx, + importer: importer, + rules: rules, + gogrepState: gogrepState, + gogrepSubState: gogrepSubState, + nodePath: newNodePath(), + truncateLen: ctx.TruncateLen, filterParams: filterParams{ - env: state.env.GetEvalEnv(), - importer: importer, - ctx: ctx, + typematchState: typematch.NewMatcherState(), + env: state.env.GetEvalEnv(), + importer: importer, + ctx: ctx, }, } + if ctx.TruncateLen == 0 { + rr.truncateLen = 60 + } rr.filterParams.nodeText = rr.nodeText rr.filterParams.nodePath = &rr.nodePath + rr.filterParams.gogrepSubState = &rr.gogrepSubState return rr } @@ -370,57 +384,107 @@ func (rr *rulesRunner) collectImports(f *ast.File) { } func (rr *rulesRunner) renderMessage(msg string, m matchData, truncate bool) string { - var buf strings.Builder - if strings.Contains(msg, "$$") { - buf.Write(rr.nodeText(m.Node())) - msg = strings.ReplaceAll(msg, "$$", buf.String()) - } - if len(m.CaptureList()) == 0 { + if !strings.Contains(msg, "$") { return msg } - capture := make([]gogrep.CapturedNode, len(m.CaptureList())) - copy(capture, m.CaptureList()) - sort.Slice(capture, func(i, j int) bool { - return len(capture[i].Name) > len(capture[j].Name) - }) + var capture []gogrep.CapturedNode + if len(m.CaptureList()) != 0 { + capture = make([]gogrep.CapturedNode, 0, len(m.CaptureList())) + for _, c := range m.CaptureList() { + n := c.Node + // Some captured nodes are typed, but nil. 
+ // We can't really get their text, so skip them here. + // For example, pattern `func $_() $results { $*_ }` may + // match a nil *ast.FieldList for $results if executed + // against a function with no results. + if reflect.ValueOf(n).IsNil() && !gogrep.IsEmptyNodeSlice(n) { + continue + } + capture = append(capture, c) + } + if len(capture) > 1 { + sort.Slice(capture, func(i, j int) bool { + return len(capture[i].Name) > len(capture[j].Name) + }) + } + } - for _, c := range capture { - n := c.Node - // Some captured nodes are typed, but nil. - // We can't really get their text, so skip them here. - // For example, pattern `func $_() $results { $*_ }` may - // match a nil *ast.FieldList for $results if executed - // against a function with no results. - if reflect.ValueOf(n).IsNil() && !gogrep.IsEmptyNodeSlice(n) { - continue + result := make([]byte, 0, len(msg)*2) + i := 0 + for { + j := strings.IndexByte(msg[i:], '$') + if j == -1 { + result = append(result, msg[i:]...) + break } - key := "$" + c.Name - if !strings.Contains(msg, key) { - continue + dollarPos := i + j + result = append(result, msg[i:dollarPos]...) + var n ast.Node + var nameLen int + if strings.HasPrefix(msg[dollarPos+1:], "$") { + n = m.Node() + nameLen = 1 + } else { + for _, c := range capture { + if strings.HasPrefix(msg[dollarPos+1:], c.Name) { + n = c.Node + nameLen = len(c.Name) + break + } + } } - buf.Reset() - buf.Write(rr.nodeText(n)) - replacement := buf.String() - if truncate { - replacement = truncateText(replacement, 60) + if n != nil { + text := rr.nodeText(n) + text = rr.fixedText(text, n, msg[dollarPos+1+nameLen:]) + if truncate { + text = truncateText(text, rr.truncateLen) + } + result = append(result, text...) + } else { + result = append(result, '$') } - msg = strings.ReplaceAll(msg, key, replacement) + i = dollarPos + len("$") + nameLen } - return msg + + return string(result) } -func truncateText(s string, maxLen int) string { - const placeholder = "<...>" - if len(s) <= maxLen-len(placeholder) { +func (rr *rulesRunner) fixedText(text []byte, n ast.Node, following string) []byte { + // pattern=`$x.y` $x=`&buf` following=`.y` + // Insert $x as `buf`, so we get `buf.y` instead of incorrect `&buf.y`. + if n, ok := n.(*ast.UnaryExpr); ok && n.Op == token.AND { + shouldFix := false + switch n.X.(type) { + case *ast.Ident, *ast.IndexExpr, *ast.SelectorExpr: + shouldFix = true + } + if shouldFix && strings.HasPrefix(following, ".") { + return bytes.TrimPrefix(text, []byte("&")) + } + } + + return text +} + +var longTextPlaceholder = []byte("<...>") + +func truncateText(s []byte, maxLen int) []byte { + if len(s) <= maxLen-len(longTextPlaceholder) { return s } - maxLen -= len(placeholder) + maxLen -= len(longTextPlaceholder) leftLen := maxLen / 2 rightLen := (maxLen % 2) + leftLen left := s[:leftLen] right := s[len(s)-rightLen:] - return left + placeholder + right + + result := make([]byte, 0, len(left)+len(longTextPlaceholder)+len(right)) + result = append(result, left...) + result = append(result, longTextPlaceholder...) + result = append(result, right...) 
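// Worked example (editorial, not part of the upstream patch): with the default
// TruncateLen of 60, a 100-byte interpolated variable is rendered as its first
// 27 bytes, then "<...>", then its last 28 bytes: maxLen becomes 60-5=55,
// leftLen=55/2=27, rightLen=(55%2)+27=28, for a 60-byte result. Per the
// RunContext docs above, Suggest templates bypass this truncation entirely.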
+ + return result } var multiMatchTags = [nodetag.NumBuckets]bool{ diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go index a909e3e8c..672b6b45b 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go @@ -16,16 +16,17 @@ func _() { _ = x[opArray-5] _ = x[opMap-6] _ = x[opChan-7] - _ = x[opFunc-8] - _ = x[opStructNoSeq-9] - _ = x[opStruct-10] - _ = x[opAnyInterface-11] - _ = x[opNamed-12] + _ = x[opFuncNoSeq-8] + _ = x[opFunc-9] + _ = x[opStructNoSeq-10] + _ = x[opStruct-11] + _ = x[opAnyInterface-12] + _ = x[opNamed-13] } -const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncopStructNoSeqopStructopAnyInterfaceopNamed" +const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncNoSeqopFuncopStructNoSeqopStructopAnyInterfaceopNamed" -var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 66, 79, 87, 101, 108} +var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 71, 77, 90, 98, 112, 119} func (i patternOp) String() string { if i < 0 || i >= patternOp(len(_patternOp_index)-1) { diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go index 4749106ec..b74740378 100644 --- a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go @@ -24,6 +24,7 @@ const ( opArray opMap opChan + opFuncNoSeq opFunc opStructNoSeq opStruct @@ -31,10 +32,32 @@ const ( opNamed ) -type Pattern struct { +type MatcherState struct { typeMatches map[string]types.Type int64Matches map[string]int64 +} + +func NewMatcherState() *MatcherState { + return &MatcherState{ + typeMatches: map[string]types.Type{}, + int64Matches: map[string]int64{}, + } +} + +func (state *MatcherState) reset() { + if len(state.int64Matches) != 0 { + for k := range state.int64Matches { + delete(state.int64Matches, k) + } + } + if len(state.typeMatches) != 0 { + for k := range state.typeMatches { + delete(state.typeMatches, k) + } + } +} +type Pattern struct { root *pattern } @@ -106,9 +129,7 @@ func Parse(ctx *Context, s string) (*Pattern, error) { return nil, fmt.Errorf("can't convert %s type expression", s) } p := &Pattern{ - typeMatches: map[string]types.Type{}, - int64Matches: map[string]int64{}, - root: root, + root: root, } return p, nil } @@ -167,6 +188,9 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern { if !ok { return nil } + if pkg.Name == "unsafe" && e.Sel.Name == "Pointer" { + return &pattern{op: opBuiltinType, value: types.Typ[types.UnsafePointer]} + } pkgPath, ok := ctx.Itab.Lookup(pkg.Name) if !ok { return nil @@ -253,6 +277,7 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern { return parseExpr(ctx, e.X) case *ast.FuncType: + hasSeq := false var params []*pattern var results []*pattern if e.Params != nil { @@ -264,6 +289,9 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern { if len(field.Names) != 0 { return nil } + if p.op == opVarSeq { + hasSeq = true + } params = append(params, p) } } @@ -276,11 +304,18 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern { if len(field.Names) != 0 { return nil } + if p.op == 
opVarSeq { + hasSeq = true + } results = append(results, p) } } + op := opFuncNoSeq + if hasSeq { + op = opFunc + } return &pattern{ - op: opFunc, + op: op, value: len(params), subs: append(params, results...), } @@ -330,21 +365,12 @@ func parseExpr(ctx *Context, e ast.Expr) *pattern { } // MatchIdentical returns true if the go typ matches pattern p. -func (p *Pattern) MatchIdentical(typ types.Type) bool { - p.reset() - return p.matchIdentical(p.root, typ) +func (p *Pattern) MatchIdentical(state *MatcherState, typ types.Type) bool { + state.reset() + return p.matchIdentical(state, p.root, typ) } -func (p *Pattern) reset() { - if len(p.int64Matches) != 0 { - p.int64Matches = map[string]int64{} - } - if len(p.typeMatches) != 0 { - p.typeMatches = map[string]types.Type{} - } -} - -func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { +func (p *Pattern) matchIdenticalFielder(state *MatcherState, subs []*pattern, f fielder) bool { // TODO: do backtracking. numFields := f.NumFields() @@ -372,7 +398,7 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { matchAny = false i++ // Lookahead for non-greedy matching. - case i+1 < len(subs) && p.matchIdentical(subs[i+1], f.Field(fieldsMatched).Type()): + case i+1 < len(subs) && p.matchIdentical(state, subs[i+1], f.Field(fieldsMatched).Type()): matchAny = false i += 2 fieldsMatched++ @@ -382,7 +408,7 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { continue } - if fieldsLeft == 0 || !p.matchIdentical(pat, f.Field(fieldsMatched).Type()) { + if fieldsLeft == 0 || !p.matchIdentical(state, pat, f.Field(fieldsMatched).Type()) { return false } i++ @@ -392,16 +418,16 @@ func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { return numFields == fieldsMatched } -func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { +func (p *Pattern) matchIdentical(state *MatcherState, sub *pattern, typ types.Type) bool { switch sub.op { case opVar: name := sub.value.(string) if name == "_" { return true } - y, ok := p.typeMatches[name] + y, ok := state.typeMatches[name] if !ok { - p.typeMatches[name] = typ + state.typeMatches[name] = typ return true } if y == nil { @@ -417,14 +443,14 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { if !ok { return false } - return p.matchIdentical(sub.subs[0], typ.Elem()) + return p.matchIdentical(state, sub.subs[0], typ.Elem()) case opSlice: typ, ok := typ.(*types.Slice) if !ok { return false } - return p.matchIdentical(sub.subs[0], typ.Elem()) + return p.matchIdentical(state, sub.subs[0], typ.Elem()) case opArray: typ, ok := typ.(*types.Array) @@ -438,25 +464,25 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { wantLen = typ.Len() break } - length, ok := p.int64Matches[v] + length, ok := state.int64Matches[v] if ok { wantLen = length } else { - p.int64Matches[v] = typ.Len() + state.int64Matches[v] = typ.Len() wantLen = typ.Len() } case int64: wantLen = v } - return wantLen == typ.Len() && p.matchIdentical(sub.subs[0], typ.Elem()) + return wantLen == typ.Len() && p.matchIdentical(state, sub.subs[0], typ.Elem()) case opMap: typ, ok := typ.(*types.Map) if !ok { return false } - return p.matchIdentical(sub.subs[0], typ.Key()) && - p.matchIdentical(sub.subs[1], typ.Elem()) + return p.matchIdentical(state, sub.subs[0], typ.Key()) && + p.matchIdentical(state, sub.subs[1], typ.Elem()) case opChan: typ, ok := typ.(*types.Chan) @@ -464,7 +490,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ 
types.Type) bool { return false } dir := sub.value.(types.ChanDir) - return dir == typ.Dir() && p.matchIdentical(sub.subs[0], typ.Elem()) + return dir == typ.Dir() && p.matchIdentical(state, sub.subs[0], typ.Elem()) case opNamed: typ, ok := typ.(*types.Named) @@ -485,7 +511,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { path := strings.SplitAfter(obj.Pkg().Path(), "/vendor/") return path[len(path)-1] == pkgPath && typeName == obj.Name() - case opFunc: + case opFuncNoSeq: typ, ok := typ.(*types.Signature) if !ok { return false @@ -500,17 +526,35 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { return false } for i := 0; i < typ.Params().Len(); i++ { - if !p.matchIdentical(params[i], typ.Params().At(i).Type()) { + if !p.matchIdentical(state, params[i], typ.Params().At(i).Type()) { return false } } for i := 0; i < typ.Results().Len(); i++ { - if !p.matchIdentical(results[i], typ.Results().At(i).Type()) { + if !p.matchIdentical(state, results[i], typ.Results().At(i).Type()) { return false } } return true + case opFunc: + typ, ok := typ.(*types.Signature) + if !ok { + return false + } + numParams := sub.value.(int) + params := sub.subs[:numParams] + results := sub.subs[numParams:] + adapter := tupleFielder{x: typ.Params()} + if !p.matchIdenticalFielder(state, params, &adapter) { + return false + } + adapter.x = typ.Results() + if !p.matchIdenticalFielder(state, results, &adapter) { + return false + } + return true + case opStructNoSeq: typ, ok := typ.(*types.Struct) if !ok { @@ -520,7 +564,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { return false } for i, member := range sub.subs { - if !p.matchIdentical(member, typ.Field(i).Type()) { + if !p.matchIdentical(state, member, typ.Field(i).Type()) { return false } } @@ -531,7 +575,7 @@ func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { if !ok { return false } - if !p.matchIdenticalFielder(sub.subs, typ) { + if !p.matchIdenticalFielder(state, sub.subs, typ) { return false } return true @@ -549,3 +593,10 @@ type fielder interface { Field(i int) *types.Var NumFields() int } + +type tupleFielder struct { + x *types.Tuple +} + +func (tup *tupleFielder) Field(i int) *types.Var { return tup.x.At(i) } +func (tup *tupleFielder) NumFields() int { return tup.x.Len() } diff --git a/vendor/github.com/quasilyte/gogrep/compile.go b/vendor/github.com/quasilyte/gogrep/compile.go index c79f290ae..cc60c05a6 100644 --- a/vendor/github.com/quasilyte/gogrep/compile.go +++ b/vendor/github.com/quasilyte/gogrep/compile.go @@ -127,6 +127,10 @@ func (c *compiler) compileNode(n ast.Node) { c.compileDeclSlice(n) case ExprSlice: c.compileExprSlice(n) + case *rangeClause: + c.compileRangeClause(n) + case *rangeHeader: + c.compileRangeHeader(n) default: panic(c.errorf(n, "compileNode: unexpected %T", n)) } @@ -1127,6 +1131,37 @@ func (c *compiler) compileExprSlice(exprs ExprSlice) { c.emitInstOp(opEnd) } +func (c *compiler) compileRangeClause(clause *rangeClause) { + c.emitInstOp(opRangeClause) + c.compileExpr(clause.X) +} + +func (c *compiler) compileRangeHeader(h *rangeHeader) { + n := h.Node + switch { + case n.Key == nil && n.Value == nil: + c.emitInstOp(opRangeHeader) + c.compileExpr(n.X) + case n.Key != nil && n.Value == nil: + c.emitInst(instruction{ + op: opRangeKeyHeader, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileExpr(n.Key) + c.compileExpr(n.X) + case n.Key != nil && n.Value != nil: + c.emitInst(instruction{ + op: 
opRangeKeyValueHeader, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileExpr(n.Key) + c.compileExpr(n.Value) + c.compileExpr(n.X) + default: + panic(c.errorf(n, "unexpected range header")) + } +} + func pickOp(cond bool, ifTrue, ifFalse operation) operation { if cond { return ifTrue diff --git a/vendor/github.com/quasilyte/gogrep/gen_operations.go b/vendor/github.com/quasilyte/gogrep/gen_operations.go index 4f70b0cbb..8de59980b 100644 --- a/vendor/github.com/quasilyte/gogrep/gen_operations.go +++ b/vendor/github.com/quasilyte/gogrep/gen_operations.go @@ -151,6 +151,11 @@ var opPrototypes = []operationProto{ {name: "RangeKeyStmt", tag: "RangeStmt", args: "key x block", value: "token.Token | ':=' or '='", example: "for key := range x {}"}, {name: "RangeKeyValueStmt", tag: "RangeStmt", args: "key value x block", value: "token.Token | ':=' or '='", example: "for key, value := range x {}"}, + {name: "RangeClause", tag: "RangeStmt", args: "x", example: "range x"}, + {name: "RangeHeader", tag: "RangeStmt", args: "x", example: "for range x"}, + {name: "RangeKeyHeader", tag: "RangeStmt", args: "key x", value: "token.Token | ':=' or '='", example: "for key := range x"}, + {name: "RangeKeyValueHeader", tag: "RangeStmt", args: "key value x", value: "token.Token | ':=' or '='", example: "for key, value := range x"}, + + {name: "FieldList", args: "fields..."}, {name: "UnnamedField", args: "typ", example: "type"}, {name: "SimpleField", args: "typ", valueIndex: "strings | field name", example: "name type"}, diff --git a/vendor/github.com/quasilyte/gogrep/gogrep.go b/vendor/github.com/quasilyte/gogrep/gogrep.go index 1a91e49ff..313a9a251 100644 --- a/vendor/github.com/quasilyte/gogrep/gogrep.go +++ b/vendor/github.com/quasilyte/gogrep/gogrep.go @@ -1,6 +1,7 @@ package gogrep import ( + "errors" "go/ast" "go/token" "go/types" @@ -34,14 +35,36 @@ func (data MatchData) CapturedByName(name string) (ast.Node, bool) { return findNamed(data.Capture, name) } +type PartialNode struct { + X ast.Node + + from token.Pos + to token.Pos +} + +func (p *PartialNode) Pos() token.Pos { return p.from } +func (p *PartialNode) End() token.Pos { return p.to } + type MatcherState struct { Types *types.Info + // CapturePreset is a set of key-value pairs to use in the next match calls + // as predefined variables. + // For example, if the pattern is `$x = f()` and CapturePreset contains + // a pair with Name=x and value of `obj.x`, then the above-mentioned + // pattern will only match `obj.x = f()` statements. + // + // If nil, the default behavior will be used. The first syntax element + // matching the matcher var will be captured.
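For orientation, a hedged sketch of how a caller could use CapturePreset; `pat`, `objX`, and `stmt` are hypothetical inputs, and `Pattern.MatchNode` is assumed to behave like the internal matcher shown later in this diff, so treat this as an illustration of the contract rather than documented API:

```go
package example

import (
	"go/ast"

	"github.com/quasilyte/gogrep"
)

// matchWithPreset pins $x before matching, so a pattern such as `$x = f()`
// only matches assignments whose left-hand side equals the preset node.
func matchWithPreset(pat *gogrep.Pattern, objX ast.Expr, stmt ast.Stmt) {
	state := gogrep.NewMatcherState()
	state.CapturePreset = []gogrep.CapturedNode{
		{Name: "x", Node: objX}, // pin $x to a previously captured expression
	}
	pat.MatchNode(&state, stmt, func(m gogrep.MatchData) {
		_ = m // only statements assigning to objX reach this callback
	})
}
```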
+ CapturePreset []CapturedNode + // node values recorded by name, excluding "_" (used only by the // actual matching phase) capture []CapturedNode pc int + + partial PartialNode } func NewMatcherState() MatcherState { @@ -106,6 +129,9 @@ func Compile(config CompileConfig) (*Pattern, PatternInfo, error) { if err != nil { return nil, info, err } + if n == nil { + return nil, info, errors.New("invalid pattern syntax") + } var c compiler c.config = config prog, err := c.Compile(n, &info) @@ -116,6 +142,37 @@ func Compile(config CompileConfig) (*Pattern, PatternInfo, error) { return &Pattern{m: m}, info, nil } +func Walk(root ast.Node, fn func(n ast.Node) bool) { + switch root := root.(type) { + case ExprSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + case stmtSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + case fieldSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + case identSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + case specSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + case declSlice: + for _, e := range root { + ast.Inspect(e, fn) + } + default: + ast.Inspect(root, fn) + } +} + func newPatternInfo() PatternInfo { return PatternInfo{ Vars: make(map[string]struct{}), diff --git a/vendor/github.com/quasilyte/gogrep/match.go b/vendor/github.com/quasilyte/gogrep/match.go index 7e1e86dbe..d927beff3 100644 --- a/vendor/github.com/quasilyte/gogrep/match.go +++ b/vendor/github.com/quasilyte/gogrep/match.go @@ -37,6 +37,13 @@ func (m *matcher) ifaceValue(inst instruction) interface{} { return m.prog.ifaces[inst.valueIndex] } +func (m *matcher) resetCapture(state *MatcherState) { + state.capture = state.capture[:0] + if state.CapturePreset != nil { + state.capture = append(state.capture, state.CapturePreset...) 
+ } +} + +func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchData)) { state.pc = 0 inst := m.nextInst(state) @@ -63,8 +70,16 @@ func (m *matcher) MatchNode(state *MatcherState, n ast.Node, accept func(MatchDa if n, ok := n.(*ast.File); ok { m.walkDeclSlice(state, n.Decls, accept) } + case opRangeClause: + m.matchRangeClause(state, n, accept) + case opRangeHeader: + m.matchRangeHeader(state, n, accept) + case opRangeKeyHeader: + m.matchRangeKeyHeader(state, inst, n, accept) + case opRangeKeyValueHeader: + m.matchRangeKeyValueHeader(state, inst, n, accept) default: - state.capture = state.capture[:0] + m.resetCapture(state) if m.matchNodeWithInst(state, inst, n) { accept(MatchData{ Capture: state.capture, @@ -91,7 +106,7 @@ func (m *matcher) walkNodeSlice(state *MatcherState, nodes NodeSlice, accept fun from := 0 for { state.pc = 1 // FIXME: this is a kludge - state.capture = state.capture[:0] + m.resetCapture(state) matched, offset := m.matchNodeList(state, nodes.slice(from, sliceLen), true) if matched == nil { break @@ -114,6 +129,7 @@ func (m *matcher) matchNamed(state *MatcherState, name string, n ast.Node) bool state.capture = append(state.capture, CapturedNode{Name: name, Node: n}) return true } + + return equalNodes(prev, n) } @@ -724,6 +740,98 @@ func (m *matcher) matchNodeList(state *MatcherState, nodes NodeSlice, partial bo return nodes.slice(partialStart, partialEnd), partialEnd + 1 } +func (m *matcher) matchRangeClause(state *MatcherState, n ast.Node, accept func(MatchData)) { + rng, ok := n.(*ast.RangeStmt) + if !ok { + return + } + m.resetCapture(state) + if !m.matchNode(state, rng.X) { + return + } + + // Now the fun begins: there is no Range pos in RangeStmt, so we need + // to make our best guess to find it. + // See https://github.com/golang/go/issues/50429 + // + // In gogrep we don't have []byte sources available, and + // it would be cumbersome to walk bytes manually to find the "range" keyword. + // What we can do is hope that the code is: + // 1. Properly gofmt-ed. + // 2. There are no freefloating artifacts between TokPos and "range". + var from int + if rng.TokPos != token.NoPos { + // Start from the end of the '=' or ':=' token. + from = int(rng.TokPos + 1) + if rng.Tok == token.DEFINE { + from++ // ':=' is 1 byte longer than '=' + } + // Now suppose we have 'for _, x := range xs {...}' + // If this is true, then `xs.Pos.Offset - len(" range ")` would + // lead us to the current 'from' value. + // It's syntactically correct to have `:=range`, so we don't + // unconditionally add a space here. + if int(rng.X.Pos())-len(" range ") == from { + // This means that there is exactly one space between Tok and "range". + // There are some awful cases where this might break, but let's + // not think about them too much. + from += len(" ") + } + } else { + // `for range xs {...}` form. + // There should be at least 1 space between "for" and "range".
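The position arithmetic described in the comment above can be reproduced with a small stdlib-only program; this is an illustrative sketch rather than gogrep code, and it assumes a gofmt-ed `for _, x := range xs` statement:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	src := "package p\n\nfunc f(xs []int) {\n\tfor _, x := range xs {\n\t\t_ = x\n\t}\n}\n"
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Inspect(file, func(n ast.Node) bool {
		rng, ok := n.(*ast.RangeStmt)
		if !ok {
			return true
		}
		// Start right after the '=' (or ':=') token.
		from := int(rng.TokPos + 1)
		if rng.Tok == token.DEFINE {
			from++ // ':=' is 1 byte longer than '='
		}
		// gofmt leaves exactly one space before "range", so skip it.
		if int(rng.X.Pos())-len(" range ") == from {
			from += len(" ")
		}
		start := fset.Position(token.Pos(from)).Offset
		end := fset.Position(rng.X.End()).Offset
		fmt.Printf("%q\n", src[start:end]) // "range xs"
		return true
	})
}
```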
+ from = int(rng.For) + len("for ") + } + + state.partial.X = rng + state.partial.from = token.Pos(from) + state.partial.to = rng.X.End() + + accept(MatchData{ + Capture: state.capture, + Node: &state.partial, + }) +} + +func (m *matcher) matchRangeHeader(state *MatcherState, n ast.Node, accept func(MatchData)) { + rng, ok := n.(*ast.RangeStmt) + if ok && rng.Key == nil && rng.Value == nil && m.matchNode(state, rng.X) { + m.setRangeHeaderPos(state, rng) + accept(MatchData{ + Capture: state.capture, + Node: &state.partial, + }) + } +} + +func (m *matcher) matchRangeKeyHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) { + rng, ok := n.(*ast.RangeStmt) + if ok && rng.Key != nil && rng.Value == nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.X) { + m.setRangeHeaderPos(state, rng) + accept(MatchData{ + Capture: state.capture, + Node: &state.partial, + }) + } +} + +func (m *matcher) matchRangeKeyValueHeader(state *MatcherState, inst instruction, n ast.Node, accept func(MatchData)) { + rng, ok := n.(*ast.RangeStmt) + if ok && rng.Key != nil && rng.Value != nil && token.Token(inst.value) == rng.Tok && m.matchNode(state, rng.Key) && m.matchNode(state, rng.Value) && m.matchNode(state, rng.X) { + m.setRangeHeaderPos(state, rng) + accept(MatchData{ + Capture: state.capture, + Node: &state.partial, + }) + } +} + +func (m *matcher) setRangeHeaderPos(state *MatcherState, rng *ast.RangeStmt) { + state.partial.X = rng + state.partial.from = rng.Pos() + state.partial.to = rng.Body.Pos() - 1 +} + func findNamed(capture []CapturedNode, name string) (ast.Node, bool) { for _, c := range capture { if c.Name == name { diff --git a/vendor/github.com/quasilyte/gogrep/operation_string.go b/vendor/github.com/quasilyte/gogrep/operation_string.go index 1f8f09c90..fa093266e 100644 --- a/vendor/github.com/quasilyte/gogrep/operation_string.go +++ b/vendor/github.com/quasilyte/gogrep/operation_string.go @@ -106,33 +106,37 @@ func _() { _ = x[opRangeStmt-95] _ = x[opRangeKeyStmt-96] _ = x[opRangeKeyValueStmt-97] - _ = x[opFieldList-98] - _ = x[opUnnamedField-99] - _ = x[opSimpleField-100] - _ = x[opField-101] - _ = x[opMultiField-102] - _ = x[opValueSpec-103] - _ = x[opValueInitSpec-104] - _ = x[opTypedValueInitSpec-105] - _ = x[opTypedValueSpec-106] - _ = x[opTypeSpec-107] - _ = x[opTypeAliasSpec-108] - _ = x[opFuncDecl-109] - _ = x[opMethodDecl-110] - _ = x[opFuncProtoDecl-111] - _ = x[opMethodProtoDecl-112] - _ = x[opDeclStmt-113] - _ = x[opConstDecl-114] - _ = x[opVarDecl-115] - _ = x[opTypeDecl-116] - _ = x[opAnyImportDecl-117] - _ = x[opImportDecl-118] - _ = x[opEmptyPackage-119] + _ = x[opRangeClause-98] + _ = x[opRangeHeader-99] + _ = x[opRangeKeyHeader-100] + _ = x[opRangeKeyValueHeader-101] + _ = x[opFieldList-102] + _ = x[opUnnamedField-103] + _ = x[opSimpleField-104] + _ = x[opField-105] + _ = x[opMultiField-106] + _ = x[opValueSpec-107] + _ = x[opValueInitSpec-108] + _ = x[opTypedValueInitSpec-109] + _ = x[opTypedValueSpec-110] + _ = x[opTypeSpec-111] + _ = x[opTypeAliasSpec-112] + _ = x[opFuncDecl-113] + _ = x[opMethodDecl-114] + _ = x[opFuncProtoDecl-115] + _ = x[opMethodProtoDecl-116] + _ = x[opDeclStmt-117] + _ = x[opConstDecl-118] + _ = x[opVarDecl-119] + _ = x[opTypeDecl-120] + _ = x[opAnyImportDecl-121] + _ = x[opImportDecl-122] + _ = x[opEmptyPackage-123] } -const _operation_name = 
"InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentPkgIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclAnyImportDeclImportDeclEmptyPackage" +const _operation_name = "InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeFieldNodeNamedFieldNodeMultiStmtMultiExprMultiDeclEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentPkgIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprStructTypeInterfaceTypeVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprArgListSimpleArgListVariadicCallExprNonVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtRangeClauseRangeHeaderRangeKeyHeaderRangeKeyValueHeaderFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueSpecValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclDeclStmtConstDeclVarDeclTypeDeclAnyImportDeclImportDeclEmptyPackage" -var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 197, 206, 215, 228, 239, 254, 268, 286, 293, 305, 322, 340, 352, 366, 386, 396, 409, 421, 429, 438, 447, 454, 462, 474, 482, 495, 503, 512, 522, 531, 538, 551, 567, 586, 594, 604, 619, 629, 652, 669, 686, 697, 706, 714, 720, 729, 737, 746, 756, 766, 772, 782, 792, 806, 820, 838, 848, 861, 875, 892, 902, 916, 934, 944, 961, 971, 988, 995, 1006, 1017, 1032, 1043, 1058, 1073, 1092, 1101, 1113, 1130, 1139, 1151, 1162, 1167, 1177, 1186, 1199, 1217, 1231, 1239, 1252, 1260, 1270, 1283, 1298, 1306, 1315, 1322, 1330, 1343, 1353, 1365} +var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 81, 90, 
99, 108, 111, 119, 131, 145, 158, 173, 189, 194, 197, 206, 215, 228, 239, 254, 268, 286, 293, 305, 322, 340, 352, 366, 386, 396, 409, 421, 429, 438, 447, 454, 462, 474, 482, 495, 503, 512, 522, 531, 538, 551, 567, 586, 594, 604, 619, 629, 652, 669, 686, 697, 706, 714, 720, 729, 737, 746, 756, 766, 772, 782, 792, 806, 820, 838, 848, 861, 875, 892, 902, 916, 934, 944, 961, 971, 988, 995, 1006, 1017, 1032, 1043, 1058, 1073, 1092, 1101, 1113, 1130, 1141, 1152, 1166, 1185, 1194, 1206, 1217, 1222, 1232, 1241, 1254, 1272, 1286, 1294, 1307, 1315, 1325, 1338, 1353, 1361, 1370, 1377, 1385, 1398, 1408, 1420} func (i operation) String() string { if i >= operation(len(_operation_index)-1) { diff --git a/vendor/github.com/quasilyte/gogrep/operations.gen.go b/vendor/github.com/quasilyte/gogrep/operations.gen.go index a07285cc3..8ff1fbeb7 100644 --- a/vendor/github.com/quasilyte/gogrep/operations.gen.go +++ b/vendor/github.com/quasilyte/gogrep/operations.gen.go @@ -451,102 +451,124 @@ const ( // Value: token.Token | ':=' or '=' opRangeKeyValueStmt operation = 97 + // Tag: RangeStmt + // Args: x + // Example: range x + opRangeClause operation = 98 + + // Tag: RangeStmt + // Args: x + // Example: for range x + opRangeHeader operation = 99 + + // Tag: RangeStmt + // Args: key x + // Example: for key := range x + // Value: token.Token | ':=' or '=' + opRangeKeyHeader operation = 100 + + // Tag: RangeStmt + // Args: key value x + // Example: for key, value := range x + // Value: token.Token | ':=' or '=' + opRangeKeyValueHeader operation = 101 + // Tag: Unknown // Args: fields... - opFieldList operation = 98 + opFieldList operation = 102 // Tag: Unknown // Args: typ // Example: type - opUnnamedField operation = 99 + opUnnamedField operation = 103 // Tag: Unknown // Args: typ // Example: name type // ValueIndex: strings | field name - opSimpleField operation = 100 + opSimpleField operation = 104 // Tag: Unknown // Args: name typ // Example: $name type - opField operation = 101 + opField operation = 105 // Tag: Unknown // Args: names... typ // Example: name1, name2 type - opMultiField operation = 102 + opMultiField operation = 106 // Tag: ValueSpec // Args: value - opValueSpec operation = 103 + opValueSpec operation = 107 // Tag: ValueSpec // Args: lhs... rhs... // Example: lhs = rhs - opValueInitSpec operation = 104 + opValueInitSpec operation = 108 // Tag: ValueSpec // Args: lhs... type rhs... // Example: lhs typ = rhs - opTypedValueInitSpec operation = 105 + opTypedValueInitSpec operation = 109 // Tag: ValueSpec // Args: lhs... type // Example: lhs typ - opTypedValueSpec operation = 106 + opTypedValueSpec operation = 110 // Tag: TypeSpec // Args: name type // Example: name type - opTypeSpec operation = 107 + opTypeSpec operation = 111 // Tag: TypeSpec // Args: name type // Example: name = type - opTypeAliasSpec operation = 108 + opTypeAliasSpec operation = 112 // Tag: FuncDecl // Args: name type block - opFuncDecl operation = 109 + opFuncDecl operation = 113 // Tag: FuncDecl // Args: recv name type block - opMethodDecl operation = 110 + opMethodDecl operation = 114 // Tag: FuncDecl // Args: name type - opFuncProtoDecl operation = 111 + opFuncProtoDecl operation = 115 // Tag: FuncDecl // Args: recv name type - opMethodProtoDecl operation = 112 + opMethodProtoDecl operation = 116 // Tag: DeclStmt // Args: decl - opDeclStmt operation = 113 + opDeclStmt operation = 117 // Tag: GenDecl // Args: valuespecs... 
- opConstDecl operation = 114 + opConstDecl operation = 118 // Tag: GenDecl // Args: valuespecs... - opVarDecl operation = 115 + opVarDecl operation = 119 // Tag: GenDecl // Args: typespecs... - opTypeDecl operation = 116 + opTypeDecl operation = 120 // Tag: GenDecl - opAnyImportDecl operation = 117 + opAnyImportDecl operation = 121 // Tag: GenDecl // Args: importspecs... - opImportDecl operation = 118 + opImportDecl operation = 122 // Tag: File // Args: name - opEmptyPackage operation = 119 + opEmptyPackage operation = 123 ) type operationInfo struct { @@ -1337,6 +1359,38 @@ var operationInfoTable = [256]operationInfo{ VariadicMap: 0, // 0 SliceIndex: -1, }, + opRangeClause: { + Tag: nodetag.RangeStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + SliceIndex: -1, + }, + opRangeHeader: { + Tag: nodetag.RangeStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + SliceIndex: -1, + }, + opRangeKeyHeader: { + Tag: nodetag.RangeStmt, + NumArgs: 2, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + SliceIndex: -1, + }, + opRangeKeyValueHeader: { + Tag: nodetag.RangeStmt, + NumArgs: 3, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + SliceIndex: -1, + }, opFieldList: { Tag: nodetag.Unknown, NumArgs: 1, diff --git a/vendor/github.com/quasilyte/gogrep/parse.go b/vendor/github.com/quasilyte/gogrep/parse.go index e3d17c8ce..f70c4a8f4 100644 --- a/vendor/github.com/quasilyte/gogrep/parse.go +++ b/vendor/github.com/quasilyte/gogrep/parse.go @@ -46,7 +46,7 @@ func parseExpr(fset *token.FileSet, expr string) (ast.Node, error) { if err != nil { return nil, err } - node, _, err := parseDetectingNode(fset, exprStr) + node, err := parseDetectingNode(fset, exprStr) if err != nil { err = subPosOffsets(err, offs...) return nil, fmt.Errorf("cannot parse expr: %v", err) @@ -128,15 +128,33 @@ func parseType(fset *token.FileSet, src string) (ast.Expr, *ast.File, error) { // one of: *ast.File, ast.Decl, ast.Expr, ast.Stmt, *ast.ValueSpec. // It also returns the *ast.File used for the parsing, so that the returned node // can be easily type-checked. -func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, error) { +func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, error) { file := fset.AddFile("", fset.Base(), len(src)) scan := scanner.Scanner{} scan.Init(file, []byte(src), nil, 0) if _, tok, _ := scan.Scan(); tok == token.EOF { - return nil, nil, fmt.Errorf("empty source code") + return nil, fmt.Errorf("empty source code") } var mainErr error + // Some adhoc patterns first. 
+ if strings.HasPrefix(src, "range ") { + e, err := parser.ParseExpr(src[len("range "):]) + if err == nil && noBadNodes(e) { + return &rangeClause{X: e}, nil + } + } + if strings.HasPrefix(src, "for ") && !strings.HasSuffix(src, "}") { + asStmts := execTmpl(tmplStmts, src+"{}") + f, err := parser.ParseFile(fset, "", asStmts, 0) + if err == nil && noBadNodes(f) { + bl := f.Decls[0].(*ast.FuncDecl).Body + if len(bl.List) == 1 { + return &rangeHeader{Node: bl.List[0].(*ast.RangeStmt)}, nil + } + } + } + // try as a block; otherwise blocks might be mistaken for composite // literals further below asBlock := execTmpl(tmplBlock, src) @@ -144,7 +162,7 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, e bl := f.Decls[0].(*ast.FuncDecl).Body if len(bl.List) == 1 { ifs := bl.List[0].(*ast.IfStmt) - return ifs.Body, f, nil + return ifs.Body, nil } } @@ -154,9 +172,9 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, e vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) cl := vs.Values[0].(*ast.CompositeLit) if len(cl.Elts) == 1 { - return cl.Elts[0], f, nil + return cl.Elts[0], nil } - return ExprSlice(cl.Elts), f, nil + return ExprSlice(cl.Elts), nil } // then try as statements @@ -165,9 +183,9 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, e if err == nil && noBadNodes(f) { bl := f.Decls[0].(*ast.FuncDecl).Body if len(bl.List) == 1 { - return bl.List[0], f, nil + return bl.List[0], nil } - return stmtSlice(bl.List), f, nil + return stmtSlice(bl.List), nil } // Statements is what covers most cases, so it will give // the best overall error message. Show positions @@ -179,29 +197,32 @@ func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, e asDecl := execTmpl(tmplDecl, src) if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) { if len(f.Decls) == 1 { - return f.Decls[0], f, nil + return f.Decls[0], nil } - return declSlice(f.Decls), f, nil + return declSlice(f.Decls), nil } // try as a whole file if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) { - return f, f, nil + return f, nil } // type expressions not yet picked up, for e.g. 
chans and interfaces if typ, f, err := parseType(fset, src); err == nil && noBadNodes(f) { - return typ, f, nil + return typ, nil } // value specs asValSpec := execTmpl(tmplValSpec, src) if f, err := parser.ParseFile(fset, "", asValSpec, 0); err == nil && noBadNodes(f) { - vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) - return vs, f, nil + decl := f.Decls[0].(*ast.GenDecl) + if len(decl.Specs) != 0 { + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + return vs, nil + } } - return nil, nil, mainErr + return nil, mainErr } type posOffset struct { @@ -360,3 +381,17 @@ func decodeWildNode(n ast.Node) varInfo { } return varInfo{} } + +type rangeClause struct { + X ast.Expr +} + +type rangeHeader struct { + Node *ast.RangeStmt +} + +func (*rangeClause) Pos() token.Pos { return 0 } +func (*rangeClause) End() token.Pos { return 0 } + +func (*rangeHeader) Pos() token.Pos { return 0 } +func (*rangeHeader) End() token.Pos { return 0 } diff --git a/vendor/github.com/quasilyte/stdinfo/LICENSE b/vendor/github.com/quasilyte/stdinfo/LICENSE new file mode 100644 index 000000000..87a453862 --- /dev/null +++ b/vendor/github.com/quasilyte/stdinfo/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Iskander (Alex) Sharipov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo.go b/vendor/github.com/quasilyte/stdinfo/stdinfo.go new file mode 100644 index 000000000..040f63445 --- /dev/null +++ b/vendor/github.com/quasilyte/stdinfo/stdinfo.go @@ -0,0 +1,30 @@ +package stdinfo + +type Package struct { + // Name is a package name. + // For "encoding/json" the package name is "json". + Name string + + // Path is a package path, like "encoding/json". + Path string + + // Freq is a package import frequency approximation. + // A value of -1 means "unknown". + Freq int +} + +// PathByName maps a std package name to its package path. +// +// For packages with multiple choices, like "template", +// only the more common one is accessible ("text/template" in this case). +// +// This map doesn't contain extremely rare packages either. +// Use PackageList variable if you want to construct a different mapping. +// +// It's exported as map to make it easier to re-use it in libraries +// without copying. +var PathByName = generatedPathByName + +// PackagesList is a list of std packages information. +// It's sorted by a package name. 
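A quick sketch of how this new stdinfo dependency is typically consumed, using only the identifiers introduced above:

```go
package main

import (
	"fmt"

	"github.com/quasilyte/stdinfo"
)

func main() {
	// Resolve a std package name to its import path.
	fmt.Println(stdinfo.PathByName["json"]) // encoding/json

	// PackagesList is sorted by package name.
	for _, p := range stdinfo.PackagesList[:3] {
		fmt.Printf("%s -> %s (freq %d)\n", p.Name, p.Path, p.Freq)
	}
}
```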
+var PackagesList = generatedPackagesList diff --git a/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go b/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go new file mode 100644 index 000000000..ecfff9b6c --- /dev/null +++ b/vendor/github.com/quasilyte/stdinfo/stdinfo_gen.go @@ -0,0 +1,274 @@ +// Code generated by "script/gen.go"; DO NOT EDIT. + +package stdinfo + +var generatedPathByName = map[string]string{ + "fmt": "fmt", // Freq=15795 + "testing": "testing", // Freq=12807 + "context": "context", // Freq=10797 + "time": "time", // Freq=8900 + "strings": "strings", // Freq=8852 + "os": "os", // Freq=5712 + "bytes": "bytes", // Freq=4129 + "io": "io", // Freq=3981 + "http": "net/http", // Freq=3691 + "sync": "sync", // Freq=3492 + "errors": "errors", // Freq=3107 + "strconv": "strconv", // Freq=3076 + "reflect": "reflect", // Freq=3025 + "filepath": "path/filepath", // Freq=2843 + "json": "encoding/json", // Freq=2537 + "sort": "sort", // Freq=2382 + "ioutil": "io/ioutil", // Freq=2164 + "net": "net", // Freq=2025 + "math": "math", // Freq=1746 + "url": "net/url", // Freq=1411 + "regexp": "regexp", // Freq=1320 + "runtime": "runtime", // Freq=1318 + "log": "log", // Freq=1149 + "flag": "flag", // Freq=1002 + "path": "path", // Freq=993 + "unsafe": "unsafe", // Freq=992 + "rand": "math/rand", // Freq=981 + "syscall": "syscall", // Freq=902 + "atomic": "sync/atomic", // Freq=804 + "bufio": "bufio", // Freq=695 + "httptest": "net/http/httptest", // Freq=676 + "exec": "os/exec", // Freq=676 + "binary": "encoding/binary", // Freq=476 + "tls": "crypto/tls", // Freq=475 + "token": "go/token", // Freq=471 + "utf8": "unicode/utf8", // Freq=404 + "base64": "encoding/base64", // Freq=383 + "ast": "go/ast", // Freq=373 + "x509": "crypto/x509", // Freq=357 + "hex": "encoding/hex", // Freq=340 + "unicode": "unicode", // Freq=309 + "types": "go/types", // Freq=309 + "big": "math/big", // Freq=230 + "sha256": "crypto/sha256", // Freq=227 + "template": "text/template", // Freq=211 + "fs": "io/fs", // Freq=162 + "parser": "go/parser", // Freq=160 + "sql": "database/sql", // Freq=157 + "gzip": "compress/gzip", // Freq=150 + "signal": "os/signal", // Freq=139 + "pem": "encoding/pem", // Freq=137 + "hash": "hash", // Freq=137 + "crypto": "crypto", // Freq=132 + "build": "go/build", // Freq=121 + "debug": "runtime/debug", // Freq=121 + "bits": "math/bits", // Freq=120 + "constant": "go/constant", // Freq=120 + "xml": "encoding/xml", // Freq=118 + "tabwriter": "text/tabwriter", // Freq=116 + "md5": "crypto/md5", // Freq=110 + "rsa": "crypto/rsa", // Freq=103 + "format": "go/format", // Freq=88 + "sha1": "crypto/sha1", // Freq=85 + "driver": "database/sql/driver", // Freq=81 + "pkix": "crypto/x509/pkix", // Freq=80 + "heap": "container/heap", // Freq=78 + "tar": "archive/tar", // Freq=77 + "ecdsa": "crypto/ecdsa", // Freq=75 + "cipher": "crypto/cipher", // Freq=74 + "crc32": "hash/crc32", // Freq=70 + "gob": "encoding/gob", // Freq=65 + "elliptic": "crypto/elliptic", // Freq=60 + "subtle": "crypto/subtle", // Freq=54 + "zip": "archive/zip", // Freq=54 + "aes": "crypto/aes", // Freq=53 + "mime": "mime", // Freq=51 + "pprof": "runtime/pprof", // Freq=47 + "textproto": "net/textproto", // Freq=47 + "image": "image", // Freq=45 + "fnv": "hash/fnv", // Freq=45 + "hmac": "crypto/hmac", // Freq=45 + "httputil": "net/http/httputil", // Freq=44 + "elf": "debug/elf", // Freq=44 + "encoding": "encoding", // Freq=41 + "sha512": "crypto/sha512", // Freq=41 + "cmplx": "math/cmplx", // Freq=40 + 
"color": "image/color", // Freq=38 + "html": "html", // Freq=37 + "expvar": "expvar", // Freq=34 + "embed": "embed", // Freq=32 + "csv": "encoding/csv", // Freq=31 + "importer": "go/importer", // Freq=31 + "multipart": "mime/multipart", // Freq=30 + "printer": "go/printer", // Freq=27 + "syslog": "log/syslog", // Freq=27 + "asn1": "encoding/asn1", // Freq=27 + "list": "container/list", // Freq=27 + "scanner": "go/scanner", // Freq=25 + "ed25519": "crypto/ed25519", // Freq=25 + "dwarf": "debug/dwarf", // Freq=23 + "flate": "compress/flate", // Freq=22 + "zlib": "compress/zlib", // Freq=21 + "png": "image/png", // Freq=20 + "trace": "runtime/trace", // Freq=20 + "httptrace": "net/http/httptrace", // Freq=19 + "utf16": "unicode/utf16", // Freq=19 + "rpc": "net/rpc", // Freq=19 + "macho": "debug/macho", // Freq=16 + "iotest": "testing/iotest", // Freq=15 + "dsa": "crypto/dsa", // Freq=13 + "parse": "text/template/parse", // Freq=13 + "cookiejar": "net/http/cookiejar", // Freq=12 + "fstest": "testing/fstest", // Freq=11 + "jpeg": "image/jpeg", // Freq=11 +} + +var generatedPackagesList = []Package{ + {Name: "adler32", Path: "hash/adler32", Freq: 7}, + {Name: "aes", Path: "crypto/aes", Freq: 53}, + {Name: "ascii85", Path: "encoding/ascii85", Freq: -1}, + {Name: "asn1", Path: "encoding/asn1", Freq: 27}, + {Name: "ast", Path: "go/ast", Freq: 373}, + {Name: "atomic", Path: "sync/atomic", Freq: 804}, + {Name: "base32", Path: "encoding/base32", Freq: 5}, + {Name: "base64", Path: "encoding/base64", Freq: 383}, + {Name: "big", Path: "math/big", Freq: 230}, + {Name: "binary", Path: "encoding/binary", Freq: 476}, + {Name: "bits", Path: "math/bits", Freq: 120}, + {Name: "bufio", Path: "bufio", Freq: 695}, + {Name: "build", Path: "go/build", Freq: 121}, + {Name: "bytes", Path: "bytes", Freq: 4129}, + {Name: "bzip2", Path: "compress/bzip2", Freq: 7}, + {Name: "cgi", Path: "net/http/cgi", Freq: 1}, + {Name: "cgo", Path: "runtime/cgo", Freq: -1}, + {Name: "cipher", Path: "crypto/cipher", Freq: 74}, + {Name: "cmplx", Path: "math/cmplx", Freq: 40}, + {Name: "color", Path: "image/color", Freq: 38}, + {Name: "constant", Path: "go/constant", Freq: 120}, + {Name: "constraint", Path: "go/build/constraint", Freq: 5}, + {Name: "context", Path: "context", Freq: 10797}, + {Name: "cookiejar", Path: "net/http/cookiejar", Freq: 12}, + {Name: "crc32", Path: "hash/crc32", Freq: 70}, + {Name: "crc64", Path: "hash/crc64", Freq: 3}, + {Name: "crypto", Path: "crypto", Freq: 132}, + {Name: "csv", Path: "encoding/csv", Freq: 31}, + {Name: "debug", Path: "runtime/debug", Freq: 121}, + {Name: "des", Path: "crypto/des", Freq: 8}, + {Name: "doc", Path: "go/doc", Freq: 15}, + {Name: "draw", Path: "image/draw", Freq: 7}, + {Name: "driver", Path: "database/sql/driver", Freq: 81}, + {Name: "dsa", Path: "crypto/dsa", Freq: 13}, + {Name: "dwarf", Path: "debug/dwarf", Freq: 23}, + {Name: "ecdsa", Path: "crypto/ecdsa", Freq: 75}, + {Name: "ed25519", Path: "crypto/ed25519", Freq: 25}, + {Name: "elf", Path: "debug/elf", Freq: 44}, + {Name: "elliptic", Path: "crypto/elliptic", Freq: 60}, + {Name: "embed", Path: "embed", Freq: 32}, + {Name: "encoding", Path: "encoding", Freq: 41}, + {Name: "errors", Path: "errors", Freq: 3107}, + {Name: "exec", Path: "os/exec", Freq: 676}, + {Name: "expvar", Path: "expvar", Freq: 34}, + {Name: "fcgi", Path: "net/http/fcgi", Freq: 2}, + {Name: "filepath", Path: "path/filepath", Freq: 2843}, + {Name: "flag", Path: "flag", Freq: 1002}, + {Name: "flate", Path: "compress/flate", Freq: 22}, + {Name: "fmt", Path: "fmt", 
Freq: 15795}, + {Name: "fnv", Path: "hash/fnv", Freq: 45}, + {Name: "format", Path: "go/format", Freq: 88}, + {Name: "fs", Path: "io/fs", Freq: 162}, + {Name: "fstest", Path: "testing/fstest", Freq: 11}, + {Name: "gif", Path: "image/gif", Freq: 5}, + {Name: "gob", Path: "encoding/gob", Freq: 65}, + {Name: "gosym", Path: "debug/gosym", Freq: 3}, + {Name: "gzip", Path: "compress/gzip", Freq: 150}, + {Name: "hash", Path: "hash", Freq: 137}, + {Name: "heap", Path: "container/heap", Freq: 78}, + {Name: "hex", Path: "encoding/hex", Freq: 340}, + {Name: "hmac", Path: "crypto/hmac", Freq: 45}, + {Name: "html", Path: "html", Freq: 37}, + {Name: "http", Path: "net/http", Freq: 3691}, + {Name: "httptest", Path: "net/http/httptest", Freq: 676}, + {Name: "httptrace", Path: "net/http/httptrace", Freq: 19}, + {Name: "httputil", Path: "net/http/httputil", Freq: 44}, + {Name: "image", Path: "image", Freq: 45}, + {Name: "importer", Path: "go/importer", Freq: 31}, + {Name: "io", Path: "io", Freq: 3981}, + {Name: "iotest", Path: "testing/iotest", Freq: 15}, + {Name: "ioutil", Path: "io/ioutil", Freq: 2164}, + {Name: "jpeg", Path: "image/jpeg", Freq: 11}, + {Name: "json", Path: "encoding/json", Freq: 2537}, + {Name: "jsonrpc", Path: "net/rpc/jsonrpc", Freq: -1}, + {Name: "list", Path: "container/list", Freq: 27}, + {Name: "log", Path: "log", Freq: 1149}, + {Name: "lzw", Path: "compress/lzw", Freq: 3}, + {Name: "macho", Path: "debug/macho", Freq: 16}, + {Name: "mail", Path: "net/mail", Freq: 7}, + {Name: "maphash", Path: "hash/maphash", Freq: 1}, + {Name: "math", Path: "math", Freq: 1746}, + {Name: "md5", Path: "crypto/md5", Freq: 110}, + {Name: "metrics", Path: "runtime/metrics", Freq: 3}, + {Name: "mime", Path: "mime", Freq: 51}, + {Name: "multipart", Path: "mime/multipart", Freq: 30}, + {Name: "net", Path: "net", Freq: 2025}, + {Name: "os", Path: "os", Freq: 5712}, + {Name: "palette", Path: "image/color/palette", Freq: 4}, + {Name: "parse", Path: "text/template/parse", Freq: 13}, + {Name: "parser", Path: "go/parser", Freq: 160}, + {Name: "path", Path: "path", Freq: 993}, + {Name: "pe", Path: "debug/pe", Freq: 12}, + {Name: "pem", Path: "encoding/pem", Freq: 137}, + {Name: "pkix", Path: "crypto/x509/pkix", Freq: 80}, + {Name: "plan9obj", Path: "debug/plan9obj", Freq: 1}, + {Name: "plugin", Path: "plugin", Freq: 4}, + {Name: "png", Path: "image/png", Freq: 20}, + {Name: "pprof", Path: "runtime/pprof", Freq: 47}, + {Name: "pprof", Path: "net/http/pprof", Freq: 33}, + {Name: "printer", Path: "go/printer", Freq: 27}, + {Name: "quick", Path: "testing/quick", Freq: 51}, + {Name: "quotedprintable", Path: "mime/quotedprintable", Freq: 2}, + {Name: "race", Path: "runtime/race", Freq: -1}, + {Name: "rand", Path: "math/rand", Freq: 981}, + {Name: "rand", Path: "crypto/rand", Freq: 256}, + {Name: "rc4", Path: "crypto/rc4", Freq: 3}, + {Name: "reflect", Path: "reflect", Freq: 3025}, + {Name: "regexp", Path: "regexp", Freq: 1320}, + {Name: "ring", Path: "container/ring", Freq: 2}, + {Name: "rpc", Path: "net/rpc", Freq: 19}, + {Name: "rsa", Path: "crypto/rsa", Freq: 103}, + {Name: "runtime", Path: "runtime", Freq: 1318}, + {Name: "scanner", Path: "text/scanner", Freq: 23}, + {Name: "scanner", Path: "go/scanner", Freq: 25}, + {Name: "sha1", Path: "crypto/sha1", Freq: 85}, + {Name: "sha256", Path: "crypto/sha256", Freq: 227}, + {Name: "sha512", Path: "crypto/sha512", Freq: 41}, + {Name: "signal", Path: "os/signal", Freq: 139}, + {Name: "smtp", Path: "net/smtp", Freq: 6}, + {Name: "sort", Path: "sort", Freq: 2382}, + {Name: 
"sql", Path: "database/sql", Freq: 157}, + {Name: "strconv", Path: "strconv", Freq: 3076}, + {Name: "strings", Path: "strings", Freq: 8852}, + {Name: "subtle", Path: "crypto/subtle", Freq: 54}, + {Name: "suffixarray", Path: "index/suffixarray", Freq: 2}, + {Name: "sync", Path: "sync", Freq: 3492}, + {Name: "syntax", Path: "regexp/syntax", Freq: 11}, + {Name: "syscall", Path: "syscall", Freq: 902}, + {Name: "syslog", Path: "log/syslog", Freq: 27}, + {Name: "tabwriter", Path: "text/tabwriter", Freq: 116}, + {Name: "tar", Path: "archive/tar", Freq: 77}, + {Name: "template", Path: "html/template", Freq: 173}, + {Name: "template", Path: "text/template", Freq: 211}, + {Name: "testing", Path: "testing", Freq: 12807}, + {Name: "textproto", Path: "net/textproto", Freq: 47}, + {Name: "time", Path: "time", Freq: 8900}, + {Name: "tls", Path: "crypto/tls", Freq: 475}, + {Name: "token", Path: "go/token", Freq: 471}, + {Name: "trace", Path: "runtime/trace", Freq: 20}, + {Name: "types", Path: "go/types", Freq: 309}, + {Name: "tzdata", Path: "time/tzdata", Freq: 6}, + {Name: "unicode", Path: "unicode", Freq: 309}, + {Name: "unsafe", Path: "unsafe", Freq: 992}, + {Name: "url", Path: "net/url", Freq: 1411}, + {Name: "user", Path: "os/user", Freq: 51}, + {Name: "utf16", Path: "unicode/utf16", Freq: 19}, + {Name: "utf8", Path: "unicode/utf8", Freq: 404}, + {Name: "x509", Path: "crypto/x509", Freq: 357}, + {Name: "xml", Path: "encoding/xml", Freq: 118}, + {Name: "zip", Path: "archive/zip", Freq: 54}, + {Name: "zlib", Path: "compress/zlib", Freq: 21}, +} diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile index 4cb90c014..5dbfd7764 100644 --- a/vendor/github.com/securego/gosec/v2/Makefile +++ b/vendor/github.com/securego/gosec/v2/Makefile @@ -12,7 +12,7 @@ GOBIN ?= $(GOPATH)/bin GOLINT ?= $(GOBIN)/golint GOSEC ?= $(GOBIN)/gosec GINKGO ?= $(GOBIN)/ginkgo -GO_VERSION = 1.17 +GO_VERSION = 1.18 default: $(MAKE) build @@ -31,12 +31,16 @@ fmt: @([ ! -z "$(FORMATTED)" ] && printf "Fixed unformatted files:\n$(FORMATTED)") || true lint: - @echo "LINTING" + @echo "LINTING: golint" $(GO_NOMOD) get -u golang.org/x/lint/golint $(GOLINT) -set_exit_status ./... @echo "VETTING" $(GO) vet ./... +golangci: + @echo "LINTING: golangci-lint" + golangci-lint run + sec: @echo "SECURITY SCANNING" ./$(BIN) ./... diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md index 772e124f8..817b59c19 100644 --- a/vendor/github.com/securego/gosec/v2/README.md +++ b/vendor/github.com/securego/gosec/v2/README.md @@ -143,6 +143,7 @@ directory you can supply `./...` as the input argument. 
- G108: Profiling endpoint automatically exposed on /debug/pprof - G109: Potential Integer overflow made by strconv.Atoi result conversion to int16/32 - G110: Potential DoS vulnerability via decompression bomb +- G111: Potential directory traversal - G201: SQL query construction using format string - G202: SQL query construction using string concatenation - G203: Use of unescaped data in HTML templates diff --git a/vendor/github.com/securego/gosec/v2/issue.go b/vendor/github.com/securego/gosec/v2/issue.go index f0f028e10..ecda70737 100644 --- a/vendor/github.com/securego/gosec/v2/issue.go +++ b/vendor/github.com/securego/gosec/v2/issue.go @@ -63,6 +63,7 @@ var ruleToCWE = map[string]string{ "G108": "200", "G109": "190", "G110": "409", + "G111": "22", "G201": "89", "G202": "89", "G203": "79", diff --git a/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go b/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go new file mode 100644 index 000000000..04237faae --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/directory-traversal.go @@ -0,0 +1,64 @@ +package rules + +import ( + "go/ast" + "regexp" + + "github.com/securego/gosec/v2" +) + +type traversal struct { + pattern *regexp.Regexp + gosec.MetaData +} + +func (r *traversal) ID() string { + return r.MetaData.ID +} + +func (r *traversal) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch node := n.(type) { + case *ast.CallExpr: + return r.matchCallExpr(node, ctx) + } + return nil, nil +} + +func (r *traversal) matchCallExpr(assign *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { + for _, i := range assign.Args { + if basiclit, ok1 := i.(*ast.BasicLit); ok1 { + if fun, ok2 := assign.Fun.(*ast.SelectorExpr); ok2 { + if x, ok3 := fun.X.(*ast.Ident); ok3 { + string := x.Name + "." 
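For reference, the kind of code the new G111 rule targets looks roughly like this; it is an illustrative sample, not code from this repository:

```go
package main

import "net/http"

func main() {
	// Serving http.Dir("/") exposes the whole filesystem; gosec's default
	// G111 pattern matches http.Dir("/") calls and reports them as a
	// potential directory traversal.
	handler := http.StripPrefix("/files/", http.FileServer(http.Dir("/")))
	http.Handle("/files/", handler)
	_ = http.ListenAndServe(":8080", nil) // example listen address
}
```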
+ fun.Sel.Name + "(" + basiclit.Value + ")" + if r.pattern.MatchString(string) { + return gosec.NewIssue(ctx, assign, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } + } + return nil, nil +} + +// NewDirectoryTraversal attempts to find the use of http.Dir("/") +func NewDirectoryTraversal(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + pattern := `http\.Dir\("\/"\)|http\.Dir\('\/'\)` + if val, ok := conf["G101"]; ok { + conf := val.(map[string]interface{}) + if configPattern, ok := conf["pattern"]; ok { + if cfgPattern, ok := configPattern.(string); ok { + pattern = cfgPattern + } + } + } + + return &traversal{ + pattern: regexp.MustCompile(pattern), + MetaData: gosec.MetaData{ + ID: id, + What: "Potential directory traversal", + Confidence: gosec.Medium, + Severity: gosec.Medium, + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go index dc9f14916..bbe72fafb 100644 --- a/vendor/github.com/securego/gosec/v2/rules/rulelist.go +++ b/vendor/github.com/securego/gosec/v2/rules/rulelist.go @@ -73,6 +73,7 @@ func Generate(trackSuppressions bool, filters ...RuleFilter) RuleList { {"G108", "Profiling endpoint is automatically exposed", NewPprofCheck}, {"G109", "Converting strconv.Atoi result to int32/int16", NewIntegerOverflowCheck}, {"G110", "Detect io.Copy instead of io.CopyN when decompression", NewDecompressionBombCheck}, + {"G111", "Detect http.Dir('/') as a potential risk", NewDirectoryTraversal}, // injection {"G201", "SQL query construction using format string", NewSQLStrFormat}, diff --git a/vendor/github.com/sivchari/tenv/.gitignore b/vendor/github.com/sivchari/tenv/.gitignore index 66fd13c90..83470100f 100644 --- a/vendor/github.com/sivchari/tenv/.gitignore +++ b/vendor/github.com/sivchari/tenv/.gitignore @@ -11,5 +11,7 @@ # Output of the go coverage tool, specifically when used with LiteIDE *.out +.idea + # Dependency directories (remove the comment below to include it) # vendor/ diff --git a/vendor/github.com/sivchari/tenv/README.md b/vendor/github.com/sivchari/tenv/README.md index 120408940..c5d004773 100644 --- a/vendor/github.com/sivchari/tenv/README.md +++ b/vendor/github.com/sivchari/tenv/README.md @@ -1,8 +1,12 @@ # tenv -tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17 + +![tenv Gopher](./tenv.png "Gopher") + [![test_and_lint](https://github.com/sivchari/tenv/actions/workflows/workflows.yml/badge.svg?branch=main)](https://github.com/sivchari/tenv/actions/workflows/workflows.yml) +tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17 + ## Instruction ```sh @@ -12,79 +16,70 @@ go install github.com/sivchari/tenv/cmd/tenv ## Usage ```go -package sandbox_test +package main import ( - "os" - "testing" + "fmt" + "os" + "testing" ) -var ( - e = os.Setenv("a", "b") - _ = e -) +func TestMain(t *testing.T) { + fmt.Println(os.Getenv("GO")) + os.Setenv("GO", "HACKING GOPHER") +} -func setup() { - os.Setenv("a", "b") - err := os.Setenv("a", "b") - if err != nil { - _ = err - } +func TestMain2(t *testing.T) { + fmt.Println(os.Getenv("GO")) } -func TestF(t *testing.T) { - setup() - os.Setenv("a", "b") - if err := os.Setenv("a", "b"); err != nil { - _ = err - } +func helper() { + os.Setenv("GO", "HACKING GOPHER") } ``` -### fish - ```console -go vet -vettool=(which tenv) 
sandbox_test.go - -# command-line-arguments -./sandbox_test.go:9:2: variable e is not using t.Setenv -./sandbox_test.go:14:2: func setup is not using t.Setenv -./sandbox_test.go:15:2: func setup is not using t.Setenv -./sandbox_test.go:23:2: func TestF is not using t.Setenv -./sandbox_test.go:24:2: func TestF is not using t.Setenv -``` +go vet -vettool=(which tenv) ./... -### bash - -```console -$ go vet -vettool=`which tenv` main.go - -# command-line-arguments -./sandbox_test.go:9:2: variable e is not using t.Setenv -./sandbox_test.go:14:2: func setup is not using t.Setenv -./sandbox_test.go:15:2: func setup is not using t.Setenv -./sandbox_test.go:23:2: func TestF is not using t.Setenv -./sandbox_test.go:24:2: func TestF is not using t.Setenv +# a +./main_test.go:11:2: os.Setenv() can be replaced by `t.Setenv()` in TestMain ``` ### option -t.Setenv can use since Go1.17. -This linter diagnostics, if Go version is since 1.17. -But, if you wanna exec this linter in prior Go1.17, you can use it that you set `-tenv.f` flag. +The option `all` will run against whole test files (`_test.go`) regardless of method/function signatures. -e.g. +By default, only methods that take `*testing.T`, `*testing.B`, and `testing.TB` as arguments are checked. -### fish +```go +package main -```console -go vet -vettool=(which tenv) -tenv.f sandbox_test.go -``` +import ( + "fmt" + "os" + "testing" +) -### bash +func TestMain(t *testing.T) { + fmt.Println(os.Getenv("GO")) + os.Setenv("GO", "HACKING GOPHER") +} + +func TestMain2(t *testing.T) { + fmt.Println(os.Getenv("GO")) +} + +func helper() { + os.Setenv("GO", "HACKING GOPHER") +} +``` ```console -go vet -vettool=`which tenv` -tenv.f main.go +go vet -vettool=(which tenv) -tenv.all ./... + +# a +./main_test.go:11:2: os.Setenv() can be replaced by `t.Setenv()` in TestMain +./main_test.go:19:2: os.Setenv() can be replaced by `testing.Setenv()` in helper ``` ## CI @@ -93,20 +88,20 @@ go vet -vettool=`which tenv` -tenv.f main.go ```yaml - run: - name: Install tenv + name: install tenv command: go install github.com/sivchari/tenv - run: - name: Run tenv + name: run tenv command: go vet -vettool=`which tenv` ./... ``` ### GitHub Actions ```yaml -- name: Install tenv +- name: install tenv run: go install github.com/sivchari/tenv -- name: Run tenv +- name: run tenv run: go vet -vettool=`which tenv` ./... ``` diff --git a/vendor/github.com/sivchari/tenv/tenv.go b/vendor/github.com/sivchari/tenv/tenv.go index 9dac6a411..9cdeb69bf 100644 --- a/vendor/github.com/sivchari/tenv/tenv.go +++ b/vendor/github.com/sivchari/tenv/tenv.go @@ -182,7 +182,7 @@ func checkStarExprTarget(typ *ast.StarExpr) bool { } targetName := x.Name + "." + selector.Sel.Name switch targetName { - case "testing.T", "testing.B": + case "testing.T", "testing.B", "testing.F": return true default: return false diff --git a/vendor/github.com/sivchari/tenv/tenv.png b/vendor/github.com/sivchari/tenv/tenv.png new file mode 100644 index 000000000..96dc967e3 Binary files /dev/null and b/vendor/github.com/sivchari/tenv/tenv.png differ diff --git a/vendor/github.com/spf13/afero/README.md b/vendor/github.com/spf13/afero/README.md index fb8eaaf89..cab257f56 100644 --- a/vendor/github.com/spf13/afero/README.md +++ b/vendor/github.com/spf13/afero/README.md @@ -79,11 +79,11 @@ would. 
So if my application before had: ```go -os.Open('/tmp/foo') +os.Open("/tmp/foo") ``` We would replace it with: ```go -AppFs.Open('/tmp/foo') +AppFs.Open("/tmp/foo") ``` `AppFs` being the variable we defined above. @@ -259,6 +259,18 @@ system using InMemoryFile. Afero has experimental support for secure file transfer protocol (sftp). Which can be used to perform file operations over a encrypted channel. +### GCSFs + +Afero has experimental support for Google Cloud Storage (GCS). You can either set the +`GOOGLE_APPLICATION_CREDENTIALS_JSON` env variable to your JSON credentials or use `opts` in +`NewGcsFS` to configure access to your GCS bucket. + +Some known limitations of the existing implementation: +* No Chmod support - The GCS ACL could probably be mapped to *nix style permissions but that would add another level of complexity and is ignored in this version. +* No Chtimes support - Could be simulated with attributes (gcs a/m-times are set implicitly) but that's is left for another version. +* Not thread safe - Also assumes all file operations are done through the same instance of the GcsFs. File operations between different GcsFs instances are not guaranteed to be consistent. + + ## Filtering Backends ### BasePathFs diff --git a/vendor/github.com/spf13/afero/cacheOnReadFs.go b/vendor/github.com/spf13/afero/cacheOnReadFs.go index 71471aa25..017d344fd 100644 --- a/vendor/github.com/spf13/afero/cacheOnReadFs.go +++ b/vendor/github.com/spf13/afero/cacheOnReadFs.go @@ -75,6 +75,10 @@ func (u *CacheOnReadFs) copyToLayer(name string) error { return copyToLayer(u.base, u.layer, name) } +func (u *CacheOnReadFs) copyFileToLayer(name string, flag int, perm os.FileMode) error { + return copyFileToLayer(u.base, u.layer, name, flag, perm) +} + func (u *CacheOnReadFs) Chtimes(name string, atime, mtime time.Time) error { st, _, err := u.cacheStatus(name) if err != nil { @@ -212,7 +216,7 @@ func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File, switch st { case cacheLocal, cacheHit: default: - if err := u.copyToLayer(name); err != nil { + if err := u.copyFileToLayer(name, flag, perm); err != nil { return nil, err } } diff --git a/vendor/github.com/spf13/afero/mem/file.go b/vendor/github.com/spf13/afero/mem/file.go index 5a20730c2..5ef8b6a39 100644 --- a/vendor/github.com/spf13/afero/mem/file.go +++ b/vendor/github.com/spf13/afero/mem/file.go @@ -71,7 +71,7 @@ func CreateFile(name string) *FileData { } func CreateDir(name string) *FileData { - return &FileData{name: name, memDir: &DirMap{}, dir: true} + return &FileData{name: name, memDir: &DirMap{}, dir: true, modtime: time.Now()} } func ChangeFileName(f *FileData, newname string) { diff --git a/vendor/github.com/spf13/afero/memmap.go b/vendor/github.com/spf13/afero/memmap.go index 5c265f92b..ea0798d87 100644 --- a/vendor/github.com/spf13/afero/memmap.go +++ b/vendor/github.com/spf13/afero/memmap.go @@ -279,7 +279,7 @@ func (m *MemMapFs) RemoveAll(path string) error { defer m.mu.RUnlock() for p := range m.getData() { - if strings.HasPrefix(p, path) { + if p == path || strings.HasPrefix(p, path+FilePathSeparator) { m.mu.RUnlock() m.mu.Lock() delete(m.getData(), p) diff --git a/vendor/github.com/spf13/afero/unionFile.go b/vendor/github.com/spf13/afero/unionFile.go index 985363eea..34f99a40c 100644 --- a/vendor/github.com/spf13/afero/unionFile.go +++ b/vendor/github.com/spf13/afero/unionFile.go @@ -268,13 
+268,7 @@ func (f *UnionFile) WriteString(s string) (n int, err error) { return 0, BADFD } -func copyToLayer(base Fs, layer Fs, name string) error { - bfh, err := base.Open(name) - if err != nil { - return err - } - defer bfh.Close() - +func copyFile(base Fs, layer Fs, name string, bfh File) error { // First make sure the directory exists exists, err := Exists(layer, filepath.Dir(name)) if err != nil { @@ -315,3 +309,23 @@ func copyToLayer(base Fs, layer Fs, name string) error { } return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime()) } + +func copyToLayer(base Fs, layer Fs, name string) error { + bfh, err := base.Open(name) + if err != nil { + return err + } + defer bfh.Close() + + return copyFile(base, layer, name, bfh) +} + +func copyFileToLayer(base Fs, layer Fs, name string, flag int, perm os.FileMode) error { + bfh, err := base.OpenFile(name, flag, perm) + if err != nil { + return err + } + defer bfh.Close() + + return copyFile(base, layer, name, bfh) +} diff --git a/vendor/github.com/spf13/viper/.golangci.yml b/vendor/github.com/spf13/viper/.golangci.yml index 52e77eef0..16e039652 100644 --- a/vendor/github.com/spf13/viper/.golangci.yml +++ b/vendor/github.com/spf13/viper/.golangci.yml @@ -3,7 +3,10 @@ run: linters-settings: gci: - local-prefixes: github.com/spf13/viper + sections: + - standard + - default + - prefix(github.com/spf13/viper) golint: min-confidence: 0 goimports: diff --git a/vendor/github.com/spf13/viper/Makefile b/vendor/github.com/spf13/viper/Makefile index 1279096f4..02d3e3715 100644 --- a/vendor/github.com/spf13/viper/Makefile +++ b/vendor/github.com/spf13/viper/Makefile @@ -15,8 +15,8 @@ TEST_FORMAT = short-verbose endif # Dependency versions -GOTESTSUM_VERSION = 1.7.0 -GOLANGCI_VERSION = 1.43.0 +GOTESTSUM_VERSION = 1.8.0 +GOLANGCI_VERSION = 1.45.2 # Add the ability to override some variables # Use with care diff --git a/vendor/github.com/spf13/viper/README.md b/vendor/github.com/spf13/viper/README.md index 9712e7051..c14e8927a 100644 --- a/vendor/github.com/spf13/viper/README.md +++ b/vendor/github.com/spf13/viper/README.md @@ -11,7 +11,7 @@ [![GitHub Workflow Status](https://img.shields.io/github/workflow/status/spf13/viper/CI?style=flat-square)](https://github.com/spf13/viper/actions?query=workflow%3ACI) [![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) [![Go Report Card](https://goreportcard.com/badge/github.com/spf13/viper?style=flat-square)](https://goreportcard.com/report/github.com/spf13/viper) -![Go Version](https://img.shields.io/badge/go%20version-%3E=1.14-61CFDD.svg?style=flat-square) +![Go Version](https://img.shields.io/badge/go%20version-%3E=1.15-61CFDD.svg?style=flat-square) [![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/viper)](https://pkg.go.dev/mod/github.com/spf13/viper) **Go configuration with fangs!** diff --git a/vendor/github.com/spf13/viper/TROUBLESHOOTING.md b/vendor/github.com/spf13/viper/TROUBLESHOOTING.md index 096277af7..c4e36c686 100644 --- a/vendor/github.com/spf13/viper/TROUBLESHOOTING.md +++ b/vendor/github.com/spf13/viper/TROUBLESHOOTING.md @@ -21,3 +21,12 @@ The solution is easy: switch to using Go Modules. 
Please refer to the [wiki](https://github.com/golang/go/wiki/Modules) on how to do that. **tl;dr* `export GO111MODULE=on` + +## Unquoted 'y' and 'n' characters get replaced with _true_ and _false_ when reading a YAML file + +This is a YAML 1.1 feature according to [go-yaml/yaml#740](https://github.com/go-yaml/yaml/issues/740). + +Potential solutions are: + +1. Quoting values resolved as boolean +1. Upgrading to YAML v3 (for the time being this is possible by passing the `viper_yaml3` tag to your build) diff --git a/vendor/github.com/spf13/viper/experimental_logger.go b/vendor/github.com/spf13/viper/experimental_logger.go new file mode 100644 index 000000000..206dad6a0 --- /dev/null +++ b/vendor/github.com/spf13/viper/experimental_logger.go @@ -0,0 +1,11 @@ +//go:build viper_logger +// +build viper_logger + +package viper + +// WithLogger sets a custom logger. +func WithLogger(l Logger) Option { + return optionFunc(func(v *Viper) { + v.logger = l + }) +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/decoder.go b/vendor/github.com/spf13/viper/internal/encoding/decoder.go index 08b1bb66b..f472e9ff1 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/decoder.go +++ b/vendor/github.com/spf13/viper/internal/encoding/decoder.go @@ -4,10 +4,10 @@ import ( "sync" ) -// Decoder decodes the contents of b into a v representation. +// Decoder decodes the contents of b into v. // It's primarily used for decoding contents of a file into a map[string]interface{}. type Decoder interface { - Decode(b []byte, v interface{}) error + Decode(b []byte, v map[string]interface{}) error } const ( @@ -48,7 +48,7 @@ func (e *DecoderRegistry) RegisterDecoder(format string, enc Decoder) error { } // Decode calls the underlying Decoder based on the format. -func (e *DecoderRegistry) Decode(format string, b []byte, v interface{}) error { +func (e *DecoderRegistry) Decode(format string, b []byte, v map[string]interface{}) error { e.mu.RLock() decoder, ok := e.decoders[format] e.mu.RUnlock() diff --git a/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go b/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go new file mode 100644 index 000000000..4485063b6 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/dotenv/codec.go @@ -0,0 +1,61 @@ +package dotenv + +import ( + "bytes" + "fmt" + "sort" + "strings" + + "github.com/subosito/gotenv" +) + +const keyDelimiter = "_" + +// Codec implements the encoding.Encoder and encoding.Decoder interfaces for encoding data containing environment variables +// (commonly called as dotenv format). 
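To make the YAML troubleshooting note above concrete, here is a sketch of the quoting workaround. It assumes the default yaml.v2 backend (YAML 1.1 resolution); with the `viper_yaml3` build tag both values should stay strings:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.SetConfigType("yaml")

	// Under YAML 1.1 rules the unquoted value resolves to a boolean,
	// while the quoted one is preserved as the string "y".
	cfg := "unquoted: y\nquoted: \"y\"\n"
	if err := v.ReadConfig(strings.NewReader(cfg)); err != nil {
		panic(err)
	}

	fmt.Printf("%T %v\n", v.Get("unquoted"), v.Get("unquoted")) // bool true
	fmt.Printf("%T %v\n", v.Get("quoted"), v.Get("quoted"))     // string y
}
```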
+type Codec struct{} + +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { + flattened := map[string]interface{}{} + + flattened = flattenAndMergeMap(flattened, v, "", keyDelimiter) + + keys := make([]string, 0, len(flattened)) + + for key := range flattened { + keys = append(keys, key) + } + + sort.Strings(keys) + + var buf bytes.Buffer + + for _, key := range keys { + _, err := buf.WriteString(fmt.Sprintf("%v=%v\n", strings.ToUpper(key), flattened[key])) + if err != nil { + return nil, err + } + } + + return buf.Bytes(), nil +} + +func (Codec) Decode(b []byte, v map[string]interface{}) error { + var buf bytes.Buffer + + _, err := buf.Write(b) + if err != nil { + return err + } + + env, err := gotenv.StrictParse(&buf) + if err != nil { + return err + } + + for key, value := range env { + v[key] = value + } + + return nil +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go new file mode 100644 index 000000000..ce6e6efa3 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/dotenv/map_utils.go @@ -0,0 +1,41 @@ +package dotenv + +import ( + "strings" + + "github.com/spf13/cast" +) + +// flattenAndMergeMap recursively flattens the given map into a new map +// Code is based on the function with the same name in tha main package. +// TODO: move it to a common place +func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} { + if shadow != nil && prefix != "" && shadow[prefix] != nil { + // prefix is shadowed => nothing more to flatten + return shadow + } + if shadow == nil { + shadow = make(map[string]interface{}) + } + + var m2 map[string]interface{} + if prefix != "" { + prefix += delimiter + } + for k, val := range m { + fullKey := prefix + k + switch val.(type) { + case map[string]interface{}: + m2 = val.(map[string]interface{}) + case map[interface{}]interface{}: + m2 = cast.ToStringMap(val) + default: + // immediate value + shadow[strings.ToLower(fullKey)] = val + continue + } + // recursively merge to shadow map + shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) + } + return shadow +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/encoder.go b/vendor/github.com/spf13/viper/internal/encoding/encoder.go index 82c7996cb..2341bf235 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/encoder.go +++ b/vendor/github.com/spf13/viper/internal/encoding/encoder.go @@ -7,7 +7,7 @@ import ( // Encoder encodes the contents of v into a byte representation. // It's primarily used for encoding a map[string]interface{} into a file format. 
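The dotenv codec above hinges on flattening nested maps into single, underscore-delimited keys before emitting KEY=value lines. A simplified, self-contained illustration of that step (it deliberately skips the shadowing check the real flattenAndMergeMap performs):

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// flatten is a toy stand-in for flattenAndMergeMap: nested keys are joined
// with "_" and stored lower-cased, mirroring the codec's behaviour.
func flatten(prefix string, in, out map[string]interface{}) {
	for k, v := range in {
		key := k
		if prefix != "" {
			key = prefix + "_" + k
		}
		if nested, ok := v.(map[string]interface{}); ok {
			flatten(key, nested, out)
			continue
		}
		out[strings.ToLower(key)] = v
	}
}

func main() {
	in := map[string]interface{}{
		"database": map[string]interface{}{"host": "localhost", "port": 5432},
	}
	out := map[string]interface{}{}
	flatten("", in, out)

	keys := make([]string, 0, len(out))
	for k := range out {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	for _, k := range keys {
		fmt.Printf("%s=%v\n", strings.ToUpper(k), out[k]) // DATABASE_HOST=localhost, DATABASE_PORT=5432
	}
}
```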
type Encoder interface { - Encode(v interface{}) ([]byte, error) + Encode(v map[string]interface{}) ([]byte, error) } const ( @@ -47,7 +47,7 @@ func (e *EncoderRegistry) RegisterEncoder(format string, enc Encoder) error { return nil } -func (e *EncoderRegistry) Encode(format string, v interface{}) ([]byte, error) { +func (e *EncoderRegistry) Encode(format string, v map[string]interface{}) ([]byte, error) { e.mu.RLock() encoder, ok := e.encoders[format] e.mu.RUnlock() diff --git a/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go b/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go index f3e4ab122..7fde8e4bc 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go +++ b/vendor/github.com/spf13/viper/internal/encoding/hcl/codec.go @@ -12,7 +12,7 @@ import ( // TODO: add printer config to the codec? type Codec struct{} -func (Codec) Encode(v interface{}) ([]byte, error) { +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { b, err := json.Marshal(v) if err != nil { return nil, err @@ -35,6 +35,6 @@ func (Codec) Encode(v interface{}) ([]byte, error) { return buf.Bytes(), nil } -func (Codec) Decode(b []byte, v interface{}) error { - return hcl.Unmarshal(b, v) +func (Codec) Decode(b []byte, v map[string]interface{}) error { + return hcl.Unmarshal(b, &v) } diff --git a/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go b/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go new file mode 100644 index 000000000..9acd87fc3 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/ini/codec.go @@ -0,0 +1,99 @@ +package ini + +import ( + "bytes" + "sort" + "strings" + + "github.com/spf13/cast" + "gopkg.in/ini.v1" +) + +// LoadOptions contains all customized options used for load data source(s). +// This type is added here for convenience: this way consumers can import a single package called "ini". +type LoadOptions = ini.LoadOptions + +// Codec implements the encoding.Encoder and encoding.Decoder interfaces for INI encoding. +type Codec struct { + KeyDelimiter string + LoadOptions LoadOptions +} + +func (c Codec) Encode(v map[string]interface{}) ([]byte, error) { + cfg := ini.Empty() + ini.PrettyFormat = false + + flattened := map[string]interface{}{} + + flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter()) + + keys := make([]string, 0, len(flattened)) + + for key := range flattened { + keys = append(keys, key) + } + + sort.Strings(keys) + + for _, key := range keys { + sectionName, keyName := "", key + + lastSep := strings.LastIndex(key, ".") + if lastSep != -1 { + sectionName = key[:(lastSep)] + keyName = key[(lastSep + 1):] + } + + // TODO: is this a good idea? 
+ if sectionName == "default" { + sectionName = "" + } + + cfg.Section(sectionName).Key(keyName).SetValue(cast.ToString(flattened[key])) + } + + var buf bytes.Buffer + + _, err := cfg.WriteTo(&buf) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +func (c Codec) Decode(b []byte, v map[string]interface{}) error { + cfg := ini.Empty(c.LoadOptions) + + err := cfg.Append(b) + if err != nil { + return err + } + + sections := cfg.Sections() + + for i := 0; i < len(sections); i++ { + section := sections[i] + keys := section.Keys() + + for j := 0; j < len(keys); j++ { + key := keys[j] + value := cfg.Section(section.Name()).Key(key.Name()).String() + + deepestMap := deepSearch(v, strings.Split(section.Name(), c.keyDelimiter())) + + // set innermost value + deepestMap[key.Name()] = value + } + } + + return nil +} + +func (c Codec) keyDelimiter() string { + if c.KeyDelimiter == "" { + return "." + } + + return c.KeyDelimiter +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go new file mode 100644 index 000000000..8329856b5 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/ini/map_utils.go @@ -0,0 +1,74 @@ +package ini + +import ( + "strings" + + "github.com/spf13/cast" +) + +// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED +// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE +// deepSearch scans deep maps, following the key indexes listed in the +// sequence "path". +// The last value is expected to be another map, and is returned. +// +// In case intermediate keys do not exist, or map to a non-map value, +// a new map is created and inserted, and the search continues from there: +// the initial map "m" may be modified! +func deepSearch(m map[string]interface{}, path []string) map[string]interface{} { + for _, k := range path { + m2, ok := m[k] + if !ok { + // intermediate key does not exist + // => create it and continue from there + m3 := make(map[string]interface{}) + m[k] = m3 + m = m3 + continue + } + m3, ok := m2.(map[string]interface{}) + if !ok { + // intermediate key is a value + // => replace with a new map + m3 = make(map[string]interface{}) + m[k] = m3 + } + // continue search from here + m = m3 + } + return m +} + +// flattenAndMergeMap recursively flattens the given map into a new map +// Code is based on the function with the same name in tha main package. 
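A short illustration of the section/key split the INI codec performs above: each flattened key is cut at its last delimiter, and the synthetic "default" section maps to INI's global section. The keys below are made up for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, key := range []string{"default.timeout", "server.http.port"} {
		section, name := "", key
		if i := strings.LastIndex(key, "."); i != -1 {
			section, name = key[:i], key[i+1:]
		}
		// Mirror the codec: the "default" pseudo-section becomes the global section.
		if section == "default" {
			section = ""
		}
		fmt.Printf("section=%q key=%q\n", section, name)
		// section="" key="timeout"
		// section="server.http" key="port"
	}
}
```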
+// TODO: move it to a common place +func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} { + if shadow != nil && prefix != "" && shadow[prefix] != nil { + // prefix is shadowed => nothing more to flatten + return shadow + } + if shadow == nil { + shadow = make(map[string]interface{}) + } + + var m2 map[string]interface{} + if prefix != "" { + prefix += delimiter + } + for k, val := range m { + fullKey := prefix + k + switch val.(type) { + case map[string]interface{}: + m2 = val.(map[string]interface{}) + case map[interface{}]interface{}: + m2 = cast.ToStringMap(val) + default: + // immediate value + shadow[strings.ToLower(fullKey)] = val + continue + } + // recursively merge to shadow map + shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) + } + return shadow +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go new file mode 100644 index 000000000..b8a2251c1 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/codec.go @@ -0,0 +1,86 @@ +package javaproperties + +import ( + "bytes" + "sort" + "strings" + + "github.com/magiconair/properties" + "github.com/spf13/cast" +) + +// Codec implements the encoding.Encoder and encoding.Decoder interfaces for Java properties encoding. +type Codec struct { + KeyDelimiter string + + // Store read properties on the object so that we can write back in order with comments. + // This will only be used if the configuration read is a properties file. + // TODO: drop this feature in v2 + // TODO: make use of the global properties object optional + Properties *properties.Properties +} + +func (c *Codec) Encode(v map[string]interface{}) ([]byte, error) { + if c.Properties == nil { + c.Properties = properties.NewProperties() + } + + flattened := map[string]interface{}{} + + flattened = flattenAndMergeMap(flattened, v, "", c.keyDelimiter()) + + keys := make([]string, 0, len(flattened)) + + for key := range flattened { + keys = append(keys, key) + } + + sort.Strings(keys) + + for _, key := range keys { + _, _, err := c.Properties.Set(key, cast.ToString(flattened[key])) + if err != nil { + return nil, err + } + } + + var buf bytes.Buffer + + _, err := c.Properties.WriteComment(&buf, "#", properties.UTF8) + if err != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +func (c *Codec) Decode(b []byte, v map[string]interface{}) error { + var err error + c.Properties, err = properties.Load(b, properties.UTF8) + if err != nil { + return err + } + + for _, key := range c.Properties.Keys() { + // ignore existence check: we know it's there + value, _ := c.Properties.Get(key) + + // recursively build nested maps + path := strings.Split(key, c.keyDelimiter()) + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(v, path[0:len(path)-1]) + + // set innermost value + deepestMap[lastKey] = value + } + + return nil +} + +func (c Codec) keyDelimiter() string { + if c.KeyDelimiter == "" { + return "." 
+ } + + return c.KeyDelimiter +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go new file mode 100644 index 000000000..93755cac1 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/javaproperties/map_utils.go @@ -0,0 +1,74 @@ +package javaproperties + +import ( + "strings" + + "github.com/spf13/cast" +) + +// THIS CODE IS COPIED HERE: IT SHOULD NOT BE MODIFIED +// AT SOME POINT IT WILL BE MOVED TO A COMMON PLACE +// deepSearch scans deep maps, following the key indexes listed in the +// sequence "path". +// The last value is expected to be another map, and is returned. +// +// In case intermediate keys do not exist, or map to a non-map value, +// a new map is created and inserted, and the search continues from there: +// the initial map "m" may be modified! +func deepSearch(m map[string]interface{}, path []string) map[string]interface{} { + for _, k := range path { + m2, ok := m[k] + if !ok { + // intermediate key does not exist + // => create it and continue from there + m3 := make(map[string]interface{}) + m[k] = m3 + m = m3 + continue + } + m3, ok := m2.(map[string]interface{}) + if !ok { + // intermediate key is a value + // => replace with a new map + m3 = make(map[string]interface{}) + m[k] = m3 + } + // continue search from here + m = m3 + } + return m +} + +// flattenAndMergeMap recursively flattens the given map into a new map +// Code is based on the function with the same name in tha main package. +// TODO: move it to a common place +func flattenAndMergeMap(shadow map[string]interface{}, m map[string]interface{}, prefix string, delimiter string) map[string]interface{} { + if shadow != nil && prefix != "" && shadow[prefix] != nil { + // prefix is shadowed => nothing more to flatten + return shadow + } + if shadow == nil { + shadow = make(map[string]interface{}) + } + + var m2 map[string]interface{} + if prefix != "" { + prefix += delimiter + } + for k, val := range m { + fullKey := prefix + k + switch val.(type) { + case map[string]interface{}: + m2 = val.(map[string]interface{}) + case map[interface{}]interface{}: + m2 = cast.ToStringMap(val) + default: + // immediate value + shadow[strings.ToLower(fullKey)] = val + continue + } + // recursively merge to shadow map + shadow = flattenAndMergeMap(shadow, m2, fullKey, delimiter) + } + return shadow +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/json/codec.go b/vendor/github.com/spf13/viper/internal/encoding/json/codec.go index dff9ec982..1b7caaceb 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/json/codec.go +++ b/vendor/github.com/spf13/viper/internal/encoding/json/codec.go @@ -7,11 +7,11 @@ import ( // Codec implements the encoding.Encoder and encoding.Decoder interfaces for JSON encoding. type Codec struct{} -func (Codec) Encode(v interface{}) ([]byte, error) { +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { // TODO: expose prefix and indent in the Codec as setting? 
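The deepSearch helper copied into the codecs above turns flat, dotted keys back into nested maps on decode. A simplified sketch of the same idea (it collapses the two branches of the original into one):

```go
package main

import (
	"fmt"
	"strings"
)

// deepSearch walks (and creates) nested maps along path, returning the deepest one.
// Simplified from the vendored map_utils.go.
func deepSearch(m map[string]interface{}, path []string) map[string]interface{} {
	for _, k := range path {
		next, ok := m[k].(map[string]interface{})
		if !ok {
			// missing or non-map intermediate value: replace with a fresh map
			next = map[string]interface{}{}
			m[k] = next
		}
		m = next
	}
	return m
}

func main() {
	root := map[string]interface{}{}
	path := strings.Split("database.primary.host", ".")

	deepest := deepSearch(root, path[:len(path)-1])
	deepest[path[len(path)-1]] = "localhost"

	fmt.Println(root) // map[database:map[primary:map[host:localhost]]]
}
```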
return json.MarshalIndent(v, "", " ") } -func (Codec) Decode(b []byte, v interface{}) error { - return json.Unmarshal(b, v) +func (Codec) Decode(b []byte, v map[string]interface{}) error { + return json.Unmarshal(b, &v) } diff --git a/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go b/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go index c043802b9..ff1112cac 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go +++ b/vendor/github.com/spf13/viper/internal/encoding/toml/codec.go @@ -1,3 +1,6 @@ +//go:build !viper_toml2 +// +build !viper_toml2 + package toml import ( @@ -7,39 +10,30 @@ import ( // Codec implements the encoding.Encoder and encoding.Decoder interfaces for TOML encoding. type Codec struct{} -func (Codec) Encode(v interface{}) ([]byte, error) { - if m, ok := v.(map[string]interface{}); ok { - t, err := toml.TreeFromMap(m) - if err != nil { - return nil, err - } - - s, err := t.ToTomlString() - if err != nil { - return nil, err - } +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { + t, err := toml.TreeFromMap(v) + if err != nil { + return nil, err + } - return []byte(s), nil + s, err := t.ToTomlString() + if err != nil { + return nil, err } - return toml.Marshal(v) + return []byte(s), nil } -func (Codec) Decode(b []byte, v interface{}) error { +func (Codec) Decode(b []byte, v map[string]interface{}) error { tree, err := toml.LoadBytes(b) if err != nil { return err } - if m, ok := v.(*map[string]interface{}); ok { - vmap := *m - tmap := tree.ToMap() - for k, v := range tmap { - vmap[k] = v - } - - return nil + tmap := tree.ToMap() + for key, value := range tmap { + v[key] = value } - return tree.Unmarshal(v) + return nil } diff --git a/vendor/github.com/spf13/viper/internal/encoding/toml/codec2.go b/vendor/github.com/spf13/viper/internal/encoding/toml/codec2.go new file mode 100644 index 000000000..566b70628 --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/toml/codec2.go @@ -0,0 +1,19 @@ +//go:build viper_toml2 +// +build viper_toml2 + +package toml + +import ( + "github.com/pelletier/go-toml/v2" +) + +// Codec implements the encoding.Encoder and encoding.Decoder interfaces for TOML encoding. +type Codec struct{} + +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { + return toml.Marshal(v) +} + +func (Codec) Decode(b []byte, v map[string]interface{}) error { + return toml.Unmarshal(b, &v) +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go b/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go index f94b26996..24cc19dfc 100644 --- a/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go +++ b/vendor/github.com/spf13/viper/internal/encoding/yaml/codec.go @@ -1,14 +1,14 @@ package yaml -import "gopkg.in/yaml.v2" +// import "gopkg.in/yaml.v2" // Codec implements the encoding.Encoder and encoding.Decoder interfaces for YAML encoding. 
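The default (non-`viper_toml2`) TOML Encode above now assumes a map[string]interface{} and goes map, then tree, then string via go-toml v1. A minimal sketch of those two library calls in isolation (values are illustrative):

```go
package main

import (
	"fmt"

	"github.com/pelletier/go-toml"
)

func main() {
	m := map[string]interface{}{
		"server": map[string]interface{}{"port": 8080},
	}

	// The same pair of calls the simplified Encode relies on.
	tree, err := toml.TreeFromMap(m)
	if err != nil {
		panic(err)
	}
	out, err := tree.ToTomlString()
	if err != nil {
		panic(err)
	}
	fmt.Print(out) // emits a [server] table containing port = 8080
}
```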
type Codec struct{} -func (Codec) Encode(v interface{}) ([]byte, error) { +func (Codec) Encode(v map[string]interface{}) ([]byte, error) { return yaml.Marshal(v) } -func (Codec) Decode(b []byte, v interface{}) error { - return yaml.Unmarshal(b, v) +func (Codec) Decode(b []byte, v map[string]interface{}) error { + return yaml.Unmarshal(b, &v) } diff --git a/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml2.go b/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml2.go new file mode 100644 index 000000000..ca29b84db --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml2.go @@ -0,0 +1,14 @@ +//go:build !viper_yaml3 +// +build !viper_yaml3 + +package yaml + +import yamlv2 "gopkg.in/yaml.v2" + +var yaml = struct { + Marshal func(in interface{}) (out []byte, err error) + Unmarshal func(in []byte, out interface{}) (err error) +}{ + Marshal: yamlv2.Marshal, + Unmarshal: yamlv2.Unmarshal, +} diff --git a/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml3.go b/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml3.go new file mode 100644 index 000000000..96b8957fa --- /dev/null +++ b/vendor/github.com/spf13/viper/internal/encoding/yaml/yaml3.go @@ -0,0 +1,14 @@ +//go:build viper_yaml3 +// +build viper_yaml3 + +package yaml + +import yamlv3 "gopkg.in/yaml.v3" + +var yaml = struct { + Marshal func(in interface{}) (out []byte, err error) + Unmarshal func(in []byte, out interface{}) (err error) +}{ + Marshal: yamlv3.Marshal, + Unmarshal: yamlv3.Unmarshal, +} diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go index 4a9935899..4a9dac9d4 100644 --- a/vendor/github.com/spf13/viper/viper.go +++ b/vendor/github.com/spf13/viper/viper.go @@ -35,16 +35,16 @@ import ( "time" "github.com/fsnotify/fsnotify" - "github.com/magiconair/properties" "github.com/mitchellh/mapstructure" "github.com/spf13/afero" "github.com/spf13/cast" "github.com/spf13/pflag" - "github.com/subosito/gotenv" - "gopkg.in/ini.v1" "github.com/spf13/viper/internal/encoding" + "github.com/spf13/viper/internal/encoding/dotenv" "github.com/spf13/viper/internal/encoding/hcl" + "github.com/spf13/viper/internal/encoding/ini" + "github.com/spf13/viper/internal/encoding/javaproperties" "github.com/spf13/viper/internal/encoding/json" "github.com/spf13/viper/internal/encoding/toml" "github.com/spf13/viper/internal/encoding/yaml" @@ -67,47 +67,8 @@ type RemoteResponse struct { Error error } -var ( - encoderRegistry = encoding.NewEncoderRegistry() - decoderRegistry = encoding.NewDecoderRegistry() -) - func init() { v = New() - - { - codec := yaml.Codec{} - - encoderRegistry.RegisterEncoder("yaml", codec) - decoderRegistry.RegisterDecoder("yaml", codec) - - encoderRegistry.RegisterEncoder("yml", codec) - decoderRegistry.RegisterDecoder("yml", codec) - } - - { - codec := json.Codec{} - - encoderRegistry.RegisterEncoder("json", codec) - decoderRegistry.RegisterDecoder("json", codec) - } - - { - codec := toml.Codec{} - - encoderRegistry.RegisterEncoder("toml", codec) - decoderRegistry.RegisterDecoder("toml", codec) - } - - { - codec := hcl.Codec{} - - encoderRegistry.RegisterEncoder("hcl", codec) - decoderRegistry.RegisterDecoder("hcl", codec) - - encoderRegistry.RegisterEncoder("tfvars", codec) - decoderRegistry.RegisterDecoder("tfvars", codec) - } } type remoteConfigFactory 
interface { @@ -254,13 +215,13 @@ type Viper struct { aliases map[string]string typeByDefValue bool - // Store read properties on the object so that we can write back in order with comments. - // This will only be used if the configuration read is a properties file. - properties *properties.Properties - onConfigChange func(fsnotify.Event) logger Logger + + // TODO: should probably be protected with a mutex + encoderRegistry *encoding.EncoderRegistry + decoderRegistry *encoding.DecoderRegistry } // New returns an initialized Viper instance. @@ -280,6 +241,8 @@ func New() *Viper { v.typeByDefValue = false v.logger = jwwLogger{} + v.resetEncoding() + return v } @@ -326,6 +289,8 @@ func NewWithOptions(opts ...Option) *Viper { opt.apply(v) } + v.resetEncoding() + return v } @@ -338,6 +303,84 @@ func Reset() { SupportedRemoteProviders = []string{"etcd", "consul", "firestore"} } +// TODO: make this lazy initialization instead +func (v *Viper) resetEncoding() { + encoderRegistry := encoding.NewEncoderRegistry() + decoderRegistry := encoding.NewDecoderRegistry() + + { + codec := yaml.Codec{} + + encoderRegistry.RegisterEncoder("yaml", codec) + decoderRegistry.RegisterDecoder("yaml", codec) + + encoderRegistry.RegisterEncoder("yml", codec) + decoderRegistry.RegisterDecoder("yml", codec) + } + + { + codec := json.Codec{} + + encoderRegistry.RegisterEncoder("json", codec) + decoderRegistry.RegisterDecoder("json", codec) + } + + { + codec := toml.Codec{} + + encoderRegistry.RegisterEncoder("toml", codec) + decoderRegistry.RegisterDecoder("toml", codec) + } + + { + codec := hcl.Codec{} + + encoderRegistry.RegisterEncoder("hcl", codec) + decoderRegistry.RegisterDecoder("hcl", codec) + + encoderRegistry.RegisterEncoder("tfvars", codec) + decoderRegistry.RegisterDecoder("tfvars", codec) + } + + { + codec := ini.Codec{ + KeyDelimiter: v.keyDelim, + LoadOptions: v.iniLoadOptions, + } + + encoderRegistry.RegisterEncoder("ini", codec) + decoderRegistry.RegisterDecoder("ini", codec) + } + + { + codec := &javaproperties.Codec{ + KeyDelimiter: v.keyDelim, + } + + encoderRegistry.RegisterEncoder("properties", codec) + decoderRegistry.RegisterDecoder("properties", codec) + + encoderRegistry.RegisterEncoder("props", codec) + decoderRegistry.RegisterDecoder("props", codec) + + encoderRegistry.RegisterEncoder("prop", codec) + decoderRegistry.RegisterDecoder("prop", codec) + } + + { + codec := &dotenv.Codec{} + + encoderRegistry.RegisterEncoder("dotenv", codec) + decoderRegistry.RegisterDecoder("dotenv", codec) + + encoderRegistry.RegisterEncoder("env", codec) + decoderRegistry.RegisterDecoder("env", codec) + } + + v.encoderRegistry = encoderRegistry + v.decoderRegistry = decoderRegistry +} + type defaultRemoteProvider struct { provider string endpoint string @@ -433,7 +476,7 @@ func (v *Viper) WatchConfig() { v.onConfigChange(event) } } else if filepath.Clean(event.Name) == configFile && - event.Op&fsnotify.Remove&fsnotify.Remove != 0 { + event.Op&fsnotify.Remove != 0 { eventsWG.Done() return } @@ -1634,53 +1677,11 @@ func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error { buf.ReadFrom(in) switch format := strings.ToLower(v.getConfigType()); format { - case "yaml", "yml", "json", "toml", "hcl", "tfvars": - err := decoderRegistry.Decode(format, buf.Bytes(), &c) - if err != nil { - return ConfigParseError{err} - } - - case "dotenv", "env": - env, err := gotenv.StrictParse(buf) - if err != nil { - return ConfigParseError{err} - } - for k, v := range env { - c[k] = v - } - - case "properties", 
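The per-instance registries set up in resetEncoding above mean that formats such as properties are now decoded by their codec rather than by ad-hoc code in unmarshalReader; from the caller's side nothing should change. A quick sanity sketch (the config content is made up):

```go
package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.SetConfigType("properties")

	// Decoding goes through the per-instance javaproperties codec, but dotted
	// keys still become nested maps exactly as before.
	props := "server.port=8080\nserver.host=localhost\n"
	if err := v.ReadConfig(strings.NewReader(props)); err != nil {
		panic(err)
	}

	fmt.Println(v.GetInt("server.port"), v.GetString("server.host")) // 8080 localhost
}
```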
"props", "prop": - v.properties = properties.NewProperties() - var err error - if v.properties, err = properties.Load(buf.Bytes(), properties.UTF8); err != nil { - return ConfigParseError{err} - } - for _, key := range v.properties.Keys() { - value, _ := v.properties.Get(key) - // recursively build nested maps - path := strings.Split(key, ".") - lastKey := strings.ToLower(path[len(path)-1]) - deepestMap := deepSearch(c, path[0:len(path)-1]) - // set innermost value - deepestMap[lastKey] = value - } - - case "ini": - cfg := ini.Empty(v.iniLoadOptions) - err := cfg.Append(buf.Bytes()) + case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "properties", "props", "prop", "dotenv", "env": + err := v.decoderRegistry.Decode(format, buf.Bytes(), c) if err != nil { return ConfigParseError{err} } - sections := cfg.Sections() - for i := 0; i < len(sections); i++ { - section := sections[i] - keys := section.Keys() - for j := 0; j < len(keys); j++ { - key := keys[j] - value := cfg.Section(section.Name()).Key(key.Name()).String() - c[section.Name()+"."+key.Name()] = value - } - } } insensitiviseMap(c) @@ -1691,8 +1692,8 @@ func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error { func (v *Viper) marshalWriter(f afero.File, configType string) error { c := v.AllSettings() switch configType { - case "yaml", "yml", "json", "toml", "hcl", "tfvars": - b, err := encoderRegistry.Encode(configType, c) + case "yaml", "yml", "json", "toml", "hcl", "tfvars", "ini", "prop", "props", "properties", "dotenv", "env": + b, err := v.encoderRegistry.Encode(configType, c) if err != nil { return ConfigMarshalError{err} } @@ -1701,50 +1702,6 @@ func (v *Viper) marshalWriter(f afero.File, configType string) error { if err != nil { return ConfigMarshalError{err} } - - case "prop", "props", "properties": - if v.properties == nil { - v.properties = properties.NewProperties() - } - p := v.properties - for _, key := range v.AllKeys() { - _, _, err := p.Set(key, v.GetString(key)) - if err != nil { - return ConfigMarshalError{err} - } - } - _, err := p.WriteComment(f, "#", properties.UTF8) - if err != nil { - return ConfigMarshalError{err} - } - - case "dotenv", "env": - lines := []string{} - for _, key := range v.AllKeys() { - envName := strings.ToUpper(strings.Replace(key, ".", "_", -1)) - val := v.Get(key) - lines = append(lines, fmt.Sprintf("%v=%v", envName, val)) - } - s := strings.Join(lines, "\n") - if _, err := f.WriteString(s); err != nil { - return ConfigMarshalError{err} - } - - case "ini": - keys := v.AllKeys() - cfg := ini.Empty() - ini.PrettyFormat = false - for i := 0; i < len(keys); i++ { - key := keys[i] - lastSep := strings.LastIndex(key, ".") - sectionName := key[:(lastSep)] - keyName := key[(lastSep + 1):] - if sectionName == "default" { - sectionName = "" - } - cfg.Section(sectionName).Key(keyName).SetValue(v.GetString(key)) - } - cfg.WriteTo(f) } return nil } @@ -1761,7 +1718,8 @@ func keyExists(k string, m map[string]interface{}) string { } func castToMapStringInterface( - src map[interface{}]interface{}) map[string]interface{} { + src map[interface{}]interface{}, +) map[string]interface{} { tgt := map[string]interface{}{} for k, v := range src { tgt[fmt.Sprintf("%v", k)] = v @@ -1799,7 +1757,8 @@ func castMapFlagToMapInterface(src map[string]FlagValue) map[string]interface{} // deep. Both map types are supported as there is a go-yaml fork that uses // `map[string]interface{}` instead. 
func mergeMaps( - src, tgt map[string]interface{}, itgt map[interface{}]interface{}) { + src, tgt map[string]interface{}, itgt map[interface{}]interface{}, +) { for sk, sv := range src { tk := keyExists(sk, tgt) if tk == "" { @@ -1823,17 +1782,6 @@ func mergeMaps( svType := reflect.TypeOf(sv) tvType := reflect.TypeOf(tv) - if tvType != nil && svType != tvType { // Allow for the target to be nil - v.logger.Error( - "svType != tvType", - "key", sk, - "st", svType, - "tt", tvType, - "sv", sv, - "tv", tv, - ) - continue - } v.logger.Trace( "processing", @@ -1847,13 +1795,27 @@ func mergeMaps( switch ttv := tv.(type) { case map[interface{}]interface{}: v.logger.Trace("merging maps (must convert)") - tsv := sv.(map[interface{}]interface{}) + tsv, ok := sv.(map[interface{}]interface{}) + if !ok { + v.logger.Error( + "Could not cast sv to map[interface{}]interface{}; key=%s, st=%v, tt=%v, sv=%v, tv=%v", + sk, svType, tvType, sv, tv) + continue + } + ssv := castToMapStringInterface(tsv) stv := castToMapStringInterface(ttv) mergeMaps(ssv, stv, ttv) case map[string]interface{}: v.logger.Trace("merging maps") - mergeMaps(sv.(map[string]interface{}), ttv, nil) + tsv, ok := sv.(map[string]interface{}) + if !ok { + v.logger.Error( + "Could not cast sv to map[string]interface{}; key=%s, st=%v, tt=%v, sv=%v, tv=%v", + sk, svType, tvType, sv, tv) + continue + } + mergeMaps(tsv, ttv, nil) default: v.logger.Trace("setting value") tgt[tk] = sv diff --git a/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE b/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE new file mode 100644 index 000000000..586dfd8cc --- /dev/null +++ b/vendor/github.com/stbenjam/no-sprintf-host-port/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 Stephen Benjamin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
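One behavioural consequence of dropping the svType != tvType guard in mergeMaps above, shown as a hedged sketch: a scalar whose type differs between the existing config and the merged-in map appears to be overwritten now, whereas older releases logged an error and kept the original value:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	v := viper.New()
	v.SetConfigType("yaml")
	_ = v.ReadConfig(strings.NewReader(`port: "8080"`)) // string

	// Same key, different scalar type.
	_ = v.MergeConfigMap(map[string]interface{}{"port": 8080}) // int

	// With the relaxed merge the int should win; previously the string was kept.
	fmt.Printf("%T %v\n", v.Get("port"), v.Get("port"))
}
```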
\ No newline at end of file diff --git a/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go b/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..374bb0d24 --- /dev/null +++ b/vendor/github.com/stbenjam/no-sprintf-host-port/pkg/analyzer/analyzer.go @@ -0,0 +1,96 @@ +package analyzer + +import ( + "fmt" + "go/ast" + "go/token" + "regexp" + + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "nosprintfhostport", + Doc: "Checks for misuse of Sprintf to construct a host with port in a URL.", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + callExpr := node.(*ast.CallExpr) + if p, f, ok := getCallExprFunction(callExpr); ok && p == "fmt" && f == "Sprintf" { + if err := checkForHostPortConstruction(callExpr); err != nil { + pass.Reportf(node.Pos(), err.Error()) + } + } + }) + + return nil, nil +} + +// getCallExprFunction returns the package and function name from a callExpr, if any. +func getCallExprFunction(callExpr *ast.CallExpr) (pkg string, fn string, result bool) { + selector, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok { + return "", "", false + } + gopkg, ok := selector.X.(*ast.Ident) + if !ok { + return "", "", false + } + return gopkg.Name, selector.Sel.Name, true +} + +// getStringLiteral returns the value at a position if it's a string literal. +func getStringLiteral(args []ast.Expr, pos int) (string, bool) { + if len(args) < pos + 1 { + return "", false + } + + // Let's see if our format string is a string literal. + fsRaw, ok := args[pos].(*ast.BasicLit) + if !ok { + return "", false + } + if fsRaw.Kind == token.STRING && len(fsRaw.Value) >= 2 { + return fsRaw.Value[1 : len(fsRaw.Value)-1], true + } else { + return "", false + } +} + +// checkForHostPortConstruction checks to see if a sprintf call looks like a URI with a port, +// essentially scheme://%s:, or scheme://user:pass@%s:. +// +// Matching requirements: +// - Scheme as per RFC3986 is ALPHA *( ALPHA / DIGIT / "+" / "-" / "." 
) +// - A format string substitution in the host portion, preceded by an optional username/password@ +// - A colon indicating a port will be specified +func checkForHostPortConstruction(sprintf *ast.CallExpr) error { + fs, ok := getStringLiteral(sprintf.Args, 0) + if !ok { + return nil + } + + regexes := []*regexp.Regexp{ + regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9+-.]*://%s:[^@]*$`), // URL without basic auth user + regexp.MustCompile(`^[a-zA-Z][a-zA-Z0-9+-.]*://[^/]*@%s:.*$`), // URL with basic auth + } + + for _, re := range regexes { + if re.MatchString(fs) { + return fmt.Errorf("host:port in url should be constructed with net.JoinHostPort and not directly with fmt.Sprintf") + } + } + + return nil +} \ No newline at end of file diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go index 8a6310501..3d492ee9c 100644 --- a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go +++ b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go @@ -5,8 +5,6 @@ import ( "go/ast" "go/token" "go/types" - "log" - "os" "regexp" "strings" @@ -51,11 +49,11 @@ type WrapcheckConfig struct { // unwrapped. // // For example, an ignoreSigRegexp of `[]string{"\.New.*Err\("}`` will ignore errors - // returned from any signture whose method name starts with "New" and ends with "Err" + // returned from any signature whose method name starts with "New" and ends with "Err" // due to the signature matching the regular expression `\.New.*Err\(`. // // Note that this is similar to the ignoreSigs configuration, but provides - // slightly more flexibility in defining rules by which signtures will be + // slightly more flexibility in defining rules by which signatures will be // ignored. IgnoreSigRegexps []string `mapstructure:"ignoreSigRegexps" yaml:"ignoreSigRegexps"` @@ -71,13 +69,23 @@ type WrapcheckConfig struct { // ignorePackageGlobs: // - encoding/* IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs" yaml:"ignorePackageGlobs"` + + // IgnoreInterfaceRegexps defines a list of regular expressions which, if matched + // to a underlying interface name, will ignore unwrapped errors returned from a + // function whose call is defined on the given interface. + // + // For example, an ignoreInterfaceRegexps of `[]string{"Transac(tor|tion)"}`` will ignore errors + // returned from any function whose call is defined on a interface named 'Transactor' + // or 'Transaction' due to the name matching the regular expression `Transac(tor|tion)`. 
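What the nosprintfhostport analyzer above flags, and the fix it suggests, in one small example (the URL scheme and port are illustrative):

```go
package main

import (
	"fmt"
	"net"
)

func main() {
	host, port := "::1", "8080"

	// Flagged: fmt.Sprintf("scheme://%s:...") breaks for IPv6 literals.
	bad := fmt.Sprintf("https://%s:%s", host, port)

	// Suggested instead: net.JoinHostPort brackets IPv6 hosts correctly.
	good := "https://" + net.JoinHostPort(host, port)

	fmt.Println(bad)  // https://::1:8080  (ambiguous)
	fmt.Println(good) // https://[::1]:8080
}
```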
+ IgnoreInterfaceRegexps []string `mapstructure:"ignoreInterfaceRegexps" yaml:"ignoreInterfaceRegexps"` } func NewDefaultConfig() WrapcheckConfig { return WrapcheckConfig{ - IgnoreSigs: DefaultIgnoreSigs, - IgnoreSigRegexps: []string{}, - IgnorePackageGlobs: []string{}, + IgnoreSigs: DefaultIgnoreSigs, + IgnoreSigRegexps: []string{}, + IgnorePackageGlobs: []string{}, + IgnoreInterfaceRegexps: []string{}, } } @@ -91,7 +99,21 @@ func NewAnalyzer(cfg WrapcheckConfig) *analysis.Analyzer { func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { // Precompile the regexps, report the error - ignoreSigRegexp, err := compileRegexps(cfg.IgnoreSigRegexps) + var ( + ignoreSigRegexp []*regexp.Regexp + ignoreInterfaceRegexps []*regexp.Regexp + ignorePackageGlobs []glob.Glob + err error + ) + + ignoreSigRegexp, err = compileRegexps(cfg.IgnoreSigRegexps) + if err == nil { + ignoreInterfaceRegexps, err = compileRegexps(cfg.IgnoreInterfaceRegexps) + } + if err == nil { + ignorePackageGlobs, err = compileGlobs(cfg.IgnorePackageGlobs) + + } return func(pass *analysis.Pass) (interface{}, error) { if err != nil { @@ -120,7 +142,7 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { // tuple check is required. if isError(pass.TypesInfo.TypeOf(expr)) { - reportUnwrapped(pass, retFn, retFn.Pos(), cfg, ignoreSigRegexp) + reportUnwrapped(pass, retFn, retFn.Pos(), cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) return true } @@ -138,7 +160,7 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { return true } if isError(v.Type()) { - reportUnwrapped(pass, retFn, expr.Pos(), cfg, ignoreSigRegexp) + reportUnwrapped(pass, retFn, expr.Pos(), cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) return true } } @@ -200,7 +222,7 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { return true } - reportUnwrapped(pass, call, ident.NamePos, cfg, ignoreSigRegexp) + reportUnwrapped(pass, call, ident.NamePos, cfg, ignoreSigRegexp, ignoreInterfaceRegexps, ignorePackageGlobs) } return true @@ -213,7 +235,7 @@ func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { // Report unwrapped takes a call expression and an identifier and reports // if the call is unwrapped. -func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos, cfg WrapcheckConfig, regexps []*regexp.Regexp) { +func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos, cfg WrapcheckConfig, regexpsSig []*regexp.Regexp, regexpsInter []*regexp.Regexp, pkgGlobs []glob.Glob) { sel, ok := call.Fun.(*ast.SelectorExpr) if !ok { return @@ -224,21 +246,26 @@ func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos if contains(cfg.IgnoreSigs, fnSig) { return - } else if containsMatch(regexps, fnSig) { + } else if containsMatch(regexpsSig, fnSig) { return } // Check if the underlying type of the "x" in x.y.z is an interface, as - // errors returned from interface types should be wrapped. 
+ // errors returned from interface types should be wrapped, unless ignored + // as per `ignoreInterfaceRegexps` if isInterface(pass, sel) { - pass.Reportf(tokenPos, "error returned from interface method should be wrapped: sig: %s", fnSig) - return + name := types.TypeString(pass.TypesInfo.TypeOf(sel.X), func(p *types.Package) string { return p.Name() }) + if containsMatch(regexpsInter, name) { + } else { + pass.Reportf(tokenPos, "error returned from interface method should be wrapped: sig: %s", fnSig) + return + } } // Check whether the function being called comes from another package, // as functions called across package boundaries which returns errors // should be wrapped - if isFromOtherPkg(pass, sel, cfg) { + if isFromOtherPkg(pass, sel, pkgGlobs) { pass.Reportf(tokenPos, "error returned from external package is unwrapped: sig: %s", fnSig) return } @@ -251,23 +278,14 @@ func isInterface(pass *analysis.Pass, sel *ast.SelectorExpr) bool { return ok } -// isFromotherPkg returns whether the function is defined in the pacakge +// isFromotherPkg returns whether the function is defined in the package // currently under analysis or is considered external. It will ignore packages // defined in config.IgnorePackageGlobs. -func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, config WrapcheckConfig) bool { +func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, pkgGlobs []glob.Glob) bool { // The package of the function that we are calling which returns the error fn := pass.TypesInfo.ObjectOf(sel.Sel) - - for _, globString := range config.IgnorePackageGlobs { - g, err := glob.Compile(globString) - if err != nil { - log.Printf("unable to parse glob: %s\n", globString) - os.Exit(1) - } - - if g.Match(fn.Pkg().Path()) { - return false - } + if containsMatchGlob(pkgGlobs, fn.Pkg().Path()) { + return false } // If it's not a package name, then we should check the selector to make sure @@ -350,6 +368,15 @@ func containsMatch(regexps []*regexp.Regexp, el string) bool { return false } +func containsMatchGlob(globs []glob.Glob, el string) bool { + for _, g := range globs { + if g.Match(el) { + return true + } + } + return false +} + // isError returns whether or not the provided type interface is an error func isError(typ types.Type) bool { if typ == nil { @@ -384,3 +411,18 @@ func compileRegexps(regexps []string) ([]*regexp.Regexp, error) { return compiledRegexps, nil } + +// compileGlobs compiles a set of globs, returning them for use, +// or the first encountered error due to an invalid expression. +func compileGlobs(globs []string) ([]glob.Glob, error) { + var compiledGlobs []glob.Glob + for _, globString := range globs { + glob, err := glob.Compile(globString) + if err != nil { + return nil, fmt.Errorf("unable to compile globs %s: %v\n", glob, err) + } + + compiledGlobs = append(compiledGlobs, glob) + } + return compiledGlobs, nil +} diff --git a/vendor/golang.org/x/exp/typeparams/LICENSE b/vendor/golang.org/x/exp/typeparams/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
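If wrapcheck is run standalone rather than through golangci-lint, the new IgnoreInterfaceRegexps knob can be set on the exported config directly. A hedged sketch reusing the Transac(tor|tion) pattern from the option's doc comment above:

```go
package main

import (
	"github.com/tomarrell/wrapcheck/v2/wrapcheck"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	cfg := wrapcheck.NewDefaultConfig()

	// Don't demand wrapping for errors returned via interfaces whose type name
	// matches this pattern (example pattern taken from the doc comment).
	cfg.IgnoreInterfaceRegexps = []string{`Transac(tor|tion)`}

	singlechecker.Main(wrapcheck.NewAnalyzer(cfg))
}
```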
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/exp/typeparams/common.go b/vendor/golang.org/x/exp/typeparams/common.go new file mode 100644 index 000000000..e697083bc --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/common.go @@ -0,0 +1,186 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeparams contains common utilities for writing tools that interact +// with generic Go code, as introduced with Go 1.18. +// +// THIS PACKAGE IS CURRENTLY EXPERIMENTAL AND MAY CHANGE. While the API is +// being tested, we may find the need for improvement. This caveat will be +// removed shortly. +// +// Many of the types and functions in this package are proxies for the new APIs +// introduced in the standard library with Go 1.18. For example, the +// typeparams.Union type is an alias for go/types.Union, and the ForTypeSpec +// function returns the value of the go/ast.TypeSpec.TypeParams field. At Go +// versions older than 1.18 these helpers are implemented as stubs, allowing +// users of this package to write code that handles generic constructs inline, +// even if the Go version being used to compile does not support generics. +// +// Additionally, this package contains common utilities for working with the +// new generic constructs, to supplement the standard library APIs. Notably, +// the NormalTerms API computes a minimal representation of the structural +// restrictions on a type parameter. In the future, these supplemental APIs may +// be available in the standard library.. +package typeparams + +import ( + "go/ast" + "go/token" + "go/types" +) + +// Enabled reports whether type parameters are enabled in the current build +// environment. +func Enabled() bool { + return enabled +} + +// UnpackIndexExpr extracts data from AST nodes that represent index +// expressions. +// +// For an ast.IndexExpr, the resulting indices slice will contain exactly one +// index expression. For an ast.IndexListExpr (go1.18+), it may have a variable +// number of index expressions. +// +// For nodes that don't represent index expressions, the first return value of +// UnpackIndexExpr will be nil. 
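A small sketch of UnpackIndexExpr as documented above, assuming Go 1.18+ so that a multi-index expression parses to an *ast.IndexListExpr:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"

	"golang.org/x/exp/typeparams"
)

func main() {
	// A single index would parse to *ast.IndexExpr; two indices need go1.18+.
	expr, err := parser.ParseExpr("m[int, string]")
	if err != nil {
		panic(err)
	}

	x, _, indices, _ := typeparams.UnpackIndexExpr(expr)
	fmt.Println(x.(*ast.Ident).Name, len(indices)) // m 2
}
```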
+func UnpackIndexExpr(n ast.Node) (x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) { + switch e := n.(type) { + case *ast.IndexExpr: + return e.X, e.Lbrack, []ast.Expr{e.Index}, e.Rbrack + case *IndexListExpr: + return e.X, e.Lbrack, e.Indices, e.Rbrack + } + return nil, token.NoPos, nil, token.NoPos +} + +// PackIndexExpr returns an *ast.IndexExpr or *ast.IndexListExpr, depending on +// the cardinality of indices. Calling PackIndexExpr with len(indices) == 0 +// will panic. +func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack token.Pos) ast.Expr { + switch len(indices) { + case 0: + panic("empty indices") + case 1: + return &ast.IndexExpr{ + X: x, + Lbrack: lbrack, + Index: indices[0], + Rbrack: rbrack, + } + default: + return &IndexListExpr{ + X: x, + Lbrack: lbrack, + Indices: indices, + Rbrack: rbrack, + } + } +} + +// IsTypeParam reports whether t is a type parameter. +func IsTypeParam(t types.Type) bool { + _, ok := t.(*TypeParam) + return ok +} + +// OriginMethod returns the origin method associated with the method fn. For +// methods on a non-generic receiver base type, this is just fn. However, for +// methods with a generic receiver, OriginMethod returns the corresponding +// method in the method set of the origin type. +// +// As a special case, if fn is not a method (has no receiver), OriginMethod +// returns fn. +func OriginMethod(fn *types.Func) *types.Func { + recv := fn.Type().(*types.Signature).Recv() + if recv == nil { + return fn + } + base := recv.Type() + p, isPtr := base.(*types.Pointer) + if isPtr { + base = p.Elem() + } + named, isNamed := base.(*types.Named) + if !isNamed { + // Receiver is a *types.Interface. + return fn + } + if ForNamed(named).Len() == 0 { + // Receiver base has no type parameters, so we can avoid the lookup below. + return fn + } + orig := NamedTypeOrigin(named) + gfn, _, _ := types.LookupFieldOrMethod(orig, true, fn.Pkg(), fn.Name()) + return gfn.(*types.Func) +} + +// GenericAssignableTo is a generalization of types.AssignableTo that +// implements the following rule for uninstantiated generic types: +// +// If V and T are generic named types, then V is considered assignable to T if, +// for every possible instantation of V[A_1, ..., A_N], the instantiation +// T[A_1, ..., A_N] is valid and V[A_1, ..., A_N] implements T[A_1, ..., A_N]. +// +// If T has structural constraints, they must be satisfied by V. +// +// For example, consider the following type declarations: +// +// type Interface[T any] interface { +// Accept(T) +// } +// +// type Container[T any] struct { +// Element T +// } +// +// func (c Container[T]) Accept(t T) { c.Element = t } +// +// In this case, GenericAssignableTo reports that instantiations of Container +// are assignable to the corresponding instantiation of Interface. +func GenericAssignableTo(ctxt *Context, V, T types.Type) bool { + // If V and T are not both named, or do not have matching non-empty type + // parameter lists, fall back on types.AssignableTo. + + VN, Vnamed := V.(*types.Named) + TN, Tnamed := T.(*types.Named) + if !Vnamed || !Tnamed { + return types.AssignableTo(V, T) + } + + vtparams := ForNamed(VN) + ttparams := ForNamed(TN) + if vtparams.Len() == 0 || vtparams.Len() != ttparams.Len() || NamedTypeArgs(VN).Len() != 0 || NamedTypeArgs(TN).Len() != 0 { + return types.AssignableTo(V, T) + } + + // V and T have the same (non-zero) number of type params. Instantiate both + // with the type parameters of V. 
This must always succeed for V, and will + // succeed for T if and only if the type set of each type parameter of V is a + // subset of the type set of the corresponding type parameter of T, meaning + // that every instantiation of V corresponds to a valid instantiation of T. + + // Minor optimization: ensure we share a context across the two + // instantiations below. + if ctxt == nil { + ctxt = NewContext() + } + + var targs []types.Type + for i := 0; i < vtparams.Len(); i++ { + targs = append(targs, vtparams.At(i)) + } + + vinst, err := Instantiate(ctxt, V, targs, true) + if err != nil { + panic("type parameters should satisfy their own constraints") + } + + tinst, err := Instantiate(ctxt, T, targs, true) + if err != nil { + return false + } + + return types.AssignableTo(vinst, tinst) +} diff --git a/vendor/golang.org/x/exp/typeparams/normalize.go b/vendor/golang.org/x/exp/typeparams/normalize.go new file mode 100644 index 000000000..55c788a3b --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/normalize.go @@ -0,0 +1,200 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeparams + +import ( + "errors" + "fmt" + "go/types" + "os" + "strings" +) + +const debug = false + +// ErrEmptyTypeSet is returned if a type set computation results in a type set +// with no types. +var ErrEmptyTypeSet = errors.New("empty type set") + +// NormalTerms returns a slice of terms representing the normalized structural +// type restrictions of a type, if any. +// +// For all types other than *types.TypeParam, *types.Interface, and +// *types.Union, this is just a single term with Tilde() == false and +// Type() == typ. For *types.TypeParam, *types.Interface, and *types.Union, see +// below. +// +// Structural type restrictions of a type parameter are created via +// non-interface types embedded in its constraint interface (directly, or via a +// chain of interface embeddings). For example, in the declaration type +// T[P interface{~int; m()}] int the structural restriction of the type +// parameter P is ~int. +// +// With interface embedding and unions, the specification of structural type +// restrictions may be arbitrarily complex. For example, consider the +// following: +// +// type A interface{ ~string|~[]byte } +// +// type B interface{ int|string } +// +// type C interface { ~string|~int } +// +// type T[P interface{ A|B; C }] int +// +// In this example, the structural type restriction of P is ~string|int: A|B +// expands to ~string|~[]byte|int|string, which reduces to ~string|~[]byte|int, +// which when intersected with C (~string|~int) yields ~string|int. +// +// NormalTerms computes these expansions and reductions, producing a +// "normalized" form of the embeddings. A structural restriction is normalized +// if it is a single union containing no interface terms, and is minimal in the +// sense that removing any term changes the set of types satisfying the +// constraint. It is left as a proof for the reader that, modulo sorting, there +// is exactly one such normalized form. +// +// Because the minimal representation always takes this form, NormalTerms +// returns a slice of tilde terms corresponding to the terms of the union in +// the normalized structural restriction. An error is returned if the type is +// invalid, exceeds complexity bounds, or has an empty type set. In the latter +// case, NormalTerms returns ErrEmptyTypeSet. 
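The Container/Interface example from the GenericAssignableTo doc comment above can be exercised end to end. A sketch that type-checks that snippet and asks the question (requires Go 1.18+):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/exp/typeparams"
)

const src = `package p

type Interface[T any] interface{ Accept(T) }

type Container[T any] struct{ Element T }

func (c Container[T]) Accept(t T) { c.Element = t }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{Importer: importer.Default()}).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	v := pkg.Scope().Lookup("Container").Type()
	t := pkg.Scope().Lookup("Interface").Type()

	// Every instantiation Container[A] implements Interface[A], so this reports true.
	fmt.Println(typeparams.GenericAssignableTo(nil, v, t))
}
```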
+// +// NormalTerms makes no guarantees about the order of terms, except that it +// is deterministic. +func NormalTerms(typ types.Type) ([]*Term, error) { + if tparam, ok := typ.(*TypeParam); ok { + constraint := tparam.Constraint() + if constraint == nil { + return nil, fmt.Errorf("%s has nil constraint", tparam) + } + iface, _ := constraint.Underlying().(*types.Interface) + if iface == nil { + return nil, fmt.Errorf("constraint is %T, not *types.Interface", constraint.Underlying()) + } + typ = iface + } + tset, err := computeTermSetInternal(typ, make(map[types.Type]*termSet), 0) + if err != nil { + return nil, err + } + if tset.terms.isEmpty() { + return nil, ErrEmptyTypeSet + } + if tset.terms.isAll() { + return nil, nil + } + var terms []*Term + for _, term := range tset.terms { + terms = append(terms, NewTerm(term.tilde, term.typ)) + } + return terms, nil +} + +// A termSet holds the normalized set of terms for a given type. +// +// The name termSet is intentionally distinct from 'type set': a type set is +// all types that implement a type (and includes method restrictions), whereas +// a term set just represents the structural restrictions on a type. +type termSet struct { + complete bool + terms termlist +} + +func indentf(depth int, format string, args ...interface{}) { + fmt.Fprintf(os.Stderr, strings.Repeat(".", depth)+format+"\n", args...) +} + +func computeTermSetInternal(t types.Type, seen map[types.Type]*termSet, depth int) (res *termSet, err error) { + if t == nil { + panic("nil type") + } + + if debug { + indentf(depth, "%s", t.String()) + defer func() { + if err != nil { + indentf(depth, "=> %s", err) + } else { + indentf(depth, "=> %s", res.terms.String()) + } + }() + } + + const maxTermCount = 100 + if tset, ok := seen[t]; ok { + if !tset.complete { + return nil, fmt.Errorf("cycle detected in the declaration of %s", t) + } + return tset, nil + } + + // Mark the current type as seen to avoid infinite recursion. + tset := new(termSet) + defer func() { + tset.complete = true + }() + seen[t] = tset + + switch u := t.Underlying().(type) { + case *types.Interface: + // The term set of an interface is the intersection of the term sets of its + // embedded types. + tset.terms = allTermlist + for i := 0; i < u.NumEmbeddeds(); i++ { + embedded := u.EmbeddedType(i) + if _, ok := embedded.Underlying().(*TypeParam); ok { + return nil, fmt.Errorf("invalid embedded type %T", embedded) + } + tset2, err := computeTermSetInternal(embedded, seen, depth+1) + if err != nil { + return nil, err + } + tset.terms = tset.terms.intersect(tset2.terms) + } + case *Union: + // The term set of a union is the union of term sets of its terms. + tset.terms = nil + for i := 0; i < u.Len(); i++ { + t := u.Term(i) + var terms termlist + switch t.Type().Underlying().(type) { + case *types.Interface: + tset2, err := computeTermSetInternal(t.Type(), seen, depth+1) + if err != nil { + return nil, err + } + terms = tset2.terms + case *TypeParam, *Union: + // A stand-alone type parameter or union is not permitted as union + // term. + return nil, fmt.Errorf("invalid union term %T", t) + default: + if t.Type() == types.Typ[types.Invalid] { + continue + } + terms = termlist{{t.Tilde(), t.Type()}} + } + tset.terms = tset.terms.union(terms) + if len(tset.terms) > maxTermCount { + return nil, fmt.Errorf("exceeded max term count %d", maxTermCount) + } + } + case *TypeParam: + panic("unreachable") + default: + // For all other types, the term set is just a single non-tilde term + // holding the type itself. 
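And the worked A|B; C reduction from the NormalTerms doc comment above, run through the function itself (again Go 1.18+ only; term order is unspecified but deterministic):

```go
package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/exp/typeparams"
)

const src = `package p

type A interface{ ~string | ~[]byte }
type B interface{ int | string }
type C interface{ ~string | ~int }

type T[P interface{ A | B; C }] int
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := (&types.Config{Importer: importer.Default()}).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}

	named := pkg.Scope().Lookup("T").Type().(*types.Named)
	tparam := typeparams.ForNamed(named).At(0)

	terms, err := typeparams.NormalTerms(tparam)
	if err != nil {
		panic(err)
	}
	for _, term := range terms {
		fmt.Println(term) // expect ~string and int, matching the reduction worked through above
	}
}
```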
+ if u != types.Typ[types.Invalid] { + tset.terms = termlist{{false, t}} + } + } + return tset, nil +} + +// under is a facade for the go/types internal function of the same name. It is +// used by typeterm.go. +func under(t types.Type) types.Type { + return t.Underlying() +} diff --git a/vendor/golang.org/x/exp/typeparams/termlist.go b/vendor/golang.org/x/exp/typeparams/termlist.go new file mode 100644 index 000000000..6f88bfa93 --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/termlist.go @@ -0,0 +1,172 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by copytermlist.go DO NOT EDIT. + +package typeparams + +import ( + "bytes" + "go/types" +) + +// A termlist represents the type set represented by the union +// t1 ∪ y2 ∪ ... tn of the type sets of the terms t1 to tn. +// A termlist is in normal form if all terms are disjoint. +// termlist operations don't require the operands to be in +// normal form. +type termlist []*term + +// allTermlist represents the set of all types. +// It is in normal form. +var allTermlist = termlist{new(term)} + +// String prints the termlist exactly (without normalization). +func (xl termlist) String() string { + if len(xl) == 0 { + return "∅" + } + var buf bytes.Buffer + for i, x := range xl { + if i > 0 { + buf.WriteString(" ∪ ") + } + buf.WriteString(x.String()) + } + return buf.String() +} + +// isEmpty reports whether the termlist xl represents the empty set of types. +func (xl termlist) isEmpty() bool { + // If there's a non-nil term, the entire list is not empty. + // If the termlist is in normal form, this requires at most + // one iteration. + for _, x := range xl { + if x != nil { + return false + } + } + return true +} + +// isAll reports whether the termlist xl represents the set of all types. +func (xl termlist) isAll() bool { + // If there's a 𝓤 term, the entire list is 𝓤. + // If the termlist is in normal form, this requires at most + // one iteration. + for _, x := range xl { + if x != nil && x.typ == nil { + return true + } + } + return false +} + +// norm returns the normal form of xl. +func (xl termlist) norm() termlist { + // Quadratic algorithm, but good enough for now. + // TODO(gri) fix asymptotic performance + used := make([]bool, len(xl)) + var rl termlist + for i, xi := range xl { + if xi == nil || used[i] { + continue + } + for j := i + 1; j < len(xl); j++ { + xj := xl[j] + if xj == nil || used[j] { + continue + } + if u1, u2 := xi.union(xj); u2 == nil { + // If we encounter a 𝓤 term, the entire list is 𝓤. + // Exit early. + // (Note that this is not just an optimization; + // if we continue, we may end up with a 𝓤 term + // and other terms and the result would not be + // in normal form.) + if u1.typ == nil { + return allTermlist + } + xi = u1 + used[j] = true // xj is now unioned into xi - ignore it in future iterations + } + } + rl = append(rl, xi) + } + return rl +} + +// If the type set represented by xl is specified by a single (non-𝓤) term, +// singleType returns that type. Otherwise it returns nil. +func (xl termlist) singleType() types.Type { + if nl := xl.norm(); len(nl) == 1 { + return nl[0].typ // if nl.isAll() then typ is nil, which is ok + } + return nil +} + +// union returns the union xl ∪ yl. +func (xl termlist) union(yl termlist) termlist { + return append(xl, yl...).norm() +} + +// intersect returns the intersection xl ∩ yl. 
+func (xl termlist) intersect(yl termlist) termlist { + if xl.isEmpty() || yl.isEmpty() { + return nil + } + + // Quadratic algorithm, but good enough for now. + // TODO(gri) fix asymptotic performance + var rl termlist + for _, x := range xl { + for _, y := range yl { + if r := x.intersect(y); r != nil { + rl = append(rl, r) + } + } + } + return rl.norm() +} + +// equal reports whether xl and yl represent the same type set. +func (xl termlist) equal(yl termlist) bool { + // TODO(gri) this should be more efficient + return xl.subsetOf(yl) && yl.subsetOf(xl) +} + +// includes reports whether t ∈ xl. +func (xl termlist) includes(t types.Type) bool { + for _, x := range xl { + if x.includes(t) { + return true + } + } + return false +} + +// supersetOf reports whether y ⊆ xl. +func (xl termlist) supersetOf(y *term) bool { + for _, x := range xl { + if y.subsetOf(x) { + return true + } + } + return false +} + +// subsetOf reports whether xl ⊆ yl. +func (xl termlist) subsetOf(yl termlist) bool { + if yl.isEmpty() { + return xl.isEmpty() + } + + // each term x of xl must be a subset of yl + for _, x := range xl { + if !yl.supersetOf(x) { + return false // x is not a subset yl + } + } + return true +} diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go117.go b/vendor/golang.org/x/exp/typeparams/typeparams_go117.go new file mode 100644 index 000000000..efc33f10f --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/typeparams_go117.go @@ -0,0 +1,202 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.18 +// +build !go1.18 + +package typeparams + +import ( + "go/ast" + "go/token" + "go/types" +) + +const enabled = false + +func unsupported() { + panic("type parameters are unsupported at this go version") +} + +// IndexListExpr is a placeholder type, as type parameters are not supported at +// this Go version. Its methods panic on use. +type IndexListExpr struct { + ast.Expr + X ast.Expr // expression + Lbrack token.Pos // position of "[" + Indices []ast.Expr // index expressions + Rbrack token.Pos // position of "]" +} + +func (*IndexListExpr) Pos() token.Pos { unsupported(); return token.NoPos } +func (*IndexListExpr) End() token.Pos { unsupported(); return token.NoPos } + +// ForTypeSpec returns an empty field list, as type parameters on not supported +// at this Go version. +func ForTypeSpec(*ast.TypeSpec) *ast.FieldList { + return nil +} + +// ForFuncType returns an empty field list, as type parameters are not +// supported at this Go version. +func ForFuncType(*ast.FuncType) *ast.FieldList { + return nil +} + +// TypeParam is a placeholder type, as type parameters are not supported at +// this Go version. Its methods panic on use. +type TypeParam struct{ types.Type } + +func (*TypeParam) String() string { unsupported(); return "" } +func (*TypeParam) Underlying() types.Type { unsupported(); return nil } +func (*TypeParam) Index() int { unsupported(); return 0 } +func (*TypeParam) Constraint() types.Type { unsupported(); return nil } +func (*TypeParam) SetConstraint(types.Type) { unsupported() } +func (*TypeParam) Obj() *types.TypeName { unsupported(); return nil } + +// TypeParamList is a placeholder for an empty type parameter list. +type TypeParamList struct{} + +func (*TypeParamList) Len() int { return 0 } +func (*TypeParamList) At(int) *TypeParam { unsupported(); return nil } + +// TypeList is a placeholder for an empty type list. 
+type TypeList struct{} + +func (*TypeList) Len() int { return 0 } +func (*TypeList) At(int) types.Type { unsupported(); return nil } + +// NewTypeParam is unsupported at this Go version, and panics. +func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { + unsupported() + return nil +} + +// NewSignatureType calls types.NewSignature, panicking if recvTypeParams or +// typeParams is non-empty. +func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { + if len(recvTypeParams) != 0 || len(typeParams) != 0 { + unsupported() + } + return types.NewSignature(recv, params, results, variadic) +} + +// ForSignature returns an empty slice. +func ForSignature(*types.Signature) *TypeParamList { + return nil +} + +// RecvTypeParams returns a nil slice. +func RecvTypeParams(sig *types.Signature) *TypeParamList { + return nil +} + +// IsComparable returns false, as no interfaces are type-restricted at this Go +// version. +func IsComparable(*types.Interface) bool { + return false +} + +// IsMethodSet returns true, as no interfaces are type-restricted at this Go +// version. +func IsMethodSet(*types.Interface) bool { + return true +} + +// IsImplicit returns false, as no interfaces are implicit at this Go version. +func IsImplicit(*types.Interface) bool { + return false +} + +// MarkImplicit does nothing, because this Go version does not have implicit +// interfaces. +func MarkImplicit(*types.Interface) {} + +// ForNamed returns an empty type parameter list, as type parameters are not +// supported at this Go version. +func ForNamed(*types.Named) *TypeParamList { + return nil +} + +// SetForNamed panics if tparams is non-empty. +func SetForNamed(_ *types.Named, tparams []*TypeParam) { + if len(tparams) > 0 { + unsupported() + } +} + +// NamedTypeArgs returns nil. +func NamedTypeArgs(*types.Named) *TypeList { + return nil +} + +// NamedTypeOrigin is the identity method at this Go version. +func NamedTypeOrigin(named *types.Named) types.Type { + return named +} + +// Term holds information about a structural type restriction. +type Term struct { + tilde bool + typ types.Type +} + +func (m *Term) Tilde() bool { return m.tilde } +func (m *Term) Type() types.Type { return m.typ } +func (m *Term) String() string { + pre := "" + if m.tilde { + pre = "~" + } + return pre + m.typ.String() +} + +// NewTerm creates a new placeholder term type. +func NewTerm(tilde bool, typ types.Type) *Term { + return &Term{tilde, typ} +} + +// Union is a placeholder type, as type parameters are not supported at this Go +// version. Its methods panic on use. +type Union struct{ types.Type } + +func (*Union) String() string { unsupported(); return "" } +func (*Union) Underlying() types.Type { unsupported(); return nil } +func (*Union) Len() int { return 0 } +func (*Union) Term(i int) *Term { unsupported(); return nil } + +// NewUnion is unsupported at this Go version, and panics. +func NewUnion(terms []*Term) *Union { + unsupported() + return nil +} + +// InitInstances is a noop at this Go version. +func InitInstances(*types.Info) {} + +// Instance is a placeholder type, as type parameters are not supported at this +// Go version. +type Instance struct { + TypeArgs *TypeList + Type types.Type +} + +// GetInstances returns a nil map, as type parameters are not supported at this +// Go version. 
+func GetInstances(info *types.Info) map[*ast.Ident]Instance { return nil } + +// Context is a placeholder type, as type parameters are not supported at +// this Go version. +type Context struct{} + +// NewContext returns a placeholder Context instance. +func NewContext() *Context { + return &Context{} +} + +// Instantiate is unsupported on this Go version, and panics. +func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { + unsupported() + return nil, nil +} diff --git a/vendor/golang.org/x/exp/typeparams/typeparams_go118.go b/vendor/golang.org/x/exp/typeparams/typeparams_go118.go new file mode 100644 index 000000000..1176104b2 --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/typeparams_go118.go @@ -0,0 +1,148 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.18 +// +build go1.18 + +package typeparams + +import ( + "go/ast" + "go/types" +) + +const enabled = true + +// IndexListExpr is an alias for ast.IndexListExpr. +type IndexListExpr = ast.IndexListExpr + +// ForTypeSpec returns n.TypeParams. +func ForTypeSpec(n *ast.TypeSpec) *ast.FieldList { + if n == nil { + return nil + } + return n.TypeParams +} + +// ForFuncType returns n.TypeParams. +func ForFuncType(n *ast.FuncType) *ast.FieldList { + if n == nil { + return nil + } + return n.TypeParams +} + +// TypeParam is an alias for types.TypeParam +type TypeParam = types.TypeParam + +// TypeParamList is an alias for types.TypeParamList +type TypeParamList = types.TypeParamList + +// TypeList is an alias for types.TypeList +type TypeList = types.TypeList + +// NewTypeParam calls types.NewTypeParam. +func NewTypeParam(name *types.TypeName, constraint types.Type) *TypeParam { + return types.NewTypeParam(name, constraint) +} + +// NewSignatureType calls types.NewSignatureType. +func NewSignatureType(recv *types.Var, recvTypeParams, typeParams []*TypeParam, params, results *types.Tuple, variadic bool) *types.Signature { + return types.NewSignatureType(recv, recvTypeParams, typeParams, params, results, variadic) +} + +// ForSignature returns sig.TypeParams() +func ForSignature(sig *types.Signature) *TypeParamList { + return sig.TypeParams() +} + +// RecvTypeParams returns sig.RecvTypeParams(). +func RecvTypeParams(sig *types.Signature) *TypeParamList { + return sig.RecvTypeParams() +} + +// IsComparable calls iface.IsComparable(). +func IsComparable(iface *types.Interface) bool { + return iface.IsComparable() +} + +// IsMethodSet calls iface.IsMethodSet(). +func IsMethodSet(iface *types.Interface) bool { + return iface.IsMethodSet() +} + +// IsImplicit calls iface.IsImplicit(). +func IsImplicit(iface *types.Interface) bool { + return iface.IsImplicit() +} + +// MarkImplicit calls iface.MarkImplicit(). +func MarkImplicit(iface *types.Interface) { + iface.MarkImplicit() +} + +// ForNamed extracts the (possibly empty) type parameter object list from +// named. +func ForNamed(named *types.Named) *TypeParamList { + return named.TypeParams() +} + +// SetForNamed sets the type params tparams on n. Each tparam must be of +// dynamic type *types.TypeParam. +func SetForNamed(n *types.Named, tparams []*TypeParam) { + n.SetTypeParams(tparams) +} + +// NamedTypeArgs returns named.TypeArgs(). +func NamedTypeArgs(named *types.Named) *TypeList { + return named.TypeArgs() +} + +// NamedTypeOrigin returns named.Orig(). 
+func NamedTypeOrigin(named *types.Named) types.Type { + return named.Origin() +} + +// Term is an alias for types.Term. +type Term = types.Term + +// NewTerm calls types.NewTerm. +func NewTerm(tilde bool, typ types.Type) *Term { + return types.NewTerm(tilde, typ) +} + +// Union is an alias for types.Union +type Union = types.Union + +// NewUnion calls types.NewUnion. +func NewUnion(terms []*Term) *Union { + return types.NewUnion(terms) +} + +// InitInstances initializes info to record information about type and function +// instances. +func InitInstances(info *types.Info) { + info.Instances = make(map[*ast.Ident]types.Instance) +} + +// Instance is an alias for types.Instance. +type Instance = types.Instance + +// GetInstances returns info.Instances. +func GetInstances(info *types.Info) map[*ast.Ident]Instance { + return info.Instances +} + +// Context is an alias for types.Context. +type Context = types.Context + +// NewContext calls types.NewContext. +func NewContext() *Context { + return types.NewContext() +} + +// Instantiate calls types.Instantiate. +func Instantiate(ctxt *Context, typ types.Type, targs []types.Type, validate bool) (types.Type, error) { + return types.Instantiate(ctxt, typ, targs, validate) +} diff --git a/vendor/golang.org/x/exp/typeparams/typeterm.go b/vendor/golang.org/x/exp/typeparams/typeterm.go new file mode 100644 index 000000000..7ddee28d9 --- /dev/null +++ b/vendor/golang.org/x/exp/typeparams/typeterm.go @@ -0,0 +1,170 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Code generated by copytermlist.go DO NOT EDIT. + +package typeparams + +import "go/types" + +// A term describes elementary type sets: +// +// ∅: (*term)(nil) == ∅ // set of no types (empty set) +// 𝓤: &term{} == 𝓤 // set of all types (𝓤niverse) +// T: &term{false, T} == {T} // set of type T +// ~t: &term{true, t} == {t' | under(t') == t} // set of types with underlying type t +// +type term struct { + tilde bool // valid if typ != nil + typ types.Type +} + +func (x *term) String() string { + switch { + case x == nil: + return "∅" + case x.typ == nil: + return "𝓤" + case x.tilde: + return "~" + x.typ.String() + default: + return x.typ.String() + } +} + +// equal reports whether x and y represent the same type set. +func (x *term) equal(y *term) bool { + // easy cases + switch { + case x == nil || y == nil: + return x == y + case x.typ == nil || y.typ == nil: + return x.typ == y.typ + } + // ∅ ⊂ x, y ⊂ 𝓤 + + return x.tilde == y.tilde && types.Identical(x.typ, y.typ) +} + +// union returns the union x ∪ y: zero, one, or two non-nil terms. +func (x *term) union(y *term) (_, _ *term) { + // easy cases + switch { + case x == nil && y == nil: + return nil, nil // ∅ ∪ ∅ == ∅ + case x == nil: + return y, nil // ∅ ∪ y == y + case y == nil: + return x, nil // x ∪ ∅ == x + case x.typ == nil: + return x, nil // 𝓤 ∪ y == 𝓤 + case y.typ == nil: + return y, nil // x ∪ 𝓤 == 𝓤 + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return x, y // x ∪ y == (x, y) if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ∪ ~t == ~t + // ~t ∪ T == ~t + // T ∪ ~t == ~t + // T ∪ T == T + if x.tilde || !y.tilde { + return x, nil + } + return y, nil +} + +// intersect returns the intersection x ∩ y. 
+func (x *term) intersect(y *term) *term { + // easy cases + switch { + case x == nil || y == nil: + return nil // ∅ ∩ y == ∅ and ∩ ∅ == ∅ + case x.typ == nil: + return y // 𝓤 ∩ y == y + case y.typ == nil: + return x // x ∩ 𝓤 == x + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return nil // x ∩ y == ∅ if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ∩ ~t == ~t + // ~t ∩ T == T + // T ∩ ~t == T + // T ∩ T == T + if !x.tilde || y.tilde { + return x + } + return y +} + +// includes reports whether t ∈ x. +func (x *term) includes(t types.Type) bool { + // easy cases + switch { + case x == nil: + return false // t ∈ ∅ == false + case x.typ == nil: + return true // t ∈ 𝓤 == true + } + // ∅ ⊂ x ⊂ 𝓤 + + u := t + if x.tilde { + u = under(u) + } + return types.Identical(x.typ, u) +} + +// subsetOf reports whether x ⊆ y. +func (x *term) subsetOf(y *term) bool { + // easy cases + switch { + case x == nil: + return true // ∅ ⊆ y == true + case y == nil: + return false // x ⊆ ∅ == false since x != ∅ + case y.typ == nil: + return true // x ⊆ 𝓤 == true + case x.typ == nil: + return false // 𝓤 ⊆ y == false since y != 𝓤 + } + // ∅ ⊂ x, y ⊂ 𝓤 + + if x.disjoint(y) { + return false // x ⊆ y == false if x ∩ y == ∅ + } + // x.typ == y.typ + + // ~t ⊆ ~t == true + // ~t ⊆ T == false + // T ⊆ ~t == true + // T ⊆ T == true + return !x.tilde || y.tilde +} + +// disjoint reports whether x ∩ y == ∅. +// x.typ and y.typ must not be nil. +func (x *term) disjoint(y *term) bool { + if debug && (x.typ == nil || y.typ == nil) { + panic("invalid argument(s)") + } + ux := x.typ + if y.tilde { + ux = under(ux) + } + uy := y.typ + if x.tilde { + uy = under(uy) + } + return !types.Identical(ux, uy) +} diff --git a/vendor/golang.org/x/text/cases/cases.go b/vendor/golang.org/x/text/cases/cases.go new file mode 100644 index 000000000..752cdf031 --- /dev/null +++ b/vendor/golang.org/x/text/cases/cases.go @@ -0,0 +1,162 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run gen.go gen_trieval.go + +// Package cases provides general and language-specific case mappers. +package cases // import "golang.org/x/text/cases" + +import ( + "golang.org/x/text/language" + "golang.org/x/text/transform" +) + +// References: +// - Unicode Reference Manual Chapter 3.13, 4.2, and 5.18. +// - https://www.unicode.org/reports/tr29/ +// - https://www.unicode.org/Public/6.3.0/ucd/CaseFolding.txt +// - https://www.unicode.org/Public/6.3.0/ucd/SpecialCasing.txt +// - https://www.unicode.org/Public/6.3.0/ucd/DerivedCoreProperties.txt +// - https://www.unicode.org/Public/6.3.0/ucd/auxiliary/WordBreakProperty.txt +// - https://www.unicode.org/Public/6.3.0/ucd/auxiliary/WordBreakTest.txt +// - http://userguide.icu-project.org/transforms/casemappings + +// TODO: +// - Case folding +// - Wide and Narrow? +// - Segmenter option for title casing. +// - ASCII fast paths +// - Encode Soft-Dotted property within trie somehow. + +// A Caser transforms given input to a certain case. It implements +// transform.Transformer. +// +// A Caser may be stateful and should therefore not be shared between +// goroutines. +type Caser struct { + t transform.SpanningTransformer +} + +// Bytes returns a new byte slice with the result of converting b to the case +// form implemented by c. 
+func (c Caser) Bytes(b []byte) []byte { + b, _, _ = transform.Bytes(c.t, b) + return b +} + +// String returns a string with the result of transforming s to the case form +// implemented by c. +func (c Caser) String(s string) string { + s, _, _ = transform.String(c.t, s) + return s +} + +// Reset resets the Caser to be reused for new input after a previous call to +// Transform. +func (c Caser) Reset() { c.t.Reset() } + +// Transform implements the transform.Transformer interface and transforms the +// given input to the case form implemented by c. +func (c Caser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + return c.t.Transform(dst, src, atEOF) +} + +// Span implements the transform.SpanningTransformer interface. +func (c Caser) Span(src []byte, atEOF bool) (n int, err error) { + return c.t.Span(src, atEOF) +} + +// Upper returns a Caser for language-specific uppercasing. +func Upper(t language.Tag, opts ...Option) Caser { + return Caser{makeUpper(t, getOpts(opts...))} +} + +// Lower returns a Caser for language-specific lowercasing. +func Lower(t language.Tag, opts ...Option) Caser { + return Caser{makeLower(t, getOpts(opts...))} +} + +// Title returns a Caser for language-specific title casing. It uses an +// approximation of the default Unicode Word Break algorithm. +func Title(t language.Tag, opts ...Option) Caser { + return Caser{makeTitle(t, getOpts(opts...))} +} + +// Fold returns a Caser that implements Unicode case folding. The returned Caser +// is stateless and safe to use concurrently by multiple goroutines. +// +// Case folding does not normalize the input and may not preserve a normal form. +// Use the collate or search package for more convenient and linguistically +// sound comparisons. Use golang.org/x/text/secure/precis for string comparisons +// where security aspects are a concern. +func Fold(opts ...Option) Caser { + return Caser{makeFold(getOpts(opts...))} +} + +// An Option is used to modify the behavior of a Caser. +type Option func(o options) options + +// TODO: consider these options to take a boolean as well, like FinalSigma. +// The advantage of using this approach is that other providers of a lower-case +// algorithm could set different defaults by prefixing a user-provided slice +// of options with their own. This is handy, for instance, for the precis +// package which would override the default to not handle the Greek final sigma. + +var ( + // NoLower disables the lowercasing of non-leading letters for a title + // caser. + NoLower Option = noLower + + // Compact omits mappings in case folding for characters that would grow the + // input. (Unimplemented.) + Compact Option = compact +) + +// TODO: option to preserve a normal form, if applicable? + +type options struct { + noLower bool + simple bool + + // TODO: segmenter, max ignorable, alternative versions, etc. + + ignoreFinalSigma bool +} + +func getOpts(o ...Option) (res options) { + for _, f := range o { + res = f(res) + } + return +} + +func noLower(o options) options { + o.noLower = true + return o +} + +func compact(o options) options { + o.simple = true + return o +} + +// HandleFinalSigma specifies whether the special handling of Greek final sigma +// should be enabled. Unicode prescribes handling the Greek final sigma for all +// locales, but standards like IDNA and PRECIS override this default. 
+func HandleFinalSigma(enable bool) Option { + if enable { + return handleFinalSigma + } + return ignoreFinalSigma +} + +func ignoreFinalSigma(o options) options { + o.ignoreFinalSigma = true + return o +} + +func handleFinalSigma(o options) options { + o.ignoreFinalSigma = false + return o +} diff --git a/vendor/golang.org/x/text/cases/context.go b/vendor/golang.org/x/text/cases/context.go new file mode 100644 index 000000000..e9aa9e193 --- /dev/null +++ b/vendor/golang.org/x/text/cases/context.go @@ -0,0 +1,376 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cases + +import "golang.org/x/text/transform" + +// A context is used for iterating over source bytes, fetching case info and +// writing to a destination buffer. +// +// Casing operations may need more than one rune of context to decide how a rune +// should be cased. Casing implementations should call checkpoint on context +// whenever it is known to be safe to return the runes processed so far. +// +// It is recommended for implementations to not allow for more than 30 case +// ignorables as lookahead (analogous to the limit in norm) and to use state if +// unbounded lookahead is needed for cased runes. +type context struct { + dst, src []byte + atEOF bool + + pDst int // pDst points past the last written rune in dst. + pSrc int // pSrc points to the start of the currently scanned rune. + + // checkpoints safe to return in Transform, where nDst <= pDst and nSrc <= pSrc. + nDst, nSrc int + err error + + sz int // size of current rune + info info // case information of currently scanned rune + + // State preserved across calls to Transform. + isMidWord bool // false if next cased letter needs to be title-cased. +} + +func (c *context) Reset() { + c.isMidWord = false +} + +// ret returns the return values for the Transform method. It checks whether +// there were insufficient bytes in src to complete and introduces an error +// accordingly, if necessary. +func (c *context) ret() (nDst, nSrc int, err error) { + if c.err != nil || c.nSrc == len(c.src) { + return c.nDst, c.nSrc, c.err + } + // This point is only reached by mappers if there was no short destination + // buffer. This means that the source buffer was exhausted and that c.sz was + // set to 0 by next. + if c.atEOF && c.pSrc == len(c.src) { + return c.pDst, c.pSrc, nil + } + return c.nDst, c.nSrc, transform.ErrShortSrc +} + +// retSpan returns the return values for the Span method. It checks whether +// there were insufficient bytes in src to complete and introduces an error +// accordingly, if necessary. +func (c *context) retSpan() (n int, err error) { + _, nSrc, err := c.ret() + return nSrc, err +} + +// checkpoint sets the return value buffer points for Transform to the current +// positions. +func (c *context) checkpoint() { + if c.err == nil { + c.nDst, c.nSrc = c.pDst, c.pSrc+c.sz + } +} + +// unreadRune causes the last rune read by next to be reread on the next +// invocation of next. Only one unreadRune may be called after a call to next. +func (c *context) unreadRune() { + c.sz = 0 +} + +func (c *context) next() bool { + c.pSrc += c.sz + if c.pSrc == len(c.src) || c.err != nil { + c.info, c.sz = 0, 0 + return false + } + v, sz := trie.lookup(c.src[c.pSrc:]) + c.info, c.sz = info(v), sz + if c.sz == 0 { + if c.atEOF { + // A zero size means we have an incomplete rune. 
If we are atEOF, + // this means it is an illegal rune, which we will consume one + // byte at a time. + c.sz = 1 + } else { + c.err = transform.ErrShortSrc + return false + } + } + return true +} + +// writeBytes adds bytes to dst. +func (c *context) writeBytes(b []byte) bool { + if len(c.dst)-c.pDst < len(b) { + c.err = transform.ErrShortDst + return false + } + // This loop is faster than using copy. + for _, ch := range b { + c.dst[c.pDst] = ch + c.pDst++ + } + return true +} + +// writeString writes the given string to dst. +func (c *context) writeString(s string) bool { + if len(c.dst)-c.pDst < len(s) { + c.err = transform.ErrShortDst + return false + } + // This loop is faster than using copy. + for i := 0; i < len(s); i++ { + c.dst[c.pDst] = s[i] + c.pDst++ + } + return true +} + +// copy writes the current rune to dst. +func (c *context) copy() bool { + return c.writeBytes(c.src[c.pSrc : c.pSrc+c.sz]) +} + +// copyXOR copies the current rune to dst and modifies it by applying the XOR +// pattern of the case info. It is the responsibility of the caller to ensure +// that this is a rune with a XOR pattern defined. +func (c *context) copyXOR() bool { + if !c.copy() { + return false + } + if c.info&xorIndexBit == 0 { + // Fast path for 6-bit XOR pattern, which covers most cases. + c.dst[c.pDst-1] ^= byte(c.info >> xorShift) + } else { + // Interpret XOR bits as an index. + // TODO: test performance for unrolling this loop. Verify that we have + // at least two bytes and at most three. + idx := c.info >> xorShift + for p := c.pDst - 1; ; p-- { + c.dst[p] ^= xorData[idx] + idx-- + if xorData[idx] == 0 { + break + } + } + } + return true +} + +// hasPrefix returns true if src[pSrc:] starts with the given string. +func (c *context) hasPrefix(s string) bool { + b := c.src[c.pSrc:] + if len(b) < len(s) { + return false + } + for i, c := range b[:len(s)] { + if c != s[i] { + return false + } + } + return true +} + +// caseType returns an info with only the case bits, normalized to either +// cLower, cUpper, cTitle or cUncased. +func (c *context) caseType() info { + cm := c.info & 0x7 + if cm < 4 { + return cm + } + if cm >= cXORCase { + // xor the last bit of the rune with the case type bits. + b := c.src[c.pSrc+c.sz-1] + return info(b&1) ^ cm&0x3 + } + if cm == cIgnorableCased { + return cLower + } + return cUncased +} + +// lower writes the lowercase version of the current rune to dst. +func lower(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cLower { + return c.copy() + } + if c.info&exceptionBit == 0 { + return c.copyXOR() + } + e := exceptions[c.info>>exceptionShift:] + offset := 2 + e[0]&lengthMask // size of header + fold string + if nLower := (e[1] >> lengthBits) & lengthMask; nLower != noChange { + return c.writeString(e[offset : offset+nLower]) + } + return c.copy() +} + +func isLower(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cLower { + return true + } + if c.info&exceptionBit == 0 { + c.err = transform.ErrEndOfSpan + return false + } + e := exceptions[c.info>>exceptionShift:] + if nLower := (e[1] >> lengthBits) & lengthMask; nLower != noChange { + c.err = transform.ErrEndOfSpan + return false + } + return true +} + +// upper writes the uppercase version of the current rune to dst. 
+func upper(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cUpper { + return c.copy() + } + if c.info&exceptionBit == 0 { + return c.copyXOR() + } + e := exceptions[c.info>>exceptionShift:] + offset := 2 + e[0]&lengthMask // size of header + fold string + // Get length of first special case mapping. + n := (e[1] >> lengthBits) & lengthMask + if ct == cTitle { + // The first special case mapping is for lower. Set n to the second. + if n == noChange { + n = 0 + } + n, e = e[1]&lengthMask, e[n:] + } + if n != noChange { + return c.writeString(e[offset : offset+n]) + } + return c.copy() +} + +// isUpper writes the isUppercase version of the current rune to dst. +func isUpper(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cUpper { + return true + } + if c.info&exceptionBit == 0 { + c.err = transform.ErrEndOfSpan + return false + } + e := exceptions[c.info>>exceptionShift:] + // Get length of first special case mapping. + n := (e[1] >> lengthBits) & lengthMask + if ct == cTitle { + n = e[1] & lengthMask + } + if n != noChange { + c.err = transform.ErrEndOfSpan + return false + } + return true +} + +// title writes the title case version of the current rune to dst. +func title(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cTitle { + return c.copy() + } + if c.info&exceptionBit == 0 { + if ct == cLower { + return c.copyXOR() + } + return c.copy() + } + // Get the exception data. + e := exceptions[c.info>>exceptionShift:] + offset := 2 + e[0]&lengthMask // size of header + fold string + + nFirst := (e[1] >> lengthBits) & lengthMask + if nTitle := e[1] & lengthMask; nTitle != noChange { + if nFirst != noChange { + e = e[nFirst:] + } + return c.writeString(e[offset : offset+nTitle]) + } + if ct == cLower && nFirst != noChange { + // Use the uppercase version instead. + return c.writeString(e[offset : offset+nFirst]) + } + // Already in correct case. + return c.copy() +} + +// isTitle reports whether the current rune is in title case. +func isTitle(c *context) bool { + ct := c.caseType() + if c.info&hasMappingMask == 0 || ct == cTitle { + return true + } + if c.info&exceptionBit == 0 { + if ct == cLower { + c.err = transform.ErrEndOfSpan + return false + } + return true + } + // Get the exception data. + e := exceptions[c.info>>exceptionShift:] + if nTitle := e[1] & lengthMask; nTitle != noChange { + c.err = transform.ErrEndOfSpan + return false + } + nFirst := (e[1] >> lengthBits) & lengthMask + if ct == cLower && nFirst != noChange { + c.err = transform.ErrEndOfSpan + return false + } + return true +} + +// foldFull writes the foldFull version of the current rune to dst. 
+func foldFull(c *context) bool { + if c.info&hasMappingMask == 0 { + return c.copy() + } + ct := c.caseType() + if c.info&exceptionBit == 0 { + if ct != cLower || c.info&inverseFoldBit != 0 { + return c.copyXOR() + } + return c.copy() + } + e := exceptions[c.info>>exceptionShift:] + n := e[0] & lengthMask + if n == 0 { + if ct == cLower { + return c.copy() + } + n = (e[1] >> lengthBits) & lengthMask + } + return c.writeString(e[2 : 2+n]) +} + +// isFoldFull reports whether the current run is mapped to foldFull +func isFoldFull(c *context) bool { + if c.info&hasMappingMask == 0 { + return true + } + ct := c.caseType() + if c.info&exceptionBit == 0 { + if ct != cLower || c.info&inverseFoldBit != 0 { + c.err = transform.ErrEndOfSpan + return false + } + return true + } + e := exceptions[c.info>>exceptionShift:] + n := e[0] & lengthMask + if n == 0 && ct == cLower { + return true + } + c.err = transform.ErrEndOfSpan + return false +} diff --git a/vendor/golang.org/x/text/cases/fold.go b/vendor/golang.org/x/text/cases/fold.go new file mode 100644 index 000000000..85cc434fa --- /dev/null +++ b/vendor/golang.org/x/text/cases/fold.go @@ -0,0 +1,34 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cases + +import "golang.org/x/text/transform" + +type caseFolder struct{ transform.NopResetter } + +// caseFolder implements the Transformer interface for doing case folding. +func (t *caseFolder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + c := context{dst: dst, src: src, atEOF: atEOF} + for c.next() { + foldFull(&c) + c.checkpoint() + } + return c.ret() +} + +func (t *caseFolder) Span(src []byte, atEOF bool) (n int, err error) { + c := context{src: src, atEOF: atEOF} + for c.next() && isFoldFull(&c) { + c.checkpoint() + } + return c.retSpan() +} + +func makeFold(o options) transform.SpanningTransformer { + // TODO: Special case folding, through option Language, Special/Turkic, or + // both. + // TODO: Implement Compact options. + return &caseFolder{} +} diff --git a/vendor/golang.org/x/text/cases/icu.go b/vendor/golang.org/x/text/cases/icu.go new file mode 100644 index 000000000..2dc84b39e --- /dev/null +++ b/vendor/golang.org/x/text/cases/icu.go @@ -0,0 +1,62 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build icu +// +build icu + +package cases + +// Ideally these functions would be defined in a test file, but go test doesn't +// allow CGO in tests. The build tag should ensure either way that these +// functions will not end up in the package. + +// TODO: Ensure that the correct ICU version is set. 
+ +/* +#cgo LDFLAGS: -licui18n.57 -licuuc.57 +#include +#include +#include +#include +#include +*/ +import "C" + +import "unsafe" + +func doICU(tag, caser, input string) string { + err := C.UErrorCode(0) + loc := C.CString(tag) + cm := C.ucasemap_open(loc, C.uint32_t(0), &err) + + buf := make([]byte, len(input)*4) + dst := (*C.char)(unsafe.Pointer(&buf[0])) + src := C.CString(input) + + cn := C.int32_t(0) + + switch caser { + case "fold": + cn = C.ucasemap_utf8FoldCase(cm, + dst, C.int32_t(len(buf)), + src, C.int32_t(len(input)), + &err) + case "lower": + cn = C.ucasemap_utf8ToLower(cm, + dst, C.int32_t(len(buf)), + src, C.int32_t(len(input)), + &err) + case "upper": + cn = C.ucasemap_utf8ToUpper(cm, + dst, C.int32_t(len(buf)), + src, C.int32_t(len(input)), + &err) + case "title": + cn = C.ucasemap_utf8ToTitle(cm, + dst, C.int32_t(len(buf)), + src, C.int32_t(len(input)), + &err) + } + return string(buf[:cn]) +} diff --git a/vendor/golang.org/x/text/cases/info.go b/vendor/golang.org/x/text/cases/info.go new file mode 100644 index 000000000..87a7c3e95 --- /dev/null +++ b/vendor/golang.org/x/text/cases/info.go @@ -0,0 +1,82 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cases + +func (c info) cccVal() info { + if c&exceptionBit != 0 { + return info(exceptions[c>>exceptionShift]) & cccMask + } + return c & cccMask +} + +func (c info) cccType() info { + ccc := c.cccVal() + if ccc <= cccZero { + return cccZero + } + return ccc +} + +// TODO: Implement full Unicode breaking algorithm: +// 1) Implement breaking in separate package. +// 2) Use the breaker here. +// 3) Compare table size and performance of using the more generic breaker. +// +// Note that we can extend the current algorithm to be much more accurate. This +// only makes sense, though, if the performance and/or space penalty of using +// the generic breaker is big. Extra data will only be needed for non-cased +// runes, which means there are sufficient bits left in the caseType. +// ICU prohibits breaking in such cases as well. + +// For the purpose of title casing we use an approximation of the Unicode Word +// Breaking algorithm defined in Annex #29: +// https://www.unicode.org/reports/tr29/#Default_Grapheme_Cluster_Table. +// +// For our approximation, we group the Word Break types into the following +// categories, with associated rules: +// +// 1) Letter: +// ALetter, Hebrew_Letter, Numeric, ExtendNumLet, Extend, Format_FE, ZWJ. +// Rule: Never break between consecutive runes of this category. +// +// 2) Mid: +// MidLetter, MidNumLet, Single_Quote. +// (Cf. case-ignorable: MidLetter, MidNumLet, Single_Quote or cat is Mn, +// Me, Cf, Lm or Sk). +// Rule: Don't break between Letter and Mid, but break between two Mids. +// +// 3) Break: +// Any other category: NewLine, MidNum, CR, LF, Double_Quote, Katakana, and +// Other. +// These categories should always result in a break between two cased letters. +// Rule: Always break. +// +// Note 1: the Katakana and MidNum categories can, in esoteric cases, result in +// preventing a break between two cased letters. For now we will ignore this +// (e.g. [ALetter] [ExtendNumLet] [Katakana] [ExtendNumLet] [ALetter] and +// [ALetter] [Numeric] [MidNum] [Numeric] [ALetter].) +// +// Note 2: the rule for Mid is very approximate, but works in most cases. To +// improve, we could store the categories in the trie value and use a FA to +// manage breaks. 
See TODO comment above. +// +// Note 3: according to the spec, it is possible for the Extend category to +// introduce breaks between other categories grouped in Letter. However, this +// is undesirable for our purposes. ICU prevents breaks in such cases as well. + +// isBreak returns whether this rune should introduce a break. +func (c info) isBreak() bool { + return c.cccVal() == cccBreak +} + +// isLetter returns whether the rune is of break type ALetter, Hebrew_Letter, +// Numeric, ExtendNumLet, or Extend. +func (c info) isLetter() bool { + ccc := c.cccVal() + if ccc == cccZero { + return !c.isCaseIgnorable() + } + return ccc != cccBreak +} diff --git a/vendor/golang.org/x/text/cases/map.go b/vendor/golang.org/x/text/cases/map.go new file mode 100644 index 000000000..0f7c6a14b --- /dev/null +++ b/vendor/golang.org/x/text/cases/map.go @@ -0,0 +1,816 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cases + +// This file contains the definitions of case mappings for all supported +// languages. The rules for the language-specific tailorings were taken and +// modified from the CLDR transform definitions in common/transforms. + +import ( + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/text/internal" + "golang.org/x/text/language" + "golang.org/x/text/transform" + "golang.org/x/text/unicode/norm" +) + +// A mapFunc takes a context set to the current rune and writes the mapped +// version to the same context. It may advance the context to the next rune. It +// returns whether a checkpoint is possible: whether the pDst bytes written to +// dst so far won't need changing as we see more source bytes. +type mapFunc func(*context) bool + +// A spanFunc takes a context set to the current rune and returns whether this +// rune would be altered when written to the output. It may advance the context +// to the next rune. It returns whether a checkpoint is possible. +type spanFunc func(*context) bool + +// maxIgnorable defines the maximum number of ignorables to consider for +// lookahead operations. +const maxIgnorable = 30 + +// supported lists the language tags for which we have tailorings. +const supported = "und af az el lt nl tr" + +func init() { + tags := []language.Tag{} + for _, s := range strings.Split(supported, " ") { + tags = append(tags, language.MustParse(s)) + } + matcher = internal.NewInheritanceMatcher(tags) + Supported = language.NewCoverage(tags) +} + +var ( + matcher *internal.InheritanceMatcher + + Supported language.Coverage + + // We keep the following lists separate, instead of having a single per- + // language struct, to give the compiler a chance to remove unused code. + + // Some uppercase mappers are stateless, so we can precompute the + // Transformers and save a bit on runtime allocations. 
+ upperFunc = []struct { + upper mapFunc + span spanFunc + }{ + {nil, nil}, // und + {nil, nil}, // af + {aztrUpper(upper), isUpper}, // az + {elUpper, noSpan}, // el + {ltUpper(upper), noSpan}, // lt + {nil, nil}, // nl + {aztrUpper(upper), isUpper}, // tr + } + + undUpper transform.SpanningTransformer = &undUpperCaser{} + undLower transform.SpanningTransformer = &undLowerCaser{} + undLowerIgnoreSigma transform.SpanningTransformer = &undLowerIgnoreSigmaCaser{} + + lowerFunc = []mapFunc{ + nil, // und + nil, // af + aztrLower, // az + nil, // el + ltLower, // lt + nil, // nl + aztrLower, // tr + } + + titleInfos = []struct { + title mapFunc + lower mapFunc + titleSpan spanFunc + rewrite func(*context) + }{ + {title, lower, isTitle, nil}, // und + {title, lower, isTitle, afnlRewrite}, // af + {aztrUpper(title), aztrLower, isTitle, nil}, // az + {title, lower, isTitle, nil}, // el + {ltUpper(title), ltLower, noSpan, nil}, // lt + {nlTitle, lower, nlTitleSpan, afnlRewrite}, // nl + {aztrUpper(title), aztrLower, isTitle, nil}, // tr + } +) + +func makeUpper(t language.Tag, o options) transform.SpanningTransformer { + _, i, _ := matcher.Match(t) + f := upperFunc[i].upper + if f == nil { + return undUpper + } + return &simpleCaser{f: f, span: upperFunc[i].span} +} + +func makeLower(t language.Tag, o options) transform.SpanningTransformer { + _, i, _ := matcher.Match(t) + f := lowerFunc[i] + if f == nil { + if o.ignoreFinalSigma { + return undLowerIgnoreSigma + } + return undLower + } + if o.ignoreFinalSigma { + return &simpleCaser{f: f, span: isLower} + } + return &lowerCaser{ + first: f, + midWord: finalSigma(f), + } +} + +func makeTitle(t language.Tag, o options) transform.SpanningTransformer { + _, i, _ := matcher.Match(t) + x := &titleInfos[i] + lower := x.lower + if o.noLower { + lower = (*context).copy + } else if !o.ignoreFinalSigma { + lower = finalSigma(lower) + } + return &titleCaser{ + title: x.title, + lower: lower, + titleSpan: x.titleSpan, + rewrite: x.rewrite, + } +} + +func noSpan(c *context) bool { + c.err = transform.ErrEndOfSpan + return false +} + +// TODO: consider a similar special case for the fast majority lower case. This +// is a bit more involved so will require some more precise benchmarking to +// justify it. + +type undUpperCaser struct{ transform.NopResetter } + +// undUpperCaser implements the Transformer interface for doing an upper case +// mapping for the root locale (und). It eliminates the need for an allocation +// as it prevents escaping by not using function pointers. +func (t undUpperCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + c := context{dst: dst, src: src, atEOF: atEOF} + for c.next() { + upper(&c) + c.checkpoint() + } + return c.ret() +} + +func (t undUpperCaser) Span(src []byte, atEOF bool) (n int, err error) { + c := context{src: src, atEOF: atEOF} + for c.next() && isUpper(&c) { + c.checkpoint() + } + return c.retSpan() +} + +// undLowerIgnoreSigmaCaser implements the Transformer interface for doing +// a lower case mapping for the root locale (und) ignoring final sigma +// handling. This casing algorithm is used in some performance-critical packages +// like secure/precis and x/net/http/idna, which warrants its special-casing. 
+type undLowerIgnoreSigmaCaser struct{ transform.NopResetter } + +func (t undLowerIgnoreSigmaCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + c := context{dst: dst, src: src, atEOF: atEOF} + for c.next() && lower(&c) { + c.checkpoint() + } + return c.ret() + +} + +// Span implements a generic lower-casing. This is possible as isLower works +// for all lowercasing variants. All lowercase variants only vary in how they +// transform a non-lowercase letter. They will never change an already lowercase +// letter. In addition, there is no state. +func (t undLowerIgnoreSigmaCaser) Span(src []byte, atEOF bool) (n int, err error) { + c := context{src: src, atEOF: atEOF} + for c.next() && isLower(&c) { + c.checkpoint() + } + return c.retSpan() +} + +type simpleCaser struct { + context + f mapFunc + span spanFunc +} + +// simpleCaser implements the Transformer interface for doing a case operation +// on a rune-by-rune basis. +func (t *simpleCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + c := context{dst: dst, src: src, atEOF: atEOF} + for c.next() && t.f(&c) { + c.checkpoint() + } + return c.ret() +} + +func (t *simpleCaser) Span(src []byte, atEOF bool) (n int, err error) { + c := context{src: src, atEOF: atEOF} + for c.next() && t.span(&c) { + c.checkpoint() + } + return c.retSpan() +} + +// undLowerCaser implements the Transformer interface for doing a lower case +// mapping for the root locale (und) ignoring final sigma handling. This casing +// algorithm is used in some performance-critical packages like secure/precis +// and x/net/http/idna, which warrants its special-casing. +type undLowerCaser struct{ transform.NopResetter } + +func (t undLowerCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + c := context{dst: dst, src: src, atEOF: atEOF} + + for isInterWord := true; c.next(); { + if isInterWord { + if c.info.isCased() { + if !lower(&c) { + break + } + isInterWord = false + } else if !c.copy() { + break + } + } else { + if c.info.isNotCasedAndNotCaseIgnorable() { + if !c.copy() { + break + } + isInterWord = true + } else if !c.hasPrefix("Σ") { + if !lower(&c) { + break + } + } else if !finalSigmaBody(&c) { + break + } + } + c.checkpoint() + } + return c.ret() +} + +func (t undLowerCaser) Span(src []byte, atEOF bool) (n int, err error) { + c := context{src: src, atEOF: atEOF} + for c.next() && isLower(&c) { + c.checkpoint() + } + return c.retSpan() +} + +// lowerCaser implements the Transformer interface. The default Unicode lower +// casing requires different treatment for the first and subsequent characters +// of a word, most notably to handle the Greek final Sigma. +type lowerCaser struct { + undLowerIgnoreSigmaCaser + + context + + first, midWord mapFunc +} + +func (t *lowerCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + t.context = context{dst: dst, src: src, atEOF: atEOF} + c := &t.context + + for isInterWord := true; c.next(); { + if isInterWord { + if c.info.isCased() { + if !t.first(c) { + break + } + isInterWord = false + } else if !c.copy() { + break + } + } else { + if c.info.isNotCasedAndNotCaseIgnorable() { + if !c.copy() { + break + } + isInterWord = true + } else if !t.midWord(c) { + break + } + } + c.checkpoint() + } + return c.ret() +} + +// titleCaser implements the Transformer interface. Title casing algorithms +// distinguish between the first letter of a word and subsequent letters of the +// same word. 
It uses state to avoid requiring a potentially infinite lookahead. +type titleCaser struct { + context + + // rune mappings used by the actual casing algorithms. + title mapFunc + lower mapFunc + titleSpan spanFunc + + rewrite func(*context) +} + +// Transform implements the standard Unicode title case algorithm as defined in +// Chapter 3 of The Unicode Standard: +// toTitlecase(X): Find the word boundaries in X according to Unicode Standard +// Annex #29, "Unicode Text Segmentation." For each word boundary, find the +// first cased character F following the word boundary. If F exists, map F to +// Titlecase_Mapping(F); then map all characters C between F and the following +// word boundary to Lowercase_Mapping(C). +func (t *titleCaser) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + t.context = context{dst: dst, src: src, atEOF: atEOF, isMidWord: t.isMidWord} + c := &t.context + + if !c.next() { + return c.ret() + } + + for { + p := c.info + if t.rewrite != nil { + t.rewrite(c) + } + + wasMid := p.isMid() + // Break out of this loop on failure to ensure we do not modify the + // state incorrectly. + if p.isCased() { + if !c.isMidWord { + if !t.title(c) { + break + } + c.isMidWord = true + } else if !t.lower(c) { + break + } + } else if !c.copy() { + break + } else if p.isBreak() { + c.isMidWord = false + } + + // As we save the state of the transformer, it is safe to call + // checkpoint after any successful write. + if !(c.isMidWord && wasMid) { + c.checkpoint() + } + + if !c.next() { + break + } + if wasMid && c.info.isMid() { + c.isMidWord = false + } + } + return c.ret() +} + +func (t *titleCaser) Span(src []byte, atEOF bool) (n int, err error) { + t.context = context{src: src, atEOF: atEOF, isMidWord: t.isMidWord} + c := &t.context + + if !c.next() { + return c.retSpan() + } + + for { + p := c.info + if t.rewrite != nil { + t.rewrite(c) + } + + wasMid := p.isMid() + // Break out of this loop on failure to ensure we do not modify the + // state incorrectly. + if p.isCased() { + if !c.isMidWord { + if !t.titleSpan(c) { + break + } + c.isMidWord = true + } else if !isLower(c) { + break + } + } else if p.isBreak() { + c.isMidWord = false + } + // As we save the state of the transformer, it is safe to call + // checkpoint after any successful write. + if !(c.isMidWord && wasMid) { + c.checkpoint() + } + + if !c.next() { + break + } + if wasMid && c.info.isMid() { + c.isMidWord = false + } + } + return c.retSpan() +} + +// finalSigma adds Greek final Sigma handing to another casing function. It +// determines whether a lowercased sigma should be σ or ς, by looking ahead for +// case-ignorables and a cased letters. +func finalSigma(f mapFunc) mapFunc { + return func(c *context) bool { + if !c.hasPrefix("Σ") { + return f(c) + } + return finalSigmaBody(c) + } +} + +func finalSigmaBody(c *context) bool { + // Current rune must be ∑. + + // ::NFD(); + // # 03A3; 03C2; 03A3; 03A3; Final_Sigma; # GREEK CAPITAL LETTER SIGMA + // Σ } [:case-ignorable:]* [:cased:] → σ; + // [:cased:] [:case-ignorable:]* { Σ → ς; + // ::Any-Lower; + // ::NFC(); + + p := c.pDst + c.writeString("ς") + + // TODO: we should do this here, but right now this will never have an + // effect as this is called when the prefix is Sigma, whereas Dutch and + // Afrikaans only test for an apostrophe. + // + // if t.rewrite != nil { + // t.rewrite(c) + // } + + // We need to do one more iteration after maxIgnorable, as a cased + // letter is not an ignorable and may modify the result. 
+ wasMid := false + for i := 0; i < maxIgnorable+1; i++ { + if !c.next() { + return false + } + if !c.info.isCaseIgnorable() { + // All Midword runes are also case ignorable, so we are + // guaranteed to have a letter or word break here. As we are + // unreading the run, there is no need to unset c.isMidWord; + // the title caser will handle this. + if c.info.isCased() { + // p+1 is guaranteed to be in bounds: if writing ς was + // successful, p+1 will contain the second byte of ς. If not, + // this function will have returned after c.next returned false. + c.dst[p+1]++ // ς → σ + } + c.unreadRune() + return true + } + // A case ignorable may also introduce a word break, so we may need + // to continue searching even after detecting a break. + isMid := c.info.isMid() + if (wasMid && isMid) || c.info.isBreak() { + c.isMidWord = false + } + wasMid = isMid + c.copy() + } + return true +} + +// finalSigmaSpan would be the same as isLower. + +// elUpper implements Greek upper casing, which entails removing a predefined +// set of non-blocked modifiers. Note that these accents should not be removed +// for title casing! +// Example: "Οδός" -> "ΟΔΟΣ". +func elUpper(c *context) bool { + // From CLDR: + // [:Greek:] [^[:ccc=Not_Reordered:][:ccc=Above:]]*? { [\u0313\u0314\u0301\u0300\u0306\u0342\u0308\u0304] → ; + // [:Greek:] [^[:ccc=Not_Reordered:][:ccc=Iota_Subscript:]]*? { \u0345 → ; + + r, _ := utf8.DecodeRune(c.src[c.pSrc:]) + oldPDst := c.pDst + if !upper(c) { + return false + } + if !unicode.Is(unicode.Greek, r) { + return true + } + i := 0 + // Take the properties of the uppercased rune that is already written to the + // destination. This saves us the trouble of having to uppercase the + // decomposed rune again. + if b := norm.NFD.Properties(c.dst[oldPDst:]).Decomposition(); b != nil { + // Restore the destination position and process the decomposed rune. + r, sz := utf8.DecodeRune(b) + if r <= 0xFF { // See A.6.1 + return true + } + c.pDst = oldPDst + // Insert the first rune and ignore the modifiers. See A.6.2. + c.writeBytes(b[:sz]) + i = len(b[sz:]) / 2 // Greek modifiers are always of length 2. + } + + for ; i < maxIgnorable && c.next(); i++ { + switch r, _ := utf8.DecodeRune(c.src[c.pSrc:]); r { + // Above and Iota Subscript + case 0x0300, // U+0300 COMBINING GRAVE ACCENT + 0x0301, // U+0301 COMBINING ACUTE ACCENT + 0x0304, // U+0304 COMBINING MACRON + 0x0306, // U+0306 COMBINING BREVE + 0x0308, // U+0308 COMBINING DIAERESIS + 0x0313, // U+0313 COMBINING COMMA ABOVE + 0x0314, // U+0314 COMBINING REVERSED COMMA ABOVE + 0x0342, // U+0342 COMBINING GREEK PERISPOMENI + 0x0345: // U+0345 COMBINING GREEK YPOGEGRAMMENI + // No-op. Gobble the modifier. + + default: + switch v, _ := trie.lookup(c.src[c.pSrc:]); info(v).cccType() { + case cccZero: + c.unreadRune() + return true + + // We don't need to test for IotaSubscript as the only rune that + // qualifies (U+0345) was already excluded in the switch statement + // above. See A.4. + + case cccAbove: + return c.copy() + default: + // Some other modifier. We're still allowed to gobble Greek + // modifiers after this. + c.copy() + } + } + } + return i == maxIgnorable +} + +// TODO: implement elUpperSpan (low-priority: complex and infrequent). + +func ltLower(c *context) bool { + // From CLDR: + // # Introduce an explicit dot above when lowercasing capital I's and J's + // # whenever there are more accents above. 
+ // # (of the accents used in Lithuanian: grave, acute, tilde above, and ogonek) + // # 0049; 0069 0307; 0049; 0049; lt More_Above; # LATIN CAPITAL LETTER I + // # 004A; 006A 0307; 004A; 004A; lt More_Above; # LATIN CAPITAL LETTER J + // # 012E; 012F 0307; 012E; 012E; lt More_Above; # LATIN CAPITAL LETTER I WITH OGONEK + // # 00CC; 0069 0307 0300; 00CC; 00CC; lt; # LATIN CAPITAL LETTER I WITH GRAVE + // # 00CD; 0069 0307 0301; 00CD; 00CD; lt; # LATIN CAPITAL LETTER I WITH ACUTE + // # 0128; 0069 0307 0303; 0128; 0128; lt; # LATIN CAPITAL LETTER I WITH TILDE + // ::NFD(); + // I } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → i \u0307; + // J } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → j \u0307; + // I \u0328 (Į) } [^[:ccc=Not_Reordered:][:ccc=Above:]]* [:ccc=Above:] → i \u0328 \u0307; + // I \u0300 (Ì) → i \u0307 \u0300; + // I \u0301 (Í) → i \u0307 \u0301; + // I \u0303 (Ĩ) → i \u0307 \u0303; + // ::Any-Lower(); + // ::NFC(); + + i := 0 + if r := c.src[c.pSrc]; r < utf8.RuneSelf { + lower(c) + if r != 'I' && r != 'J' { + return true + } + } else { + p := norm.NFD.Properties(c.src[c.pSrc:]) + if d := p.Decomposition(); len(d) >= 3 && (d[0] == 'I' || d[0] == 'J') { + // UTF-8 optimization: the decomposition will only have an above + // modifier if the last rune of the decomposition is in [U+300-U+311]. + // In all other cases, a decomposition starting with I is always + // an I followed by modifiers that are not cased themselves. See A.2. + if d[1] == 0xCC && d[2] <= 0x91 { // A.2.4. + if !c.writeBytes(d[:1]) { + return false + } + c.dst[c.pDst-1] += 'a' - 'A' // lower + + // Assumption: modifier never changes on lowercase. See A.1. + // Assumption: all modifiers added have CCC = Above. See A.2.3. + return c.writeString("\u0307") && c.writeBytes(d[1:]) + } + // In all other cases the additional modifiers will have a CCC + // that is less than 230 (Above). We will insert the U+0307, if + // needed, after these modifiers so that a string in FCD form + // will remain so. See A.2.2. + lower(c) + i = 1 + } else { + return lower(c) + } + } + + for ; i < maxIgnorable && c.next(); i++ { + switch c.info.cccType() { + case cccZero: + c.unreadRune() + return true + case cccAbove: + return c.writeString("\u0307") && c.copy() // See A.1. + default: + c.copy() // See A.1. + } + } + return i == maxIgnorable +} + +// ltLowerSpan would be the same as isLower. + +func ltUpper(f mapFunc) mapFunc { + return func(c *context) bool { + // Unicode: + // 0307; 0307; ; ; lt After_Soft_Dotted; # COMBINING DOT ABOVE + // + // From CLDR: + // # Remove \u0307 following soft-dotteds (i, j, and the like), with possible + // # intervening non-230 marks. + // ::NFD(); + // [:Soft_Dotted:] [^[:ccc=Not_Reordered:][:ccc=Above:]]* { \u0307 → ; + // ::Any-Upper(); + // ::NFC(); + + // TODO: See A.5. A soft-dotted rune never has an exception. This would + // allow us to overload the exception bit and encode this property in + // info. Need to measure performance impact of this. + r, _ := utf8.DecodeRune(c.src[c.pSrc:]) + oldPDst := c.pDst + if !f(c) { + return false + } + if !unicode.Is(unicode.Soft_Dotted, r) { + return true + } + + // We don't need to do an NFD normalization, as a soft-dotted rune never + // contains U+0307. See A.3. + + i := 0 + for ; i < maxIgnorable && c.next(); i++ { + switch c.info.cccType() { + case cccZero: + c.unreadRune() + return true + case cccAbove: + if c.hasPrefix("\u0307") { + // We don't do a full NFC, but rather combine runes for + // some of the common cases. 
(Returning NFC or + // preserving normal form is neither a requirement nor + // a possibility anyway). + if !c.next() { + return false + } + if c.dst[oldPDst] == 'I' && c.pDst == oldPDst+1 && c.src[c.pSrc] == 0xcc { + s := "" + switch c.src[c.pSrc+1] { + case 0x80: // U+0300 COMBINING GRAVE ACCENT + s = "\u00cc" // U+00CC LATIN CAPITAL LETTER I WITH GRAVE + case 0x81: // U+0301 COMBINING ACUTE ACCENT + s = "\u00cd" // U+00CD LATIN CAPITAL LETTER I WITH ACUTE + case 0x83: // U+0303 COMBINING TILDE + s = "\u0128" // U+0128 LATIN CAPITAL LETTER I WITH TILDE + case 0x88: // U+0308 COMBINING DIAERESIS + s = "\u00cf" // U+00CF LATIN CAPITAL LETTER I WITH DIAERESIS + default: + } + if s != "" { + c.pDst = oldPDst + return c.writeString(s) + } + } + } + return c.copy() + default: + c.copy() + } + } + return i == maxIgnorable + } +} + +// TODO: implement ltUpperSpan (low priority: complex and infrequent). + +func aztrUpper(f mapFunc) mapFunc { + return func(c *context) bool { + // i→İ; + if c.src[c.pSrc] == 'i' { + return c.writeString("İ") + } + return f(c) + } +} + +func aztrLower(c *context) (done bool) { + // From CLDR: + // # I and i-dotless; I-dot and i are case pairs in Turkish and Azeri + // # 0130; 0069; 0130; 0130; tr; # LATIN CAPITAL LETTER I WITH DOT ABOVE + // İ→i; + // # When lowercasing, remove dot_above in the sequence I + dot_above, which will turn into i. + // # This matches the behavior of the canonically equivalent I-dot_above + // # 0307; ; 0307; 0307; tr After_I; # COMBINING DOT ABOVE + // # When lowercasing, unless an I is before a dot_above, it turns into a dotless i. + // # 0049; 0131; 0049; 0049; tr Not_Before_Dot; # LATIN CAPITAL LETTER I + // I([^[:ccc=Not_Reordered:][:ccc=Above:]]*)\u0307 → i$1 ; + // I→ı ; + // ::Any-Lower(); + if c.hasPrefix("\u0130") { // İ + return c.writeString("i") + } + if c.src[c.pSrc] != 'I' { + return lower(c) + } + + // We ignore the lower-case I for now, but insert it later when we know + // which form we need. + start := c.pSrc + c.sz + + i := 0 +Loop: + // We check for up to n ignorables before \u0307. As \u0307 is an + // ignorable as well, n is maxIgnorable-1. + for ; i < maxIgnorable && c.next(); i++ { + switch c.info.cccType() { + case cccAbove: + if c.hasPrefix("\u0307") { + return c.writeString("i") && c.writeBytes(c.src[start:c.pSrc]) // ignore U+0307 + } + done = true + break Loop + case cccZero: + c.unreadRune() + done = true + break Loop + default: + // We'll write this rune after we know which starter to use. + } + } + if i == maxIgnorable { + done = true + } + return c.writeString("ı") && c.writeBytes(c.src[start:c.pSrc+c.sz]) && done +} + +// aztrLowerSpan would be the same as isLower. + +func nlTitle(c *context) bool { + // From CLDR: + // # Special titlecasing for Dutch initial "ij". + // ::Any-Title(); + // # Fix up Ij at the beginning of a "word" (per Any-Title, notUAX #29) + // [:^WB=ALetter:] [:WB=Extend:]* [[:WB=MidLetter:][:WB=MidNumLet:]]? { Ij } → IJ ; + if c.src[c.pSrc] != 'I' && c.src[c.pSrc] != 'i' { + return title(c) + } + + if !c.writeString("I") || !c.next() { + return false + } + if c.src[c.pSrc] == 'j' || c.src[c.pSrc] == 'J' { + return c.writeString("J") + } + c.unreadRune() + return true +} + +func nlTitleSpan(c *context) bool { + // From CLDR: + // # Special titlecasing for Dutch initial "ij". + // ::Any-Title(); + // # Fix up Ij at the beginning of a "word" (per Any-Title, notUAX #29) + // [:^WB=ALetter:] [:WB=Extend:]* [[:WB=MidLetter:][:WB=MidNumLet:]]? 
{ Ij } → IJ ; + if c.src[c.pSrc] != 'I' { + return isTitle(c) + } + if !c.next() || c.src[c.pSrc] == 'j' { + return false + } + if c.src[c.pSrc] != 'J' { + c.unreadRune() + } + return true +} + +// Not part of CLDR, but see https://unicode.org/cldr/trac/ticket/7078. +func afnlRewrite(c *context) { + if c.hasPrefix("'") || c.hasPrefix("’") { + c.isMidWord = true + } +} diff --git a/vendor/golang.org/x/text/cases/tables10.0.0.go b/vendor/golang.org/x/text/cases/tables10.0.0.go new file mode 100644 index 000000000..ca9923105 --- /dev/null +++ b/vendor/golang.org/x/text/cases/tables10.0.0.go @@ -0,0 +1,2256 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.10 && !go1.13 +// +build go1.10,!go1.13 + +package cases + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "10.0.0" + +var xorData string = "" + // Size: 185 bytes + "\x00\x06\x07\x00\x01?\x00\x0f\x03\x00\x0f\x12\x00\x0f\x1f\x00\x0f\x1d" + + "\x00\x01\x13\x00\x0f\x16\x00\x0f\x0b\x00\x0f3\x00\x0f7\x00\x01#\x00\x0f?" + + "\x00\x0e'\x00\x0f/\x00\x0e>\x00\x0f*\x00\x0c&\x00\x0c*\x00\x0c;\x00\x0c9" + + "\x00\x0c%\x00\x01\x08\x00\x03\x0d\x00\x03\x09\x00\x02\x06\x00\x02\x02" + + "\x00\x02\x0c\x00\x01\x00\x00\x01\x03\x00\x01\x01\x00\x01 \x00\x01\x0c" + + "\x00\x01\x10\x00\x03\x10\x00\x036 \x00\x037 \x00\x0b#\x10\x00\x0b 0\x00" + + "\x0b!\x10\x00\x0b!0\x00\x0b(\x04\x00\x03\x04\x1e\x00\x03\x0a\x00\x02:" + + "\x00\x02>\x00\x02,\x00\x02\x00\x00\x02\x10\x00\x01<\x00\x01&\x00\x01*" + + "\x00\x01.\x00\x010\x003 \x00\x01\x18\x00\x01(\x00\x01\x1e\x00\x01\x22" + +var exceptions string = "" + // Size: 2068 bytes + "\x00\x12\x12μΜΜ\x12\x12ssSSSs\x13\x18i̇i̇\x10\x09II\x13\x1bʼnʼNʼN\x11" + + "\x09sSS\x12\x12dždžDž\x12\x12dždžDŽ\x10\x12DŽDž\x12\x12ljljLj\x12\x12ljljLJ\x10\x12LJLj" + + "\x12\x12njnjNj\x12\x12njnjNJ\x10\x12NJNj\x13\x1bǰJ̌J̌\x12\x12dzdzDz\x12\x12dzdzDZ\x10" + + "\x12DZDz\x13\x18ⱥⱥ\x13\x18ⱦⱦ\x10\x1bⱾⱾ\x10\x1bⱿⱿ\x10\x1bⱯⱯ\x10\x1bⱭⱭ\x10" + + "\x1bⱰⱰ\x10\x1bꞫꞫ\x10\x1bꞬꞬ\x10\x1bꞍꞍ\x10\x1bꞪꞪ\x10\x1bꞮꞮ\x10\x1bⱢⱢ\x10" + + "\x1bꞭꞭ\x10\x1bⱮⱮ\x10\x1bⱤⱤ\x10\x1bꞱꞱ\x10\x1bꞲꞲ\x10\x1bꞰꞰ2\x12ιΙΙ\x166ΐ" + + "Ϊ́Ϊ́\x166ΰΫ́Ϋ́\x12\x12σΣΣ\x12\x12βΒΒ\x12\x12θΘΘ\x12\x12φΦΦ\x12" + + "\x12πΠΠ\x12\x12κΚΚ\x12\x12ρΡΡ\x12\x12εΕΕ\x14$եւԵՒԵւ\x12\x12вВВ\x12\x12дД" + + "Д\x12\x12оОО\x12\x12сСС\x12\x12тТТ\x12\x12тТТ\x12\x12ъЪЪ\x12\x12ѣѢѢ\x13" + + "\x1bꙋꙊꙊ\x13\x1bẖH̱H̱\x13\x1bẗT̈T̈\x13\x1bẘW̊W̊\x13\x1bẙY̊Y̊\x13\x1ba" + + "ʾAʾAʾ\x13\x1bṡṠṠ\x12\x10ssß\x14$ὐΥ̓Υ̓\x166ὒΥ̓̀Υ̓̀\x166ὔΥ̓́Υ̓́\x166" + + "ὖΥ̓͂Υ̓͂\x15+ἀιἈΙᾈ\x15+ἁιἉΙᾉ\x15+ἂιἊΙᾊ\x15+ἃιἋΙᾋ\x15+ἄιἌΙᾌ\x15+ἅιἍΙᾍ" + + "\x15+ἆιἎΙᾎ\x15+ἇιἏΙᾏ\x15\x1dἀιᾀἈΙ\x15\x1dἁιᾁἉΙ\x15\x1dἂιᾂἊΙ\x15\x1dἃιᾃἋΙ" + + "\x15\x1dἄιᾄἌΙ\x15\x1dἅιᾅἍΙ\x15\x1dἆιᾆἎΙ\x15\x1dἇιᾇἏΙ\x15+ἠιἨΙᾘ\x15+ἡιἩΙᾙ" + + "\x15+ἢιἪΙᾚ\x15+ἣιἫΙᾛ\x15+ἤιἬΙᾜ\x15+ἥιἭΙᾝ\x15+ἦιἮΙᾞ\x15+ἧιἯΙᾟ\x15\x1dἠιᾐἨ" + + "Ι\x15\x1dἡιᾑἩΙ\x15\x1dἢιᾒἪΙ\x15\x1dἣιᾓἫΙ\x15\x1dἤιᾔἬΙ\x15\x1dἥιᾕἭΙ\x15" + + "\x1dἦιᾖἮΙ\x15\x1dἧιᾗἯΙ\x15+ὠιὨΙᾨ\x15+ὡιὩΙᾩ\x15+ὢιὪΙᾪ\x15+ὣιὫΙᾫ\x15+ὤιὬΙᾬ" + + "\x15+ὥιὭΙᾭ\x15+ὦιὮΙᾮ\x15+ὧιὯΙᾯ\x15\x1dὠιᾠὨΙ\x15\x1dὡιᾡὩΙ\x15\x1dὢιᾢὪΙ" + + "\x15\x1dὣιᾣὫΙ\x15\x1dὤιᾤὬΙ\x15\x1dὥιᾥὭΙ\x15\x1dὦιᾦὮΙ\x15\x1dὧιᾧὯΙ\x15-ὰι" + + "ᾺΙᾺͅ\x14#αιΑΙᾼ\x14$άιΆΙΆͅ\x14$ᾶΑ͂Α͂\x166ᾶιΑ͂Ιᾼ͂\x14\x1cαιᾳΑΙ\x12" + + "\x12ιΙΙ\x15-ὴιῊΙῊͅ\x14#ηιΗΙῌ\x14$ήιΉΙΉͅ\x14$ῆΗ͂Η͂\x166ῆιΗ͂Ιῌ͂\x14\x1c" + + "ηιῃΗΙ\x166ῒΪ̀Ϊ̀\x166ΐΪ́Ϊ́\x14$ῖΙ͂Ι͂\x166ῗΪ͂Ϊ͂\x166ῢΫ̀Ϋ" + + "̀\x166ΰΫ́Ϋ́\x14$ῤΡ̓Ρ̓\x14$ῦΥ͂Υ͂\x166ῧΫ͂Ϋ͂\x15-ὼιῺΙῺͅ\x14#ωιΩΙ" + + "ῼ\x14$ώιΏΙΏͅ\x14$ῶΩ͂Ω͂\x166ῶιΩ͂Ιῼ͂\x14\x1cωιῳΩΙ\x12\x10ωω\x11\x08kk" + + 
"\x12\x10åå\x12\x10ɫɫ\x12\x10ɽɽ\x10\x12ȺȺ\x10\x12ȾȾ\x12\x10ɑɑ\x12\x10ɱɱ" + + "\x12\x10ɐɐ\x12\x10ɒɒ\x12\x10ȿȿ\x12\x10ɀɀ\x12\x10ɥɥ\x12\x10ɦɦ\x12\x10ɜɜ" + + "\x12\x10ɡɡ\x12\x10ɬɬ\x12\x10ɪɪ\x12\x10ʞʞ\x12\x10ʇʇ\x12\x10ʝʝ\x12\x12ffFF" + + "Ff\x12\x12fiFIFi\x12\x12flFLFl\x13\x1bffiFFIFfi\x13\x1bfflFFLFfl\x12\x12" + + "stSTSt\x12\x12stSTSt\x14$մնՄՆՄն\x14$մեՄԵՄե\x14$միՄԻՄի\x14$վնՎՆՎն\x14$մխՄ" + + "ԽՄխ" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. 
+ } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// caseTrie. Total size: 11892 bytes (11.61 KiB). Checksum: c6f15484b7653775. +type caseTrie struct{} + +func newCaseTrie(i int) *caseTrie { + return &caseTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *caseTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 18: + return uint16(caseValues[n<<6+uint32(b)]) + default: + n -= 18 + return uint16(sparse.lookup(n, b)) + } +} + +// caseValues: 20 blocks, 1280 entries, 2560 bytes +// The third block is the zero block. 
+var caseValues = [1280]uint16{ + // Block 0x0, offset 0x0 + 0x27: 0x0054, + 0x2e: 0x0054, + 0x30: 0x0010, 0x31: 0x0010, 0x32: 0x0010, 0x33: 0x0010, 0x34: 0x0010, 0x35: 0x0010, + 0x36: 0x0010, 0x37: 0x0010, 0x38: 0x0010, 0x39: 0x0010, 0x3a: 0x0054, + // Block 0x1, offset 0x40 + 0x41: 0x2013, 0x42: 0x2013, 0x43: 0x2013, 0x44: 0x2013, 0x45: 0x2013, + 0x46: 0x2013, 0x47: 0x2013, 0x48: 0x2013, 0x49: 0x2013, 0x4a: 0x2013, 0x4b: 0x2013, + 0x4c: 0x2013, 0x4d: 0x2013, 0x4e: 0x2013, 0x4f: 0x2013, 0x50: 0x2013, 0x51: 0x2013, + 0x52: 0x2013, 0x53: 0x2013, 0x54: 0x2013, 0x55: 0x2013, 0x56: 0x2013, 0x57: 0x2013, + 0x58: 0x2013, 0x59: 0x2013, 0x5a: 0x2013, + 0x5e: 0x0004, 0x5f: 0x0010, 0x60: 0x0004, 0x61: 0x2012, 0x62: 0x2012, 0x63: 0x2012, + 0x64: 0x2012, 0x65: 0x2012, 0x66: 0x2012, 0x67: 0x2012, 0x68: 0x2012, 0x69: 0x2012, + 0x6a: 0x2012, 0x6b: 0x2012, 0x6c: 0x2012, 0x6d: 0x2012, 0x6e: 0x2012, 0x6f: 0x2012, + 0x70: 0x2012, 0x71: 0x2012, 0x72: 0x2012, 0x73: 0x2012, 0x74: 0x2012, 0x75: 0x2012, + 0x76: 0x2012, 0x77: 0x2012, 0x78: 0x2012, 0x79: 0x2012, 0x7a: 0x2012, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x0852, 0xc1: 0x0b53, 0xc2: 0x0113, 0xc3: 0x0112, 0xc4: 0x0113, 0xc5: 0x0112, + 0xc6: 0x0b53, 0xc7: 0x0f13, 0xc8: 0x0f12, 0xc9: 0x0e53, 0xca: 0x1153, 0xcb: 0x0713, + 0xcc: 0x0712, 0xcd: 0x0012, 0xce: 0x1453, 0xcf: 0x1753, 0xd0: 0x1a53, 0xd1: 0x0313, + 0xd2: 0x0312, 0xd3: 0x1d53, 0xd4: 0x2053, 0xd5: 0x2352, 0xd6: 0x2653, 0xd7: 0x2653, + 0xd8: 0x0113, 0xd9: 0x0112, 0xda: 0x2952, 0xdb: 0x0012, 0xdc: 0x1d53, 0xdd: 0x2c53, + 0xde: 0x2f52, 0xdf: 0x3253, 0xe0: 0x0113, 0xe1: 0x0112, 0xe2: 0x0113, 0xe3: 0x0112, + 0xe4: 0x0113, 0xe5: 0x0112, 0xe6: 0x3553, 0xe7: 0x0f13, 0xe8: 0x0f12, 0xe9: 0x3853, + 0xea: 0x0012, 0xeb: 0x0012, 0xec: 0x0113, 0xed: 0x0112, 0xee: 0x3553, 0xef: 0x1f13, + 0xf0: 0x1f12, 0xf1: 0x3b53, 0xf2: 0x3e53, 0xf3: 0x0713, 0xf4: 0x0712, 0xf5: 0x0313, + 0xf6: 0x0312, 0xf7: 0x4153, 0xf8: 0x0113, 0xf9: 0x0112, 0xfa: 0x0012, 0xfb: 0x0010, + 0xfc: 0x0113, 0xfd: 0x0112, 0xfe: 0x0012, 0xff: 0x4452, + // Block 0x4, offset 0x100 + 0x100: 0x0010, 0x101: 0x0010, 0x102: 0x0010, 0x103: 0x0010, 0x104: 0x02db, 0x105: 0x0359, + 0x106: 0x03da, 0x107: 0x043b, 0x108: 0x04b9, 0x109: 0x053a, 0x10a: 0x059b, 0x10b: 0x0619, + 0x10c: 0x069a, 0x10d: 0x0313, 0x10e: 0x0312, 0x10f: 0x1f13, 0x110: 0x1f12, 0x111: 0x0313, + 0x112: 0x0312, 0x113: 0x0713, 0x114: 0x0712, 0x115: 0x0313, 0x116: 0x0312, 0x117: 0x0f13, + 0x118: 0x0f12, 0x119: 0x0313, 0x11a: 0x0312, 0x11b: 0x0713, 0x11c: 0x0712, 0x11d: 0x1452, + 0x11e: 0x0113, 0x11f: 0x0112, 0x120: 0x0113, 0x121: 0x0112, 0x122: 0x0113, 0x123: 0x0112, + 0x124: 0x0113, 0x125: 0x0112, 0x126: 0x0113, 0x127: 0x0112, 0x128: 0x0113, 0x129: 0x0112, + 0x12a: 0x0113, 0x12b: 0x0112, 0x12c: 0x0113, 0x12d: 0x0112, 0x12e: 0x0113, 0x12f: 0x0112, + 0x130: 0x06fa, 0x131: 0x07ab, 0x132: 0x0829, 0x133: 0x08aa, 0x134: 0x0113, 0x135: 0x0112, + 0x136: 0x2353, 0x137: 0x4453, 0x138: 0x0113, 0x139: 0x0112, 0x13a: 0x0113, 0x13b: 0x0112, + 0x13c: 0x0113, 0x13d: 0x0112, 0x13e: 0x0113, 0x13f: 0x0112, + // Block 0x5, offset 0x140 + 0x140: 0x0a8a, 0x141: 0x0313, 0x142: 0x0312, 0x143: 0x0853, 0x144: 0x4753, 0x145: 0x4a53, + 0x146: 0x0113, 0x147: 0x0112, 0x148: 0x0113, 0x149: 0x0112, 0x14a: 0x0113, 0x14b: 0x0112, + 0x14c: 0x0113, 0x14d: 0x0112, 0x14e: 0x0113, 0x14f: 0x0112, 0x150: 0x0b0a, 0x151: 0x0b8a, + 0x152: 0x0c0a, 0x153: 0x0b52, 0x154: 0x0b52, 0x155: 0x0012, 0x156: 0x0e52, 0x157: 0x1152, + 0x158: 0x0012, 0x159: 0x1752, 0x15a: 0x0012, 0x15b: 0x1a52, 0x15c: 0x0c8a, 0x15d: 0x0012, + 0x15e: 
0x0012, 0x15f: 0x0012, 0x160: 0x1d52, 0x161: 0x0d0a, 0x162: 0x0012, 0x163: 0x2052, + 0x164: 0x0012, 0x165: 0x0d8a, 0x166: 0x0e0a, 0x167: 0x0012, 0x168: 0x2652, 0x169: 0x2652, + 0x16a: 0x0e8a, 0x16b: 0x0f0a, 0x16c: 0x0f8a, 0x16d: 0x0012, 0x16e: 0x0012, 0x16f: 0x1d52, + 0x170: 0x0012, 0x171: 0x100a, 0x172: 0x2c52, 0x173: 0x0012, 0x174: 0x0012, 0x175: 0x3252, + 0x176: 0x0012, 0x177: 0x0012, 0x178: 0x0012, 0x179: 0x0012, 0x17a: 0x0012, 0x17b: 0x0012, + 0x17c: 0x0012, 0x17d: 0x108a, 0x17e: 0x0012, 0x17f: 0x0012, + // Block 0x6, offset 0x180 + 0x180: 0x3552, 0x181: 0x0012, 0x182: 0x0012, 0x183: 0x3852, 0x184: 0x0012, 0x185: 0x0012, + 0x186: 0x0012, 0x187: 0x110a, 0x188: 0x3552, 0x189: 0x4752, 0x18a: 0x3b52, 0x18b: 0x3e52, + 0x18c: 0x4a52, 0x18d: 0x0012, 0x18e: 0x0012, 0x18f: 0x0012, 0x190: 0x0012, 0x191: 0x0012, + 0x192: 0x4152, 0x193: 0x0012, 0x194: 0x0010, 0x195: 0x0012, 0x196: 0x0012, 0x197: 0x0012, + 0x198: 0x0012, 0x199: 0x0012, 0x19a: 0x0012, 0x19b: 0x0012, 0x19c: 0x0012, 0x19d: 0x118a, + 0x19e: 0x120a, 0x19f: 0x0012, 0x1a0: 0x0012, 0x1a1: 0x0012, 0x1a2: 0x0012, 0x1a3: 0x0012, + 0x1a4: 0x0012, 0x1a5: 0x0012, 0x1a6: 0x0012, 0x1a7: 0x0012, 0x1a8: 0x0012, 0x1a9: 0x0012, + 0x1aa: 0x0012, 0x1ab: 0x0012, 0x1ac: 0x0012, 0x1ad: 0x0012, 0x1ae: 0x0012, 0x1af: 0x0012, + 0x1b0: 0x0015, 0x1b1: 0x0015, 0x1b2: 0x0015, 0x1b3: 0x0015, 0x1b4: 0x0015, 0x1b5: 0x0015, + 0x1b6: 0x0015, 0x1b7: 0x0015, 0x1b8: 0x0015, 0x1b9: 0x0014, 0x1ba: 0x0014, 0x1bb: 0x0014, + 0x1bc: 0x0014, 0x1bd: 0x0014, 0x1be: 0x0014, 0x1bf: 0x0014, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0024, 0x1c1: 0x0024, 0x1c2: 0x0024, 0x1c3: 0x0024, 0x1c4: 0x0024, 0x1c5: 0x128d, + 0x1c6: 0x0024, 0x1c7: 0x0034, 0x1c8: 0x0034, 0x1c9: 0x0034, 0x1ca: 0x0024, 0x1cb: 0x0024, + 0x1cc: 0x0024, 0x1cd: 0x0034, 0x1ce: 0x0034, 0x1cf: 0x0014, 0x1d0: 0x0024, 0x1d1: 0x0024, + 0x1d2: 0x0024, 0x1d3: 0x0034, 0x1d4: 0x0034, 0x1d5: 0x0034, 0x1d6: 0x0034, 0x1d7: 0x0024, + 0x1d8: 0x0034, 0x1d9: 0x0034, 0x1da: 0x0034, 0x1db: 0x0024, 0x1dc: 0x0034, 0x1dd: 0x0034, + 0x1de: 0x0034, 0x1df: 0x0034, 0x1e0: 0x0034, 0x1e1: 0x0034, 0x1e2: 0x0034, 0x1e3: 0x0024, + 0x1e4: 0x0024, 0x1e5: 0x0024, 0x1e6: 0x0024, 0x1e7: 0x0024, 0x1e8: 0x0024, 0x1e9: 0x0024, + 0x1ea: 0x0024, 0x1eb: 0x0024, 0x1ec: 0x0024, 0x1ed: 0x0024, 0x1ee: 0x0024, 0x1ef: 0x0024, + 0x1f0: 0x0113, 0x1f1: 0x0112, 0x1f2: 0x0113, 0x1f3: 0x0112, 0x1f4: 0x0014, 0x1f5: 0x0004, + 0x1f6: 0x0113, 0x1f7: 0x0112, 0x1fa: 0x0015, 0x1fb: 0x4d52, + 0x1fc: 0x5052, 0x1fd: 0x5052, 0x1ff: 0x5353, + // Block 0x8, offset 0x200 + 0x204: 0x0004, 0x205: 0x0004, + 0x206: 0x2a13, 0x207: 0x0054, 0x208: 0x2513, 0x209: 0x2713, 0x20a: 0x2513, + 0x20c: 0x5653, 0x20e: 0x5953, 0x20f: 0x5c53, 0x210: 0x130a, 0x211: 0x2013, + 0x212: 0x2013, 0x213: 0x2013, 0x214: 0x2013, 0x215: 0x2013, 0x216: 0x2013, 0x217: 0x2013, + 0x218: 0x2013, 0x219: 0x2013, 0x21a: 0x2013, 0x21b: 0x2013, 0x21c: 0x2013, 0x21d: 0x2013, + 0x21e: 0x2013, 0x21f: 0x2013, 0x220: 0x5f53, 0x221: 0x5f53, 0x223: 0x5f53, + 0x224: 0x5f53, 0x225: 0x5f53, 0x226: 0x5f53, 0x227: 0x5f53, 0x228: 0x5f53, 0x229: 0x5f53, + 0x22a: 0x5f53, 0x22b: 0x5f53, 0x22c: 0x2a12, 0x22d: 0x2512, 0x22e: 0x2712, 0x22f: 0x2512, + 0x230: 0x144a, 0x231: 0x2012, 0x232: 0x2012, 0x233: 0x2012, 0x234: 0x2012, 0x235: 0x2012, + 0x236: 0x2012, 0x237: 0x2012, 0x238: 0x2012, 0x239: 0x2012, 0x23a: 0x2012, 0x23b: 0x2012, + 0x23c: 0x2012, 0x23d: 0x2012, 0x23e: 0x2012, 0x23f: 0x2012, + // Block 0x9, offset 0x240 + 0x240: 0x5f52, 0x241: 0x5f52, 0x242: 0x158a, 0x243: 0x5f52, 0x244: 0x5f52, 0x245: 0x5f52, + 0x246: 0x5f52, 0x247: 0x5f52, 
0x248: 0x5f52, 0x249: 0x5f52, 0x24a: 0x5f52, 0x24b: 0x5f52, + 0x24c: 0x5652, 0x24d: 0x5952, 0x24e: 0x5c52, 0x24f: 0x1813, 0x250: 0x160a, 0x251: 0x168a, + 0x252: 0x0013, 0x253: 0x0013, 0x254: 0x0013, 0x255: 0x170a, 0x256: 0x178a, 0x257: 0x1812, + 0x258: 0x0113, 0x259: 0x0112, 0x25a: 0x0113, 0x25b: 0x0112, 0x25c: 0x0113, 0x25d: 0x0112, + 0x25e: 0x0113, 0x25f: 0x0112, 0x260: 0x0113, 0x261: 0x0112, 0x262: 0x0113, 0x263: 0x0112, + 0x264: 0x0113, 0x265: 0x0112, 0x266: 0x0113, 0x267: 0x0112, 0x268: 0x0113, 0x269: 0x0112, + 0x26a: 0x0113, 0x26b: 0x0112, 0x26c: 0x0113, 0x26d: 0x0112, 0x26e: 0x0113, 0x26f: 0x0112, + 0x270: 0x180a, 0x271: 0x188a, 0x272: 0x0b12, 0x273: 0x5352, 0x274: 0x6253, 0x275: 0x190a, + 0x277: 0x0f13, 0x278: 0x0f12, 0x279: 0x0b13, 0x27a: 0x0113, 0x27b: 0x0112, + 0x27c: 0x0012, 0x27d: 0x4d53, 0x27e: 0x5053, 0x27f: 0x5053, + // Block 0xa, offset 0x280 + 0x280: 0x0812, 0x281: 0x0812, 0x282: 0x0812, 0x283: 0x0812, 0x284: 0x0812, 0x285: 0x0812, + 0x288: 0x0813, 0x289: 0x0813, 0x28a: 0x0813, 0x28b: 0x0813, + 0x28c: 0x0813, 0x28d: 0x0813, 0x290: 0x239a, 0x291: 0x0812, + 0x292: 0x247a, 0x293: 0x0812, 0x294: 0x25ba, 0x295: 0x0812, 0x296: 0x26fa, 0x297: 0x0812, + 0x299: 0x0813, 0x29b: 0x0813, 0x29d: 0x0813, + 0x29f: 0x0813, 0x2a0: 0x0812, 0x2a1: 0x0812, 0x2a2: 0x0812, 0x2a3: 0x0812, + 0x2a4: 0x0812, 0x2a5: 0x0812, 0x2a6: 0x0812, 0x2a7: 0x0812, 0x2a8: 0x0813, 0x2a9: 0x0813, + 0x2aa: 0x0813, 0x2ab: 0x0813, 0x2ac: 0x0813, 0x2ad: 0x0813, 0x2ae: 0x0813, 0x2af: 0x0813, + 0x2b0: 0x8b52, 0x2b1: 0x8b52, 0x2b2: 0x8e52, 0x2b3: 0x8e52, 0x2b4: 0x9152, 0x2b5: 0x9152, + 0x2b6: 0x9452, 0x2b7: 0x9452, 0x2b8: 0x9752, 0x2b9: 0x9752, 0x2ba: 0x9a52, 0x2bb: 0x9a52, + 0x2bc: 0x4d52, 0x2bd: 0x4d52, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x283a, 0x2c1: 0x292a, 0x2c2: 0x2a1a, 0x2c3: 0x2b0a, 0x2c4: 0x2bfa, 0x2c5: 0x2cea, + 0x2c6: 0x2dda, 0x2c7: 0x2eca, 0x2c8: 0x2fb9, 0x2c9: 0x30a9, 0x2ca: 0x3199, 0x2cb: 0x3289, + 0x2cc: 0x3379, 0x2cd: 0x3469, 0x2ce: 0x3559, 0x2cf: 0x3649, 0x2d0: 0x373a, 0x2d1: 0x382a, + 0x2d2: 0x391a, 0x2d3: 0x3a0a, 0x2d4: 0x3afa, 0x2d5: 0x3bea, 0x2d6: 0x3cda, 0x2d7: 0x3dca, + 0x2d8: 0x3eb9, 0x2d9: 0x3fa9, 0x2da: 0x4099, 0x2db: 0x4189, 0x2dc: 0x4279, 0x2dd: 0x4369, + 0x2de: 0x4459, 0x2df: 0x4549, 0x2e0: 0x463a, 0x2e1: 0x472a, 0x2e2: 0x481a, 0x2e3: 0x490a, + 0x2e4: 0x49fa, 0x2e5: 0x4aea, 0x2e6: 0x4bda, 0x2e7: 0x4cca, 0x2e8: 0x4db9, 0x2e9: 0x4ea9, + 0x2ea: 0x4f99, 0x2eb: 0x5089, 0x2ec: 0x5179, 0x2ed: 0x5269, 0x2ee: 0x5359, 0x2ef: 0x5449, + 0x2f0: 0x0812, 0x2f1: 0x0812, 0x2f2: 0x553a, 0x2f3: 0x564a, 0x2f4: 0x571a, + 0x2f6: 0x57fa, 0x2f7: 0x58da, 0x2f8: 0x0813, 0x2f9: 0x0813, 0x2fa: 0x8b53, 0x2fb: 0x8b53, + 0x2fc: 0x5a19, 0x2fd: 0x0004, 0x2fe: 0x5aea, 0x2ff: 0x0004, + // Block 0xc, offset 0x300 + 0x300: 0x0004, 0x301: 0x0004, 0x302: 0x5b6a, 0x303: 0x5c7a, 0x304: 0x5d4a, + 0x306: 0x5e2a, 0x307: 0x5f0a, 0x308: 0x8e53, 0x309: 0x8e53, 0x30a: 0x9153, 0x30b: 0x9153, + 0x30c: 0x6049, 0x30d: 0x0004, 0x30e: 0x0004, 0x30f: 0x0004, 0x310: 0x0812, 0x311: 0x0812, + 0x312: 0x611a, 0x313: 0x625a, 0x316: 0x639a, 0x317: 0x647a, + 0x318: 0x0813, 0x319: 0x0813, 0x31a: 0x9453, 0x31b: 0x9453, 0x31d: 0x0004, + 0x31e: 0x0004, 0x31f: 0x0004, 0x320: 0x0812, 0x321: 0x0812, 0x322: 0x65ba, 0x323: 0x66fa, + 0x324: 0x683a, 0x325: 0x0912, 0x326: 0x691a, 0x327: 0x69fa, 0x328: 0x0813, 0x329: 0x0813, + 0x32a: 0x9a53, 0x32b: 0x9a53, 0x32c: 0x0913, 0x32d: 0x0004, 0x32e: 0x0004, 0x32f: 0x0004, + 0x332: 0x6b3a, 0x333: 0x6c4a, 0x334: 0x6d1a, + 0x336: 0x6dfa, 0x337: 0x6eda, 0x338: 0x9753, 0x339: 0x9753, 0x33a: 0x4d53, 0x33b: 0x4d53, + 0x33c: 
0x7019, 0x33d: 0x0004, 0x33e: 0x0004, + // Block 0xd, offset 0x340 + 0x342: 0x0013, + 0x347: 0x0013, 0x34a: 0x0012, 0x34b: 0x0013, + 0x34c: 0x0013, 0x34d: 0x0013, 0x34e: 0x0012, 0x34f: 0x0012, 0x350: 0x0013, 0x351: 0x0013, + 0x352: 0x0013, 0x353: 0x0012, 0x355: 0x0013, + 0x359: 0x0013, 0x35a: 0x0013, 0x35b: 0x0013, 0x35c: 0x0013, 0x35d: 0x0013, + 0x364: 0x0013, 0x366: 0x70eb, 0x368: 0x0013, + 0x36a: 0x714b, 0x36b: 0x718b, 0x36c: 0x0013, 0x36d: 0x0013, 0x36f: 0x0012, + 0x370: 0x0013, 0x371: 0x0013, 0x372: 0x9d53, 0x373: 0x0013, 0x374: 0x0012, 0x375: 0x0010, + 0x376: 0x0010, 0x377: 0x0010, 0x378: 0x0010, 0x379: 0x0012, + 0x37c: 0x0012, 0x37d: 0x0012, 0x37e: 0x0013, 0x37f: 0x0013, + // Block 0xe, offset 0x380 + 0x380: 0x1a13, 0x381: 0x1a13, 0x382: 0x1e13, 0x383: 0x1e13, 0x384: 0x1a13, 0x385: 0x1a13, + 0x386: 0x2613, 0x387: 0x2613, 0x388: 0x2a13, 0x389: 0x2a13, 0x38a: 0x2e13, 0x38b: 0x2e13, + 0x38c: 0x2a13, 0x38d: 0x2a13, 0x38e: 0x2613, 0x38f: 0x2613, 0x390: 0xa052, 0x391: 0xa052, + 0x392: 0xa352, 0x393: 0xa352, 0x394: 0xa652, 0x395: 0xa652, 0x396: 0xa352, 0x397: 0xa352, + 0x398: 0xa052, 0x399: 0xa052, 0x39a: 0x1a12, 0x39b: 0x1a12, 0x39c: 0x1e12, 0x39d: 0x1e12, + 0x39e: 0x1a12, 0x39f: 0x1a12, 0x3a0: 0x2612, 0x3a1: 0x2612, 0x3a2: 0x2a12, 0x3a3: 0x2a12, + 0x3a4: 0x2e12, 0x3a5: 0x2e12, 0x3a6: 0x2a12, 0x3a7: 0x2a12, 0x3a8: 0x2612, 0x3a9: 0x2612, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x6552, 0x3c1: 0x6552, 0x3c2: 0x6552, 0x3c3: 0x6552, 0x3c4: 0x6552, 0x3c5: 0x6552, + 0x3c6: 0x6552, 0x3c7: 0x6552, 0x3c8: 0x6552, 0x3c9: 0x6552, 0x3ca: 0x6552, 0x3cb: 0x6552, + 0x3cc: 0x6552, 0x3cd: 0x6552, 0x3ce: 0x6552, 0x3cf: 0x6552, 0x3d0: 0xa952, 0x3d1: 0xa952, + 0x3d2: 0xa952, 0x3d3: 0xa952, 0x3d4: 0xa952, 0x3d5: 0xa952, 0x3d6: 0xa952, 0x3d7: 0xa952, + 0x3d8: 0xa952, 0x3d9: 0xa952, 0x3da: 0xa952, 0x3db: 0xa952, 0x3dc: 0xa952, 0x3dd: 0xa952, + 0x3de: 0xa952, 0x3e0: 0x0113, 0x3e1: 0x0112, 0x3e2: 0x71eb, 0x3e3: 0x8853, + 0x3e4: 0x724b, 0x3e5: 0x72aa, 0x3e6: 0x730a, 0x3e7: 0x0f13, 0x3e8: 0x0f12, 0x3e9: 0x0313, + 0x3ea: 0x0312, 0x3eb: 0x0713, 0x3ec: 0x0712, 0x3ed: 0x736b, 0x3ee: 0x73cb, 0x3ef: 0x742b, + 0x3f0: 0x748b, 0x3f1: 0x0012, 0x3f2: 0x0113, 0x3f3: 0x0112, 0x3f4: 0x0012, 0x3f5: 0x0313, + 0x3f6: 0x0312, 0x3f7: 0x0012, 0x3f8: 0x0012, 0x3f9: 0x0012, 0x3fa: 0x0012, 0x3fb: 0x0012, + 0x3fc: 0x0015, 0x3fd: 0x0015, 0x3fe: 0x74eb, 0x3ff: 0x754b, + // Block 0x10, offset 0x400 + 0x400: 0x0113, 0x401: 0x0112, 0x402: 0x0113, 0x403: 0x0112, 0x404: 0x0113, 0x405: 0x0112, + 0x406: 0x0113, 0x407: 0x0112, 0x408: 0x0014, 0x409: 0x0014, 0x40a: 0x0014, 0x40b: 0x0713, + 0x40c: 0x0712, 0x40d: 0x75ab, 0x40e: 0x0012, 0x40f: 0x0010, 0x410: 0x0113, 0x411: 0x0112, + 0x412: 0x0113, 0x413: 0x0112, 0x414: 0x0012, 0x415: 0x0012, 0x416: 0x0113, 0x417: 0x0112, + 0x418: 0x0113, 0x419: 0x0112, 0x41a: 0x0113, 0x41b: 0x0112, 0x41c: 0x0113, 0x41d: 0x0112, + 0x41e: 0x0113, 0x41f: 0x0112, 0x420: 0x0113, 0x421: 0x0112, 0x422: 0x0113, 0x423: 0x0112, + 0x424: 0x0113, 0x425: 0x0112, 0x426: 0x0113, 0x427: 0x0112, 0x428: 0x0113, 0x429: 0x0112, + 0x42a: 0x760b, 0x42b: 0x766b, 0x42c: 0x76cb, 0x42d: 0x772b, 0x42e: 0x778b, + 0x430: 0x77eb, 0x431: 0x784b, 0x432: 0x78ab, 0x433: 0xac53, 0x434: 0x0113, 0x435: 0x0112, + 0x436: 0x0113, 0x437: 0x0112, + // Block 0x11, offset 0x440 + 0x440: 0x790a, 0x441: 0x798a, 0x442: 0x7a0a, 0x443: 0x7a8a, 0x444: 0x7b3a, 0x445: 0x7bea, + 0x446: 0x7c6a, + 0x453: 0x7cea, 0x454: 0x7dca, 0x455: 0x7eaa, 0x456: 0x7f8a, 0x457: 0x806a, + 0x45d: 0x0010, + 0x45e: 0x0034, 0x45f: 0x0010, 0x460: 0x0010, 0x461: 0x0010, 0x462: 0x0010, 0x463: 
0x0010, + 0x464: 0x0010, 0x465: 0x0010, 0x466: 0x0010, 0x467: 0x0010, 0x468: 0x0010, + 0x46a: 0x0010, 0x46b: 0x0010, 0x46c: 0x0010, 0x46d: 0x0010, 0x46e: 0x0010, 0x46f: 0x0010, + 0x470: 0x0010, 0x471: 0x0010, 0x472: 0x0010, 0x473: 0x0010, 0x474: 0x0010, 0x475: 0x0010, + 0x476: 0x0010, 0x478: 0x0010, 0x479: 0x0010, 0x47a: 0x0010, 0x47b: 0x0010, + 0x47c: 0x0010, 0x47e: 0x0010, + // Block 0x12, offset 0x480 + 0x480: 0x2213, 0x481: 0x2213, 0x482: 0x2613, 0x483: 0x2613, 0x484: 0x2213, 0x485: 0x2213, + 0x486: 0x2e13, 0x487: 0x2e13, 0x488: 0x2213, 0x489: 0x2213, 0x48a: 0x2613, 0x48b: 0x2613, + 0x48c: 0x2213, 0x48d: 0x2213, 0x48e: 0x3e13, 0x48f: 0x3e13, 0x490: 0x2213, 0x491: 0x2213, + 0x492: 0x2613, 0x493: 0x2613, 0x494: 0x2213, 0x495: 0x2213, 0x496: 0x2e13, 0x497: 0x2e13, + 0x498: 0x2213, 0x499: 0x2213, 0x49a: 0x2613, 0x49b: 0x2613, 0x49c: 0x2213, 0x49d: 0x2213, + 0x49e: 0xb553, 0x49f: 0xb553, 0x4a0: 0xb853, 0x4a1: 0xb853, 0x4a2: 0x2212, 0x4a3: 0x2212, + 0x4a4: 0x2612, 0x4a5: 0x2612, 0x4a6: 0x2212, 0x4a7: 0x2212, 0x4a8: 0x2e12, 0x4a9: 0x2e12, + 0x4aa: 0x2212, 0x4ab: 0x2212, 0x4ac: 0x2612, 0x4ad: 0x2612, 0x4ae: 0x2212, 0x4af: 0x2212, + 0x4b0: 0x3e12, 0x4b1: 0x3e12, 0x4b2: 0x2212, 0x4b3: 0x2212, 0x4b4: 0x2612, 0x4b5: 0x2612, + 0x4b6: 0x2212, 0x4b7: 0x2212, 0x4b8: 0x2e12, 0x4b9: 0x2e12, 0x4ba: 0x2212, 0x4bb: 0x2212, + 0x4bc: 0x2612, 0x4bd: 0x2612, 0x4be: 0x2212, 0x4bf: 0x2212, + // Block 0x13, offset 0x4c0 + 0x4c2: 0x0010, + 0x4c7: 0x0010, 0x4c9: 0x0010, 0x4cb: 0x0010, + 0x4cd: 0x0010, 0x4ce: 0x0010, 0x4cf: 0x0010, 0x4d1: 0x0010, + 0x4d2: 0x0010, 0x4d4: 0x0010, 0x4d7: 0x0010, + 0x4d9: 0x0010, 0x4db: 0x0010, 0x4dd: 0x0010, + 0x4df: 0x0010, 0x4e1: 0x0010, 0x4e2: 0x0010, + 0x4e4: 0x0010, 0x4e7: 0x0010, 0x4e8: 0x0010, 0x4e9: 0x0010, + 0x4ea: 0x0010, 0x4ec: 0x0010, 0x4ed: 0x0010, 0x4ee: 0x0010, 0x4ef: 0x0010, + 0x4f0: 0x0010, 0x4f1: 0x0010, 0x4f2: 0x0010, 0x4f4: 0x0010, 0x4f5: 0x0010, + 0x4f6: 0x0010, 0x4f7: 0x0010, 0x4f9: 0x0010, 0x4fa: 0x0010, 0x4fb: 0x0010, + 0x4fc: 0x0010, 0x4fe: 0x0010, +} + +// caseIndex: 25 blocks, 1600 entries, 3200 bytes +// Block 0 is the zero block. 
+var caseIndex = [1600]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x12, 0xc3: 0x13, 0xc4: 0x14, 0xc5: 0x15, 0xc6: 0x01, 0xc7: 0x02, + 0xc8: 0x16, 0xc9: 0x03, 0xca: 0x04, 0xcb: 0x17, 0xcc: 0x18, 0xcd: 0x05, 0xce: 0x06, 0xcf: 0x07, + 0xd0: 0x19, 0xd1: 0x1a, 0xd2: 0x1b, 0xd3: 0x1c, 0xd4: 0x1d, 0xd5: 0x1e, 0xd6: 0x1f, 0xd7: 0x20, + 0xd8: 0x21, 0xd9: 0x22, 0xda: 0x23, 0xdb: 0x24, 0xdc: 0x25, 0xdd: 0x26, 0xde: 0x27, 0xdf: 0x28, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x07, 0xed: 0x08, 0xef: 0x09, + 0xf0: 0x14, 0xf3: 0x16, + // Block 0x4, offset 0x100 + 0x120: 0x29, 0x121: 0x2a, 0x122: 0x2b, 0x123: 0x2c, 0x124: 0x2d, 0x125: 0x2e, 0x126: 0x2f, 0x127: 0x30, + 0x128: 0x31, 0x129: 0x32, 0x12a: 0x33, 0x12b: 0x34, 0x12c: 0x35, 0x12d: 0x36, 0x12e: 0x37, 0x12f: 0x38, + 0x130: 0x39, 0x131: 0x3a, 0x132: 0x3b, 0x133: 0x3c, 0x134: 0x3d, 0x135: 0x3e, 0x136: 0x3f, 0x137: 0x40, + 0x138: 0x41, 0x139: 0x42, 0x13a: 0x43, 0x13b: 0x44, 0x13c: 0x45, 0x13d: 0x46, 0x13e: 0x47, 0x13f: 0x48, + // Block 0x5, offset 0x140 + 0x140: 0x49, 0x141: 0x4a, 0x142: 0x4b, 0x143: 0x4c, 0x144: 0x23, 0x145: 0x23, 0x146: 0x23, 0x147: 0x23, + 0x148: 0x23, 0x149: 0x4d, 0x14a: 0x4e, 0x14b: 0x4f, 0x14c: 0x50, 0x14d: 0x51, 0x14e: 0x52, 0x14f: 0x53, + 0x150: 0x54, 0x151: 0x23, 0x152: 0x23, 0x153: 0x23, 0x154: 0x23, 0x155: 0x23, 0x156: 0x23, 0x157: 0x23, + 0x158: 0x23, 0x159: 0x55, 0x15a: 0x56, 0x15b: 0x57, 0x15c: 0x58, 0x15d: 0x59, 0x15e: 0x5a, 0x15f: 0x5b, + 0x160: 0x5c, 0x161: 0x5d, 0x162: 0x5e, 0x163: 0x5f, 0x164: 0x60, 0x165: 0x61, 0x167: 0x62, + 0x168: 0x63, 0x169: 0x64, 0x16a: 0x65, 0x16c: 0x66, 0x16d: 0x67, 0x16e: 0x68, 0x16f: 0x69, + 0x170: 0x6a, 0x171: 0x6b, 0x172: 0x6c, 0x173: 0x6d, 0x174: 0x6e, 0x175: 0x6f, 0x176: 0x70, 0x177: 0x71, + 0x178: 0x72, 0x179: 0x72, 0x17a: 0x73, 0x17b: 0x72, 0x17c: 0x74, 0x17d: 0x08, 0x17e: 0x09, 0x17f: 0x0a, + // Block 0x6, offset 0x180 + 0x180: 0x75, 0x181: 0x76, 0x182: 0x77, 0x183: 0x78, 0x184: 0x0b, 0x185: 0x79, 0x186: 0x7a, + 0x192: 0x7b, 0x193: 0x0c, + 0x1b0: 0x7c, 0x1b1: 0x0d, 0x1b2: 0x72, 0x1b3: 0x7d, 0x1b4: 0x7e, 0x1b5: 0x7f, 0x1b6: 0x80, 0x1b7: 0x81, + 0x1b8: 0x82, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x83, 0x1c2: 0x84, 0x1c3: 0x85, 0x1c4: 0x86, 0x1c5: 0x23, 0x1c6: 0x87, + // Block 0x8, offset 0x200 + 0x200: 0x88, 0x201: 0x23, 0x202: 0x23, 0x203: 0x23, 0x204: 0x23, 0x205: 0x23, 0x206: 0x23, 0x207: 0x23, + 0x208: 0x23, 0x209: 0x23, 0x20a: 0x23, 0x20b: 0x23, 0x20c: 0x23, 0x20d: 0x23, 0x20e: 0x23, 0x20f: 0x23, + 0x210: 0x23, 0x211: 0x23, 0x212: 0x89, 0x213: 0x8a, 0x214: 0x23, 0x215: 0x23, 0x216: 0x23, 0x217: 0x23, + 0x218: 0x8b, 0x219: 0x8c, 0x21a: 0x8d, 0x21b: 0x8e, 0x21c: 0x8f, 0x21d: 0x90, 0x21e: 0x0e, 0x21f: 0x91, + 0x220: 0x92, 0x221: 0x93, 0x222: 0x23, 0x223: 0x94, 0x224: 0x95, 0x225: 0x96, 0x226: 0x97, 0x227: 0x98, + 0x228: 0x99, 0x229: 0x9a, 0x22a: 0x9b, 0x22b: 0x9c, 0x22c: 0x9d, 0x22d: 0x9e, 0x22e: 0x9f, 0x22f: 0xa0, + 0x230: 0x23, 0x231: 0x23, 0x232: 0x23, 0x233: 0x23, 0x234: 0x23, 0x235: 0x23, 0x236: 0x23, 0x237: 0x23, + 0x238: 0x23, 0x239: 0x23, 0x23a: 0x23, 0x23b: 0x23, 0x23c: 0x23, 0x23d: 0x23, 0x23e: 0x23, 0x23f: 0x23, + // Block 0x9, offset 0x240 + 0x240: 0x23, 0x241: 0x23, 0x242: 0x23, 0x243: 0x23, 0x244: 0x23, 0x245: 0x23, 0x246: 0x23, 0x247: 0x23, + 0x248: 0x23, 0x249: 0x23, 0x24a: 0x23, 0x24b: 0x23, 0x24c: 0x23, 0x24d: 0x23, 0x24e: 0x23, 0x24f: 0x23, + 0x250: 0x23, 0x251: 0x23, 0x252: 0x23, 0x253: 0x23, 0x254: 0x23, 0x255: 0x23, 0x256: 0x23, 0x257: 
0x23, + 0x258: 0x23, 0x259: 0x23, 0x25a: 0x23, 0x25b: 0x23, 0x25c: 0x23, 0x25d: 0x23, 0x25e: 0x23, 0x25f: 0x23, + 0x260: 0x23, 0x261: 0x23, 0x262: 0x23, 0x263: 0x23, 0x264: 0x23, 0x265: 0x23, 0x266: 0x23, 0x267: 0x23, + 0x268: 0x23, 0x269: 0x23, 0x26a: 0x23, 0x26b: 0x23, 0x26c: 0x23, 0x26d: 0x23, 0x26e: 0x23, 0x26f: 0x23, + 0x270: 0x23, 0x271: 0x23, 0x272: 0x23, 0x273: 0x23, 0x274: 0x23, 0x275: 0x23, 0x276: 0x23, 0x277: 0x23, + 0x278: 0x23, 0x279: 0x23, 0x27a: 0x23, 0x27b: 0x23, 0x27c: 0x23, 0x27d: 0x23, 0x27e: 0x23, 0x27f: 0x23, + // Block 0xa, offset 0x280 + 0x280: 0x23, 0x281: 0x23, 0x282: 0x23, 0x283: 0x23, 0x284: 0x23, 0x285: 0x23, 0x286: 0x23, 0x287: 0x23, + 0x288: 0x23, 0x289: 0x23, 0x28a: 0x23, 0x28b: 0x23, 0x28c: 0x23, 0x28d: 0x23, 0x28e: 0x23, 0x28f: 0x23, + 0x290: 0x23, 0x291: 0x23, 0x292: 0x23, 0x293: 0x23, 0x294: 0x23, 0x295: 0x23, 0x296: 0x23, 0x297: 0x23, + 0x298: 0x23, 0x299: 0x23, 0x29a: 0x23, 0x29b: 0x23, 0x29c: 0x23, 0x29d: 0x23, 0x29e: 0xa1, 0x29f: 0xa2, + // Block 0xb, offset 0x2c0 + 0x2ec: 0x0f, 0x2ed: 0xa3, 0x2ee: 0xa4, 0x2ef: 0xa5, + 0x2f0: 0x23, 0x2f1: 0x23, 0x2f2: 0x23, 0x2f3: 0x23, 0x2f4: 0xa6, 0x2f5: 0xa7, 0x2f6: 0xa8, 0x2f7: 0xa9, + 0x2f8: 0xaa, 0x2f9: 0xab, 0x2fa: 0x23, 0x2fb: 0xac, 0x2fc: 0xad, 0x2fd: 0xae, 0x2fe: 0xaf, 0x2ff: 0xb0, + // Block 0xc, offset 0x300 + 0x300: 0xb1, 0x301: 0xb2, 0x302: 0x23, 0x303: 0xb3, 0x305: 0xb4, 0x307: 0xb5, + 0x30a: 0xb6, 0x30b: 0xb7, 0x30c: 0xb8, 0x30d: 0xb9, 0x30e: 0xba, 0x30f: 0xbb, + 0x310: 0xbc, 0x311: 0xbd, 0x312: 0xbe, 0x313: 0xbf, 0x314: 0xc0, 0x315: 0xc1, + 0x318: 0x23, 0x319: 0x23, 0x31a: 0x23, 0x31b: 0x23, 0x31c: 0xc2, 0x31d: 0xc3, + 0x320: 0xc4, 0x321: 0xc5, 0x322: 0xc6, 0x323: 0xc7, 0x324: 0xc8, 0x326: 0xc9, + 0x328: 0xca, 0x329: 0xcb, 0x32a: 0xcc, 0x32b: 0xcd, 0x32c: 0x5f, 0x32d: 0xce, 0x32e: 0xcf, + 0x330: 0x23, 0x331: 0xd0, 0x332: 0xd1, 0x333: 0xd2, + // Block 0xd, offset 0x340 + 0x340: 0xd3, 0x341: 0xd4, 0x342: 0xd5, 0x343: 0xd6, 0x344: 0xd7, 0x345: 0xd8, 0x346: 0xd9, 0x347: 0xda, + 0x348: 0xdb, 0x34a: 0xdc, 0x34b: 0xdd, 0x34c: 0xde, 0x34d: 0xdf, + 0x350: 0xe0, 0x351: 0xe1, 0x352: 0xe2, 0x353: 0xe3, 0x356: 0xe4, 0x357: 0xe5, + 0x358: 0xe6, 0x359: 0xe7, 0x35a: 0xe8, 0x35b: 0xe9, 0x35c: 0xea, + 0x362: 0xeb, 0x363: 0xec, + 0x368: 0xed, 0x369: 0xee, 0x36a: 0xef, 0x36b: 0xf0, + 0x370: 0xf1, 0x371: 0xf2, 0x372: 0xf3, 0x374: 0xf4, 0x375: 0xf5, + // Block 0xe, offset 0x380 + 0x380: 0x23, 0x381: 0x23, 0x382: 0x23, 0x383: 0x23, 0x384: 0x23, 0x385: 0x23, 0x386: 0x23, 0x387: 0x23, + 0x388: 0x23, 0x389: 0x23, 0x38a: 0x23, 0x38b: 0x23, 0x38c: 0x23, 0x38d: 0x23, 0x38e: 0xf6, + 0x390: 0x23, 0x391: 0xf7, 0x392: 0x23, 0x393: 0x23, 0x394: 0x23, 0x395: 0xf8, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x23, 0x3c1: 0x23, 0x3c2: 0x23, 0x3c3: 0x23, 0x3c4: 0x23, 0x3c5: 0x23, 0x3c6: 0x23, 0x3c7: 0x23, + 0x3c8: 0x23, 0x3c9: 0x23, 0x3ca: 0x23, 0x3cb: 0x23, 0x3cc: 0x23, 0x3cd: 0x23, 0x3ce: 0x23, 0x3cf: 0x23, + 0x3d0: 0xf7, + // Block 0x10, offset 0x400 + 0x410: 0x23, 0x411: 0x23, 0x412: 0x23, 0x413: 0x23, 0x414: 0x23, 0x415: 0x23, 0x416: 0x23, 0x417: 0x23, + 0x418: 0x23, 0x419: 0xf9, + // Block 0x11, offset 0x440 + 0x460: 0x23, 0x461: 0x23, 0x462: 0x23, 0x463: 0x23, 0x464: 0x23, 0x465: 0x23, 0x466: 0x23, 0x467: 0x23, + 0x468: 0xf0, 0x469: 0xfa, 0x46b: 0xfb, 0x46c: 0xfc, 0x46d: 0xfd, 0x46e: 0xfe, + 0x47c: 0x23, 0x47d: 0xff, 0x47e: 0x100, 0x47f: 0x101, + // Block 0x12, offset 0x480 + 0x4b0: 0x23, 0x4b1: 0x102, 0x4b2: 0x103, + // Block 0x13, offset 0x4c0 + 0x4c5: 0x104, 0x4c6: 0x105, + 0x4c9: 0x106, + 0x4d0: 0x107, 0x4d1: 0x108, 0x4d2: 0x109, 
0x4d3: 0x10a, 0x4d4: 0x10b, 0x4d5: 0x10c, 0x4d6: 0x10d, 0x4d7: 0x10e, + 0x4d8: 0x10f, 0x4d9: 0x110, 0x4da: 0x111, 0x4db: 0x112, 0x4dc: 0x113, 0x4dd: 0x114, 0x4de: 0x115, 0x4df: 0x116, + 0x4e8: 0x117, 0x4e9: 0x118, 0x4ea: 0x119, + // Block 0x14, offset 0x500 + 0x500: 0x11a, + 0x520: 0x23, 0x521: 0x23, 0x522: 0x23, 0x523: 0x11b, 0x524: 0x10, 0x525: 0x11c, + 0x538: 0x11d, 0x539: 0x11, 0x53a: 0x11e, + // Block 0x15, offset 0x540 + 0x544: 0x11f, 0x545: 0x120, 0x546: 0x121, + 0x54f: 0x122, + // Block 0x16, offset 0x580 + 0x590: 0x0a, 0x591: 0x0b, 0x592: 0x0c, 0x593: 0x0d, 0x594: 0x0e, 0x596: 0x0f, + 0x59b: 0x10, 0x59d: 0x11, 0x59e: 0x12, 0x59f: 0x13, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x123, 0x5c1: 0x124, 0x5c4: 0x124, 0x5c5: 0x124, 0x5c6: 0x124, 0x5c7: 0x125, + // Block 0x18, offset 0x600 + 0x620: 0x15, +} + +// sparseOffsets: 277 entries, 554 bytes +var sparseOffsets = []uint16{0x0, 0x9, 0xf, 0x18, 0x24, 0x2e, 0x35, 0x38, 0x3c, 0x3f, 0x43, 0x4d, 0x4f, 0x54, 0x64, 0x6b, 0x70, 0x7e, 0x7f, 0x8d, 0x9c, 0xa6, 0xa9, 0xaf, 0xb7, 0xba, 0xbc, 0xca, 0xd0, 0xde, 0xe9, 0xf5, 0x100, 0x10c, 0x116, 0x122, 0x12d, 0x139, 0x145, 0x14d, 0x155, 0x15f, 0x16a, 0x176, 0x17d, 0x188, 0x18d, 0x195, 0x198, 0x19d, 0x1a1, 0x1a5, 0x1ac, 0x1b5, 0x1bd, 0x1be, 0x1c7, 0x1ce, 0x1d6, 0x1dc, 0x1e2, 0x1e7, 0x1eb, 0x1ee, 0x1f0, 0x1f3, 0x1f8, 0x1f9, 0x1fb, 0x1fd, 0x1ff, 0x206, 0x20b, 0x20f, 0x218, 0x21b, 0x21e, 0x224, 0x225, 0x230, 0x231, 0x232, 0x237, 0x244, 0x24c, 0x254, 0x25d, 0x266, 0x26f, 0x274, 0x277, 0x280, 0x28d, 0x28f, 0x296, 0x298, 0x2a4, 0x2a5, 0x2b0, 0x2b8, 0x2c0, 0x2c6, 0x2c7, 0x2d5, 0x2da, 0x2dd, 0x2e2, 0x2e6, 0x2ec, 0x2f1, 0x2f4, 0x2f9, 0x2fe, 0x2ff, 0x305, 0x307, 0x308, 0x30a, 0x30c, 0x30f, 0x310, 0x312, 0x315, 0x31b, 0x31f, 0x321, 0x326, 0x32d, 0x331, 0x33a, 0x33b, 0x343, 0x347, 0x34c, 0x354, 0x35a, 0x360, 0x36a, 0x36f, 0x378, 0x37e, 0x385, 0x389, 0x391, 0x393, 0x395, 0x398, 0x39a, 0x39c, 0x39d, 0x39e, 0x3a0, 0x3a2, 0x3a8, 0x3ad, 0x3af, 0x3b5, 0x3b8, 0x3ba, 0x3c0, 0x3c5, 0x3c7, 0x3c8, 0x3c9, 0x3ca, 0x3cc, 0x3ce, 0x3d0, 0x3d3, 0x3d5, 0x3d8, 0x3e0, 0x3e3, 0x3e7, 0x3ef, 0x3f1, 0x3f2, 0x3f3, 0x3f5, 0x3fb, 0x3fd, 0x3fe, 0x400, 0x402, 0x404, 0x411, 0x412, 0x413, 0x417, 0x419, 0x41a, 0x41b, 0x41c, 0x41d, 0x421, 0x425, 0x42b, 0x42d, 0x434, 0x437, 0x43b, 0x441, 0x44a, 0x450, 0x456, 0x460, 0x46a, 0x46c, 0x473, 0x479, 0x47f, 0x485, 0x488, 0x48e, 0x491, 0x499, 0x49a, 0x4a1, 0x4a2, 0x4a5, 0x4af, 0x4b5, 0x4bb, 0x4bc, 0x4c2, 0x4c5, 0x4cd, 0x4d4, 0x4db, 0x4dc, 0x4dd, 0x4de, 0x4df, 0x4e1, 0x4e3, 0x4e5, 0x4e9, 0x4ea, 0x4ec, 0x4ed, 0x4ee, 0x4f0, 0x4f5, 0x4fa, 0x4fe, 0x4ff, 0x502, 0x506, 0x511, 0x515, 0x51d, 0x522, 0x526, 0x529, 0x52d, 0x530, 0x533, 0x538, 0x53c, 0x540, 0x544, 0x548, 0x54a, 0x54c, 0x54f, 0x554, 0x556, 0x55b, 0x564, 0x569, 0x56a, 0x56d, 0x56e, 0x56f, 0x571, 0x572, 0x573} + +// sparseValues: 1395 entries, 5580 bytes +var sparseValues = [1395]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0004, lo: 0xa8, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xaa}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0004, lo: 0xaf, hi: 0xaf}, + {value: 0x0004, lo: 0xb4, hi: 0xb4}, + {value: 0x001a, lo: 0xb5, hi: 0xb5}, + {value: 0x0054, lo: 0xb7, hi: 0xb7}, + {value: 0x0004, lo: 0xb8, hi: 0xb8}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + // Block 0x1, offset 0x9 + {value: 0x2013, lo: 0x80, hi: 0x96}, + {value: 0x2013, lo: 0x98, hi: 0x9e}, + {value: 0x009a, lo: 0x9f, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xb6}, + {value: 0x2012, lo: 0xb8, hi: 0xbe}, + {value: 0x0252, lo: 0xbf, hi: 0xbf}, + // Block 0x2, offset 0xf + {value: 
0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x011b, lo: 0xb0, hi: 0xb0}, + {value: 0x019a, lo: 0xb1, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb7}, + {value: 0x0012, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x0553, lo: 0xbf, hi: 0xbf}, + // Block 0x3, offset 0x18 + {value: 0x0552, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x01da, lo: 0x89, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xb7}, + {value: 0x0253, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x028a, lo: 0xbf, hi: 0xbf}, + // Block 0x4, offset 0x24 + {value: 0x0117, lo: 0x80, hi: 0x9f}, + {value: 0x2f53, lo: 0xa0, hi: 0xa0}, + {value: 0x0012, lo: 0xa1, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xb3}, + {value: 0x0012, lo: 0xb4, hi: 0xb9}, + {value: 0x090b, lo: 0xba, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x2953, lo: 0xbd, hi: 0xbd}, + {value: 0x098b, lo: 0xbe, hi: 0xbe}, + {value: 0x0a0a, lo: 0xbf, hi: 0xbf}, + // Block 0x5, offset 0x2e + {value: 0x0015, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x97}, + {value: 0x0004, lo: 0x98, hi: 0x9d}, + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0015, lo: 0xa0, hi: 0xa4}, + {value: 0x0004, lo: 0xa5, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xbf}, + // Block 0x6, offset 0x35 + {value: 0x0024, lo: 0x80, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbf}, + // Block 0x7, offset 0x38 + {value: 0x6553, lo: 0x80, hi: 0x8f}, + {value: 0x2013, lo: 0x90, hi: 0x9f}, + {value: 0x5f53, lo: 0xa0, hi: 0xaf}, + {value: 0x2012, lo: 0xb0, hi: 0xbf}, + // Block 0x8, offset 0x3c + {value: 0x5f52, lo: 0x80, hi: 0x8f}, + {value: 0x6552, lo: 0x90, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x9, offset 0x3f + {value: 0x0117, lo: 0x80, hi: 0x81}, + {value: 0x0024, lo: 0x83, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xbf}, + // Block 0xa, offset 0x43 + {value: 0x0f13, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0716, lo: 0x8b, hi: 0x8c}, + {value: 0x0316, lo: 0x8d, hi: 0x8e}, + {value: 0x0f12, lo: 0x8f, hi: 0x8f}, + {value: 0x0117, lo: 0x90, hi: 0xbf}, + // Block 0xb, offset 0x4d + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x6553, lo: 0xb1, hi: 0xbf}, + // Block 0xc, offset 0x4f + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6853, lo: 0x90, hi: 0x96}, + {value: 0x0014, lo: 0x99, hi: 0x99}, + {value: 0x6552, lo: 0xa1, hi: 0xaf}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0xd, offset 0x54 + {value: 0x6852, lo: 0x80, hi: 0x86}, + {value: 0x198a, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0024, lo: 0x92, hi: 0x95}, + {value: 0x0034, lo: 0x96, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x99}, + {value: 0x0034, lo: 0x9a, hi: 0x9b}, + {value: 0x0024, lo: 0x9c, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa7}, + {value: 0x0024, lo: 0xa8, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, 
lo: 0xb0, hi: 0xbd}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xe, offset 0x64 + {value: 0x0034, lo: 0x81, hi: 0x82}, + {value: 0x0024, lo: 0x84, hi: 0x84}, + {value: 0x0034, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb3}, + {value: 0x0054, lo: 0xb4, hi: 0xb4}, + // Block 0xf, offset 0x6b + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0024, lo: 0x90, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0014, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x10, offset 0x70 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9c}, + {value: 0x0024, lo: 0x9d, hi: 0x9e}, + {value: 0x0034, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x11, offset 0x7e + {value: 0x0010, lo: 0x80, hi: 0xbf}, + // Block 0x12, offset 0x7f + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0024, lo: 0x9f, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x13, offset 0x8d + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0034, lo: 0xb1, hi: 0xb1}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0024, lo: 0xbf, hi: 0xbf}, + // Block 0x14, offset 0x9c + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0024, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x88}, + {value: 0x0024, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8d, hi: 0xbf}, + // Block 0x15, offset 0xa6 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x16, offset 0xa9 + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xb1}, + {value: 0x0034, lo: 0xb2, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + // Block 0x17, offset 0xaf + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x99}, + {value: 0x0014, lo: 0x9a, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0xa3}, + {value: 0x0014, lo: 0xa4, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + 
{value: 0x0024, lo: 0xa9, hi: 0xad}, + // Block 0x18, offset 0xb7 + {value: 0x0010, lo: 0x80, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0xa0, hi: 0xaa}, + // Block 0x19, offset 0xba + {value: 0x0010, lo: 0xa0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbd}, + // Block 0x1a, offset 0xbc + {value: 0x0024, lo: 0x94, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0024, lo: 0xaa, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbf}, + // Block 0x1b, offset 0xca + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1c, offset 0xd0 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0x1d, offset 0xde + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb6, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1e, offset 0xe9 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xb1}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x1f, offset 0xf5 + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x20, offset 0x100 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x99, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb1}, + {value: 0x0010, 
lo: 0xb2, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x21, offset 0x10c + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x22, offset 0x116 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x85}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xbf}, + // Block 0x23, offset 0x122 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x24, offset 0x12d + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x25, offset 0x139 + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0010, lo: 0xa8, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x26, offset 0x145 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x27, offset 0x14d + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb9}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbf}, + // Block 0x28, offset 0x155 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x88}, + {value: 0x0014, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x29, 
offset 0x15f + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x2a, offset 0x16a + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb2}, + // Block 0x2b, offset 0x176 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x2c, offset 0x17d + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x94, hi: 0x97}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xba, hi: 0xbf}, + // Block 0x2d, offset 0x188 + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x96}, + {value: 0x0010, lo: 0x9a, hi: 0xb1}, + {value: 0x0010, lo: 0xb3, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x2e, offset 0x18d + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x94}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9f}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + // Block 0x2f, offset 0x195 + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + // Block 0x30, offset 0x198 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x31, offset 0x19d + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xb9}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + // Block 0x32, offset 0x1a1 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x33, offset 0x1a5 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0034, lo: 0x98, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0034, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x34, offset 0x1ac + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xac}, + {value: 0x0034, lo: 0xb1, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 
0x0014, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xba, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x35, offset 0x1b5 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0024, lo: 0x82, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x86, hi: 0x87}, + {value: 0x0010, lo: 0x88, hi: 0x8c}, + {value: 0x0014, lo: 0x8d, hi: 0x97}, + {value: 0x0014, lo: 0x99, hi: 0xbc}, + // Block 0x36, offset 0x1bd + {value: 0x0034, lo: 0x86, hi: 0x86}, + // Block 0x37, offset 0x1be + {value: 0x0010, lo: 0xab, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbe}, + // Block 0x38, offset 0x1c7 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x96, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x99}, + {value: 0x0014, lo: 0x9e, hi: 0xa0}, + {value: 0x0010, lo: 0xa2, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xad}, + {value: 0x0014, lo: 0xb1, hi: 0xb4}, + // Block 0x39, offset 0x1ce + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x6c53, lo: 0xa0, hi: 0xbf}, + // Block 0x3a, offset 0x1d6 + {value: 0x7053, lo: 0x80, hi: 0x85}, + {value: 0x7053, lo: 0x87, hi: 0x87}, + {value: 0x7053, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x3b, offset 0x1dc + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x9a, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x3c, offset 0x1e2 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x3d, offset 0x1e7 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x82, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3e, offset 0x1eb + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3f, offset 0x1ee + {value: 0x0010, lo: 0x80, hi: 0x9a}, + {value: 0x0024, lo: 0x9d, hi: 0x9f}, + // Block 0x40, offset 0x1f0 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x7453, lo: 0xa0, hi: 0xaf}, + {value: 0x7853, lo: 0xb0, hi: 0xbf}, + // Block 0x41, offset 0x1f3 + {value: 0x7c53, lo: 0x80, hi: 0x8f}, + {value: 0x8053, lo: 0x90, hi: 0x9f}, + {value: 0x7c53, lo: 0xa0, hi: 0xaf}, + {value: 0x0813, lo: 0xb0, hi: 0xb5}, + {value: 0x0892, lo: 0xb8, hi: 0xbd}, + // Block 0x42, offset 0x1f8 + {value: 0x0010, lo: 0x81, hi: 0xbf}, + // Block 0x43, offset 0x1f9 + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0010, lo: 0xaf, hi: 0xbf}, + // Block 0x44, offset 0x1fb + {value: 0x0010, lo: 0x81, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x45, offset 0x1fd + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb8}, + // Block 
0x46, offset 0x1ff + {value: 0x0010, lo: 0x80, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0034, lo: 0x94, hi: 0x94}, + {value: 0x0010, lo: 0xa0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + // Block 0x47, offset 0x206 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xac}, + {value: 0x0010, lo: 0xae, hi: 0xb0}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + // Block 0x48, offset 0x20b + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x49, offset 0x20f + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0014, lo: 0x93, hi: 0x93}, + {value: 0x0004, lo: 0x97, hi: 0x97}, + {value: 0x0024, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0x4a, offset 0x218 + {value: 0x0014, lo: 0x8b, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x4b, offset 0x21b + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb7}, + // Block 0x4c, offset 0x21e + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x4d, offset 0x224 + {value: 0x0010, lo: 0x80, hi: 0xb5}, + // Block 0x4e, offset 0x225 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbb}, + // Block 0x4f, offset 0x230 + {value: 0x0010, lo: 0x86, hi: 0x8f}, + // Block 0x50, offset 0x231 + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x51, offset 0x232 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + // Block 0x52, offset 0x237 + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x9e}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xac}, + {value: 0x0010, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xbc}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x53, offset 0x244 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xa7, hi: 0xa7}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + {value: 0x0034, lo: 0xb5, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbc}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0x54, offset 0x24c + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 
0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x55, offset 0x254 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0030, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xab, hi: 0xab}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + {value: 0x0024, lo: 0xad, hi: 0xb3}, + // Block 0x56, offset 0x25d + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0030, lo: 0xaa, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbf}, + // Block 0x57, offset 0x266 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0030, lo: 0xb2, hi: 0xb3}, + // Block 0x58, offset 0x26f + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0x59, offset 0x274 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8d, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x5a, offset 0x277 + {value: 0x1a6a, lo: 0x80, hi: 0x80}, + {value: 0x1aea, lo: 0x81, hi: 0x81}, + {value: 0x1b6a, lo: 0x82, hi: 0x82}, + {value: 0x1bea, lo: 0x83, hi: 0x83}, + {value: 0x1c6a, lo: 0x84, hi: 0x84}, + {value: 0x1cea, lo: 0x85, hi: 0x85}, + {value: 0x1d6a, lo: 0x86, hi: 0x86}, + {value: 0x1dea, lo: 0x87, hi: 0x87}, + {value: 0x1e6a, lo: 0x88, hi: 0x88}, + // Block 0x5b, offset 0x280 + {value: 0x0024, lo: 0x90, hi: 0x92}, + {value: 0x0034, lo: 0x94, hi: 0x99}, + {value: 0x0024, lo: 0x9a, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9f}, + {value: 0x0024, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xb3}, + {value: 0x0024, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb7}, + {value: 0x0024, lo: 0xb8, hi: 0xb9}, + // Block 0x5c, offset 0x28d + {value: 0x0012, lo: 0x80, hi: 0xab}, + {value: 0x0015, lo: 0xac, hi: 0xbf}, + // Block 0x5d, offset 0x28f + {value: 0x0015, lo: 0x80, hi: 0xaa}, + {value: 0x0012, lo: 0xab, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb8}, + {value: 0x8452, lo: 0xb9, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbc}, + {value: 0x8852, lo: 0xbd, hi: 0xbd}, + {value: 0x0012, lo: 0xbe, hi: 0xbf}, + // Block 0x5e, offset 0x296 + {value: 0x0012, lo: 0x80, hi: 0x9a}, + {value: 0x0015, lo: 0x9b, hi: 0xbf}, + // Block 0x5f, offset 0x298 + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0024, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb9}, + {value: 0x0024, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, 
lo: 0xbc, hi: 0xbd}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x60, offset 0x2a4 + {value: 0x0117, lo: 0x80, hi: 0xbf}, + // Block 0x61, offset 0x2a5 + {value: 0x0117, lo: 0x80, hi: 0x95}, + {value: 0x1f1a, lo: 0x96, hi: 0x96}, + {value: 0x1fca, lo: 0x97, hi: 0x97}, + {value: 0x207a, lo: 0x98, hi: 0x98}, + {value: 0x212a, lo: 0x99, hi: 0x99}, + {value: 0x21da, lo: 0x9a, hi: 0x9a}, + {value: 0x228a, lo: 0x9b, hi: 0x9b}, + {value: 0x0012, lo: 0x9c, hi: 0x9d}, + {value: 0x233b, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0x9f, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x62, offset 0x2b0 + {value: 0x0812, lo: 0x80, hi: 0x87}, + {value: 0x0813, lo: 0x88, hi: 0x8f}, + {value: 0x0812, lo: 0x90, hi: 0x95}, + {value: 0x0813, lo: 0x98, hi: 0x9d}, + {value: 0x0812, lo: 0xa0, hi: 0xa7}, + {value: 0x0813, lo: 0xa8, hi: 0xaf}, + {value: 0x0812, lo: 0xb0, hi: 0xb7}, + {value: 0x0813, lo: 0xb8, hi: 0xbf}, + // Block 0x63, offset 0x2b8 + {value: 0x0004, lo: 0x8b, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8f}, + {value: 0x0054, lo: 0x98, hi: 0x99}, + {value: 0x0054, lo: 0xa4, hi: 0xa4}, + {value: 0x0054, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xaa, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xaf}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x64, offset 0x2c0 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x94, hi: 0x94}, + {value: 0x0014, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa6, hi: 0xaf}, + {value: 0x0015, lo: 0xb1, hi: 0xb1}, + {value: 0x0015, lo: 0xbf, hi: 0xbf}, + // Block 0x65, offset 0x2c6 + {value: 0x0015, lo: 0x90, hi: 0x9c}, + // Block 0x66, offset 0x2c7 + {value: 0x0024, lo: 0x90, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0xa0}, + {value: 0x0024, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa4}, + {value: 0x0034, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + // Block 0x67, offset 0x2d5 + {value: 0x0016, lo: 0x85, hi: 0x86}, + {value: 0x0012, lo: 0x87, hi: 0x89}, + {value: 0x9d52, lo: 0x8e, hi: 0x8e}, + {value: 0x1013, lo: 0xa0, hi: 0xaf}, + {value: 0x1012, lo: 0xb0, hi: 0xbf}, + // Block 0x68, offset 0x2da + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x88}, + // Block 0x69, offset 0x2dd + {value: 0xa053, lo: 0xb6, hi: 0xb7}, + {value: 0xa353, lo: 0xb8, hi: 0xb9}, + {value: 0xa653, lo: 0xba, hi: 0xbb}, + {value: 0xa353, lo: 0xbc, hi: 0xbd}, + {value: 0xa053, lo: 0xbe, hi: 0xbf}, + // Block 0x6a, offset 0x2e2 + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6553, lo: 0x90, hi: 0x9f}, + {value: 0xa953, lo: 0xa0, hi: 0xae}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0x6b, offset 0x2e6 + {value: 0x0117, lo: 0x80, hi: 0xa3}, + {value: 0x0012, lo: 0xa4, hi: 0xa4}, + {value: 0x0716, lo: 0xab, hi: 0xac}, + {value: 0x0316, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb3}, + // Block 0x6c, offset 0x2ec + {value: 0x6c52, lo: 0x80, hi: 0x9f}, + {value: 0x7052, lo: 0xa0, hi: 0xa5}, + {value: 0x7052, lo: 0xa7, hi: 0xa7}, + {value: 0x7052, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x6d, offset 0x2f1 + {value: 0x0010, lo: 0x80, hi: 
0xa7}, + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x6e, offset 0x2f4 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0010, lo: 0xb0, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x6f, offset 0x2f9 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9e}, + {value: 0x0024, lo: 0xa0, hi: 0xbf}, + // Block 0x70, offset 0x2fe + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + // Block 0x71, offset 0x2ff + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0xaa, hi: 0xad}, + {value: 0x0030, lo: 0xae, hi: 0xaf}, + {value: 0x0004, lo: 0xb1, hi: 0xb5}, + {value: 0x0014, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x72, offset 0x305 + {value: 0x0034, lo: 0x99, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9e}, + // Block 0x73, offset 0x307 + {value: 0x0004, lo: 0xbc, hi: 0xbe}, + // Block 0x74, offset 0x308 + {value: 0x0010, lo: 0x85, hi: 0xae}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x75, offset 0x30a + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0010, lo: 0xa0, hi: 0xba}, + // Block 0x76, offset 0x30c + {value: 0x0010, lo: 0x80, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0xbf}, + // Block 0x77, offset 0x30f + {value: 0x0010, lo: 0x80, hi: 0x8c}, + // Block 0x78, offset 0x310 + {value: 0x0010, lo: 0x90, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x79, offset 0x312 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0xab}, + // Block 0x7a, offset 0x315 + {value: 0x0117, lo: 0x80, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb2}, + {value: 0x0024, lo: 0xb4, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x7b, offset 0x31b + {value: 0x0117, lo: 0x80, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9d}, + {value: 0x0024, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x7c, offset 0x31f + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb1}, + // Block 0x7d, offset 0x321 + {value: 0x0004, lo: 0x80, hi: 0x96}, + {value: 0x0014, lo: 0x97, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xaf}, + {value: 0x0012, lo: 0xb0, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xbf}, + // Block 0x7e, offset 0x326 + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x0015, lo: 0xb0, hi: 0xb0}, + {value: 0x0012, lo: 0xb1, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x8453, lo: 0xbd, hi: 0xbd}, + {value: 0x0117, lo: 0xbe, hi: 0xbf}, + // Block 0x7f, offset 0x32d + {value: 0x0010, lo: 0xb7, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbf}, + // Block 0x80, offset 0x331 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x8c, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + // Block 0x81, offset 0x33a + {value: 0x0010, lo: 0x80, hi: 0xb3}, + // Block 0x82, offset 0x33b + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 
0x84}, + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xa0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb7}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x83, offset 0x343 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x84, offset 0x347 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0x92}, + {value: 0x0030, lo: 0x93, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0x85, offset 0x34c + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x86, offset 0x354 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0004, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x87, offset 0x35a + {value: 0x0010, lo: 0x80, hi: 0xa8}, + {value: 0x0014, lo: 0xa9, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0x88, offset 0x360 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x89, offset 0x36a + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0024, lo: 0xbe, hi: 0xbf}, + // Block 0x8a, offset 0x36f + {value: 0x0024, lo: 0x81, hi: 0x81}, + {value: 0x0004, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + // Block 0x8b, offset 0x378 + {value: 0x0010, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x8e}, + {value: 0x0010, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x8c, offset 0x37e + {value: 0x0012, lo: 0x80, hi: 0x92}, + {value: 0xac52, lo: 0x93, hi: 0x93}, + {value: 0x0012, lo: 0x94, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa5}, + {value: 0x74d2, lo: 0xb0, hi: 0xbf}, + // Block 0x8d, offset 0x385 + {value: 0x78d2, lo: 0x80, hi: 0x8f}, + {value: 0x7cd2, lo: 0x90, hi: 0x9f}, + {value: 0x80d2, lo: 0xa0, hi: 0xaf}, + {value: 0x7cd2, lo: 0xb0, hi: 0xbf}, + // Block 0x8e, offset 0x389 + {value: 0x0010, lo: 0x80, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xac, 
hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x8f, offset 0x391 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x90, offset 0x393 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x8b, hi: 0xbb}, + // Block 0x91, offset 0x395 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0xbf}, + // Block 0x92, offset 0x398 + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0004, lo: 0xb2, hi: 0xbf}, + // Block 0x93, offset 0x39a + {value: 0x0004, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x93, hi: 0xbf}, + // Block 0x94, offset 0x39c + {value: 0x0010, lo: 0x80, hi: 0xbd}, + // Block 0x95, offset 0x39d + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0x96, offset 0x39e + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0xbf}, + // Block 0x97, offset 0x3a0 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0xb0, hi: 0xbb}, + // Block 0x98, offset 0x3a2 + {value: 0x0014, lo: 0x80, hi: 0x8f}, + {value: 0x0054, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0xa0, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xad}, + {value: 0x0024, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + // Block 0x99, offset 0x3a8 + {value: 0x0010, lo: 0x8d, hi: 0x8f}, + {value: 0x0054, lo: 0x92, hi: 0x92}, + {value: 0x0054, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0xb0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x9a, offset 0x3ad + {value: 0x0010, lo: 0x80, hi: 0xbc}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x9b, offset 0x3af + {value: 0x0054, lo: 0x87, hi: 0x87}, + {value: 0x0054, lo: 0x8e, hi: 0x8e}, + {value: 0x0054, lo: 0x9a, hi: 0x9a}, + {value: 0x5f53, lo: 0xa1, hi: 0xba}, + {value: 0x0004, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9c, offset 0x3b5 + {value: 0x0004, lo: 0x80, hi: 0x80}, + {value: 0x5f52, lo: 0x81, hi: 0x9a}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + // Block 0x9d, offset 0x3b8 + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbe}, + // Block 0x9e, offset 0x3ba + {value: 0x0010, lo: 0x82, hi: 0x87}, + {value: 0x0010, lo: 0x8a, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0x97}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0004, lo: 0xa3, hi: 0xa3}, + {value: 0x0014, lo: 0xb9, hi: 0xbb}, + // Block 0x9f, offset 0x3c0 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0010, lo: 0x8d, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xba}, + {value: 0x0010, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xa0, offset 0x3c5 + {value: 0x0010, lo: 0x80, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x9d}, + // Block 0xa1, offset 0x3c7 + {value: 0x0010, lo: 0x80, hi: 0xba}, + // Block 0xa2, offset 0x3c8 + {value: 0x0010, lo: 0x80, hi: 0xb4}, + // Block 0xa3, offset 0x3c9 + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0xa4, offset 0x3ca + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa5, offset 0x3cc + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + // Block 0xa6, offset 0x3ce + {value: 0x0010, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xad, hi: 0xbf}, + // Block 0xa7, offset 0x3d0 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0xb5}, + {value: 0x0024, lo: 0xb6, hi: 0xba}, + // Block 0xa8, offset 0x3d3 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa9, offset 0x3d5 + 
{value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x91, hi: 0x95}, + // Block 0xaa, offset 0x3d8 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x97}, + {value: 0xaf53, lo: 0x98, hi: 0x9f}, + {value: 0xb253, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbf}, + // Block 0xab, offset 0x3e0 + {value: 0xaf52, lo: 0x80, hi: 0x87}, + {value: 0xb252, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xac, offset 0x3e3 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0xb253, lo: 0xb0, hi: 0xb7}, + {value: 0xaf53, lo: 0xb8, hi: 0xbf}, + // Block 0xad, offset 0x3e7 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x93}, + {value: 0xb252, lo: 0x98, hi: 0x9f}, + {value: 0xaf52, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbb}, + // Block 0xae, offset 0x3ef + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xaf, offset 0x3f1 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + // Block 0xb0, offset 0x3f2 + {value: 0x0010, lo: 0x80, hi: 0xb6}, + // Block 0xb1, offset 0x3f3 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + // Block 0xb2, offset 0x3f5 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xb3, offset 0x3fb + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xb4, offset 0x3fd + {value: 0x0010, lo: 0x80, hi: 0x9e}, + // Block 0xb5, offset 0x3fe + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + // Block 0xb6, offset 0x400 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb9}, + // Block 0xb7, offset 0x402 + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0xb8, offset 0x404 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x8e, hi: 0x8e}, + {value: 0x0024, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x99, hi: 0xb3}, + {value: 0x0024, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xb9, offset 0x411 + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0xba, offset 0x412 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + // Block 0xbb, offset 0x413 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + // Block 0xbc, offset 0x417 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + // Block 0xbd, offset 0x419 + {value: 0x0010, lo: 0x80, hi: 0x91}, + // Block 0xbe, offset 0x41a + {value: 0x0010, lo: 0x80, hi: 0x88}, + // Block 0xbf, offset 0x41b + {value: 0x5653, lo: 0x80, hi: 0xb2}, + // Block 0xc0, offset 0x41c + {value: 0x5652, lo: 0x80, hi: 0xb2}, + // Block 0xc1, offset 0x41d + {value: 0x0010, lo: 0x80, 
hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xc2, offset 0x421 + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xc3, offset 0x425 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + // Block 0xc4, offset 0x42b + {value: 0x0010, lo: 0x90, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc5, offset 0x42d + {value: 0x0024, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0xc6, offset 0x434 + {value: 0x0010, lo: 0x90, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + // Block 0xc7, offset 0x437 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xc8, offset 0x43b + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + // Block 0xc9, offset 0x441 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb4}, + {value: 0x0030, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xb7}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0xca, offset 0x44a + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xcb, offset 0x450 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa2}, + {value: 0x0014, lo: 0xa3, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xcc, offset 0x456 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xcd, offset 0x460 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0030, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9d, hi: 0xa3}, + {value: 0x0024, lo: 0xa6, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + // Block 0xce, offset 0x46a + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xcf, offset 0x46c + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + 
{value: 0x0014, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd0, offset 0x473 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xd1, offset 0x479 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd2, offset 0x47f + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd3, offset 0x485 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x98, hi: 0x9b}, + {value: 0x0014, lo: 0x9c, hi: 0x9d}, + // Block 0xd4, offset 0x488 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd5, offset 0x48e + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd6, offset 0x491 + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0014, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb5}, + {value: 0x0030, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0xd7, offset 0x499 + {value: 0x0010, lo: 0x80, hi: 0x89}, + // Block 0xd8, offset 0x49a + {value: 0x0014, lo: 0x9d, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xd9, offset 0x4a1 + {value: 0x5f53, lo: 0xa0, hi: 0xbf}, + // Block 0xda, offset 0x4a2 + {value: 0x5f52, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xdb, offset 0x4a5 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8b, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbe}, + // Block 0xdc, offset 0x4af + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0014, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x98}, + {value: 0x0014, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0xbf}, + // Block 0xdd, offset 0x4b5 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x86, hi: 0x89}, + {value: 0x0014, lo: 0x8a, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x99}, + // Block 0xde, offset 0x4bb + {value: 0x0010, lo: 0x80, hi: 0xb8}, + // Block 0xdf, offset 0x4bc + {value: 0x0010, lo: 0x80, hi: 
0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xe0, offset 0x4c2 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0xe1, offset 0x4c5 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0014, lo: 0x92, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xa9}, + {value: 0x0014, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0xe2, offset 0x4cd + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb6}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xe3, offset 0x4d4 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xe4, offset 0x4db + {value: 0x0010, lo: 0x80, hi: 0x99}, + // Block 0xe5, offset 0x4dc + {value: 0x0010, lo: 0x80, hi: 0xae}, + // Block 0xe6, offset 0x4dd + {value: 0x0010, lo: 0x80, hi: 0x83}, + // Block 0xe7, offset 0x4de + {value: 0x0010, lo: 0x80, hi: 0x86}, + // Block 0xe8, offset 0x4df + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xe9, offset 0x4e1 + {value: 0x0010, lo: 0x90, hi: 0xad}, + {value: 0x0034, lo: 0xb0, hi: 0xb4}, + // Block 0xea, offset 0x4e3 + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + // Block 0xeb, offset 0x4e5 + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa3, hi: 0xb7}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xec, offset 0x4e9 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + // Block 0xed, offset 0x4ea + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0xbe}, + // Block 0xee, offset 0x4ec + {value: 0x0014, lo: 0x8f, hi: 0x9f}, + // Block 0xef, offset 0x4ed + {value: 0x0014, lo: 0xa0, hi: 0xa1}, + // Block 0xf0, offset 0x4ee + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbc}, + // Block 0xf1, offset 0x4f0 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0034, lo: 0x9e, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa3}, + // Block 0xf2, offset 0x4f5 + {value: 0x0030, lo: 0xa5, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xa9}, + {value: 0x0030, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbf}, + // Block 0xf3, offset 0x4fa + {value: 0x0034, lo: 0x80, hi: 0x82}, + {value: 0x0024, lo: 0x85, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8b}, + {value: 0x0024, lo: 0xaa, hi: 0xad}, + // Block 0xf4, offset 0x4fe + {value: 0x0024, lo: 0x82, hi: 0x84}, + // Block 0xf5, offset 0x4ff + {value: 0x0013, lo: 0x80, hi: 0x99}, + {value: 0x0012, lo: 0x9a, hi: 0xb3}, + {value: 0x0013, lo: 0xb4, hi: 0xbf}, + // Block 0xf6, offset 0x502 + {value: 0x0013, lo: 0x80, hi: 0x8d}, + {value: 0x0012, lo: 0x8e, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0xa7}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 
0xf7, offset 0x506 + {value: 0x0013, lo: 0x80, hi: 0x81}, + {value: 0x0012, lo: 0x82, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0x9c}, + {value: 0x0013, lo: 0x9e, hi: 0x9f}, + {value: 0x0013, lo: 0xa2, hi: 0xa2}, + {value: 0x0013, lo: 0xa5, hi: 0xa6}, + {value: 0x0013, lo: 0xa9, hi: 0xac}, + {value: 0x0013, lo: 0xae, hi: 0xb5}, + {value: 0x0012, lo: 0xb6, hi: 0xb9}, + {value: 0x0012, lo: 0xbb, hi: 0xbb}, + {value: 0x0012, lo: 0xbd, hi: 0xbf}, + // Block 0xf8, offset 0x511 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0012, lo: 0x85, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0xf9, offset 0x515 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0013, lo: 0x84, hi: 0x85}, + {value: 0x0013, lo: 0x87, hi: 0x8a}, + {value: 0x0013, lo: 0x8d, hi: 0x94}, + {value: 0x0013, lo: 0x96, hi: 0x9c}, + {value: 0x0012, lo: 0x9e, hi: 0xb7}, + {value: 0x0013, lo: 0xb8, hi: 0xb9}, + {value: 0x0013, lo: 0xbb, hi: 0xbe}, + // Block 0xfa, offset 0x51d + {value: 0x0013, lo: 0x80, hi: 0x84}, + {value: 0x0013, lo: 0x86, hi: 0x86}, + {value: 0x0013, lo: 0x8a, hi: 0x90}, + {value: 0x0012, lo: 0x92, hi: 0xab}, + {value: 0x0013, lo: 0xac, hi: 0xbf}, + // Block 0xfb, offset 0x522 + {value: 0x0013, lo: 0x80, hi: 0x85}, + {value: 0x0012, lo: 0x86, hi: 0x9f}, + {value: 0x0013, lo: 0xa0, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbf}, + // Block 0xfc, offset 0x526 + {value: 0x0012, lo: 0x80, hi: 0x93}, + {value: 0x0013, lo: 0x94, hi: 0xad}, + {value: 0x0012, lo: 0xae, hi: 0xbf}, + // Block 0xfd, offset 0x529 + {value: 0x0012, lo: 0x80, hi: 0x87}, + {value: 0x0013, lo: 0x88, hi: 0xa1}, + {value: 0x0012, lo: 0xa2, hi: 0xbb}, + {value: 0x0013, lo: 0xbc, hi: 0xbf}, + // Block 0xfe, offset 0x52d + {value: 0x0013, lo: 0x80, hi: 0x95}, + {value: 0x0012, lo: 0x96, hi: 0xaf}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0xff, offset 0x530 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0012, lo: 0x8a, hi: 0xa5}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x100, offset 0x533 + {value: 0x0013, lo: 0x80, hi: 0x80}, + {value: 0x0012, lo: 0x82, hi: 0x9a}, + {value: 0x0012, lo: 0x9c, hi: 0xa1}, + {value: 0x0013, lo: 0xa2, hi: 0xba}, + {value: 0x0012, lo: 0xbc, hi: 0xbf}, + // Block 0x101, offset 0x538 + {value: 0x0012, lo: 0x80, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0xb4}, + {value: 0x0012, lo: 0xb6, hi: 0xbf}, + // Block 0x102, offset 0x53c + {value: 0x0012, lo: 0x80, hi: 0x8e}, + {value: 0x0012, lo: 0x90, hi: 0x95}, + {value: 0x0013, lo: 0x96, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x103, offset 0x540 + {value: 0x0012, lo: 0x80, hi: 0x88}, + {value: 0x0012, lo: 0x8a, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x104, offset 0x544 + {value: 0x0012, lo: 0x80, hi: 0x82}, + {value: 0x0012, lo: 0x84, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8b}, + {value: 0x0010, lo: 0x8e, hi: 0xbf}, + // Block 0x105, offset 0x548 + {value: 0x0014, lo: 0x80, hi: 0xb6}, + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x106, offset 0x54a + {value: 0x0014, lo: 0x80, hi: 0xac}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x107, offset 0x54c + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x9b, hi: 0x9f}, + {value: 0x0014, lo: 0xa1, hi: 0xaf}, + // Block 0x108, offset 0x54f + {value: 0x0024, lo: 0x80, hi: 0x86}, + {value: 0x0024, lo: 0x88, hi: 0x98}, + {value: 0x0024, lo: 0x9b, hi: 0xa1}, + {value: 0x0024, lo: 0xa3, hi: 0xa4}, + {value: 
0x0024, lo: 0xa6, hi: 0xaa}, + // Block 0x109, offset 0x554 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0034, lo: 0x90, hi: 0x96}, + // Block 0x10a, offset 0x556 + {value: 0xb552, lo: 0x80, hi: 0x81}, + {value: 0xb852, lo: 0x82, hi: 0x83}, + {value: 0x0024, lo: 0x84, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x10b, offset 0x55b + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x9f}, + {value: 0x0010, lo: 0xa1, hi: 0xa2}, + {value: 0x0010, lo: 0xa4, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb7}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + // Block 0x10c, offset 0x564 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x9b}, + {value: 0x0010, lo: 0xa1, hi: 0xa3}, + {value: 0x0010, lo: 0xa5, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xbb}, + // Block 0x10d, offset 0x569 + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x10e, offset 0x56a + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x10f, offset 0x56d + {value: 0x0013, lo: 0x80, hi: 0x89}, + // Block 0x110, offset 0x56e + {value: 0x0004, lo: 0xbb, hi: 0xbf}, + // Block 0x111, offset 0x56f + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0014, lo: 0xa0, hi: 0xbf}, + // Block 0x112, offset 0x571 + {value: 0x0014, lo: 0x80, hi: 0xbf}, + // Block 0x113, offset 0x572 + {value: 0x0014, lo: 0x80, hi: 0xaf}, +} + +// Total table size 14177 bytes (13KiB); checksum: F17D40E8 diff --git a/vendor/golang.org/x/text/cases/tables11.0.0.go b/vendor/golang.org/x/text/cases/tables11.0.0.go new file mode 100644 index 000000000..b1106b417 --- /dev/null +++ b/vendor/golang.org/x/text/cases/tables11.0.0.go @@ -0,0 +1,2317 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.13 && !go1.14 +// +build go1.13,!go1.14 + +package cases + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "11.0.0" + +var xorData string = "" + // Size: 188 bytes + "\x00\x06\x07\x00\x01?\x00\x0f\x03\x00\x0f\x12\x00\x0f\x1f\x00\x0f\x1d" + + "\x00\x01\x13\x00\x0f\x16\x00\x0f\x0b\x00\x0f3\x00\x0f7\x00\x01#\x00\x0f?" 
+ + "\x00\x0e'\x00\x0f/\x00\x0e>\x00\x0f*\x00\x0c&\x00\x0c*\x00\x0c;\x00\x0c9" + + "\x00\x0c%\x00\x01\x08\x00\x03\x0d\x00\x03\x09\x00\x02\x06\x00\x02\x02" + + "\x00\x02\x0c\x00\x01\x00\x00\x01\x03\x00\x01\x01\x00\x01 \x00\x01\x0c" + + "\x00\x01\x10\x00\x03\x10\x00\x036 \x00\x037 \x00\x0b#\x10\x00\x0b 0\x00" + + "\x0b!\x10\x00\x0b!0\x001\x00\x00\x0b(\x04\x00\x03\x04\x1e\x00\x03\x0a" + + "\x00\x02:\x00\x02>\x00\x02,\x00\x02\x00\x00\x02\x10\x00\x01<\x00\x01&" + + "\x00\x01*\x00\x01.\x00\x010\x003 \x00\x01\x18\x00\x01(\x00\x01\x1e\x00" + + "\x01\x22" + +var exceptions string = "" + // Size: 2436 bytes + "\x00\x12\x12μΜΜ\x12\x12ssSSSs\x13\x18i̇i̇\x10\x09II\x13\x1bʼnʼNʼN\x11" + + "\x09sSS\x12\x12dždžDž\x12\x12dždžDŽ\x10\x12DŽDž\x12\x12ljljLj\x12\x12ljljLJ\x10\x12LJLj" + + "\x12\x12njnjNj\x12\x12njnjNJ\x10\x12NJNj\x13\x1bǰJ̌J̌\x12\x12dzdzDz\x12\x12dzdzDZ\x10" + + "\x12DZDz\x13\x18ⱥⱥ\x13\x18ⱦⱦ\x10\x1bⱾⱾ\x10\x1bⱿⱿ\x10\x1bⱯⱯ\x10\x1bⱭⱭ\x10" + + "\x1bⱰⱰ\x10\x1bꞫꞫ\x10\x1bꞬꞬ\x10\x1bꞍꞍ\x10\x1bꞪꞪ\x10\x1bꞮꞮ\x10\x1bⱢⱢ\x10" + + "\x1bꞭꞭ\x10\x1bⱮⱮ\x10\x1bⱤⱤ\x10\x1bꞱꞱ\x10\x1bꞲꞲ\x10\x1bꞰꞰ2\x12ιΙΙ\x166ΐ" + + "Ϊ́Ϊ́\x166ΰΫ́Ϋ́\x12\x12σΣΣ\x12\x12βΒΒ\x12\x12θΘΘ\x12\x12φΦΦ\x12" + + "\x12πΠΠ\x12\x12κΚΚ\x12\x12ρΡΡ\x12\x12εΕΕ\x14$եւԵՒԵւ\x10\x1bᲐა\x10\x1bᲑბ" + + "\x10\x1bᲒგ\x10\x1bᲓდ\x10\x1bᲔე\x10\x1bᲕვ\x10\x1bᲖზ\x10\x1bᲗთ\x10\x1bᲘი" + + "\x10\x1bᲙკ\x10\x1bᲚლ\x10\x1bᲛმ\x10\x1bᲜნ\x10\x1bᲝო\x10\x1bᲞპ\x10\x1bᲟჟ" + + "\x10\x1bᲠრ\x10\x1bᲡს\x10\x1bᲢტ\x10\x1bᲣუ\x10\x1bᲤფ\x10\x1bᲥქ\x10\x1bᲦღ" + + "\x10\x1bᲧყ\x10\x1bᲨშ\x10\x1bᲩჩ\x10\x1bᲪც\x10\x1bᲫძ\x10\x1bᲬწ\x10\x1bᲭჭ" + + "\x10\x1bᲮხ\x10\x1bᲯჯ\x10\x1bᲰჰ\x10\x1bᲱჱ\x10\x1bᲲჲ\x10\x1bᲳჳ\x10\x1bᲴჴ" + + "\x10\x1bᲵჵ\x10\x1bᲶჶ\x10\x1bᲷჷ\x10\x1bᲸჸ\x10\x1bᲹჹ\x10\x1bᲺჺ\x10\x1bᲽჽ" + + "\x10\x1bᲾჾ\x10\x1bᲿჿ\x12\x12вВВ\x12\x12дДД\x12\x12оОО\x12\x12сСС\x12\x12" + + "тТТ\x12\x12тТТ\x12\x12ъЪЪ\x12\x12ѣѢѢ\x13\x1bꙋꙊꙊ\x13\x1bẖH̱H̱\x13\x1bẗ" + + "T̈T̈\x13\x1bẘW̊W̊\x13\x1bẙY̊Y̊\x13\x1baʾAʾAʾ\x13\x1bṡṠṠ\x12\x10ssß\x14" + + "$ὐΥ̓Υ̓\x166ὒΥ̓̀Υ̓̀\x166ὔΥ̓́Υ̓́\x166ὖΥ̓͂Υ̓͂\x15+ἀιἈΙᾈ\x15+ἁιἉΙᾉ" + + "\x15+ἂιἊΙᾊ\x15+ἃιἋΙᾋ\x15+ἄιἌΙᾌ\x15+ἅιἍΙᾍ\x15+ἆιἎΙᾎ\x15+ἇιἏΙᾏ\x15\x1dἀιᾀἈ" + + "Ι\x15\x1dἁιᾁἉΙ\x15\x1dἂιᾂἊΙ\x15\x1dἃιᾃἋΙ\x15\x1dἄιᾄἌΙ\x15\x1dἅιᾅἍΙ\x15" + + "\x1dἆιᾆἎΙ\x15\x1dἇιᾇἏΙ\x15+ἠιἨΙᾘ\x15+ἡιἩΙᾙ\x15+ἢιἪΙᾚ\x15+ἣιἫΙᾛ\x15+ἤιἬΙᾜ" + + "\x15+ἥιἭΙᾝ\x15+ἦιἮΙᾞ\x15+ἧιἯΙᾟ\x15\x1dἠιᾐἨΙ\x15\x1dἡιᾑἩΙ\x15\x1dἢιᾒἪΙ" + + "\x15\x1dἣιᾓἫΙ\x15\x1dἤιᾔἬΙ\x15\x1dἥιᾕἭΙ\x15\x1dἦιᾖἮΙ\x15\x1dἧιᾗἯΙ\x15+ὠι" + + "ὨΙᾨ\x15+ὡιὩΙᾩ\x15+ὢιὪΙᾪ\x15+ὣιὫΙᾫ\x15+ὤιὬΙᾬ\x15+ὥιὭΙᾭ\x15+ὦιὮΙᾮ\x15+ὧι" + + "ὯΙᾯ\x15\x1dὠιᾠὨΙ\x15\x1dὡιᾡὩΙ\x15\x1dὢιᾢὪΙ\x15\x1dὣιᾣὫΙ\x15\x1dὤιᾤὬΙ" + + "\x15\x1dὥιᾥὭΙ\x15\x1dὦιᾦὮΙ\x15\x1dὧιᾧὯΙ\x15-ὰιᾺΙᾺͅ\x14#αιΑΙᾼ\x14$άιΆΙΆͅ" + + "\x14$ᾶΑ͂Α͂\x166ᾶιΑ͂Ιᾼ͂\x14\x1cαιᾳΑΙ\x12\x12ιΙΙ\x15-ὴιῊΙῊͅ\x14#ηιΗΙῌ" + + "\x14$ήιΉΙΉͅ\x14$ῆΗ͂Η͂\x166ῆιΗ͂Ιῌ͂\x14\x1cηιῃΗΙ\x166ῒΪ̀Ϊ̀\x166ΐΙ" + + "̈́Ϊ́\x14$ῖΙ͂Ι͂\x166ῗΪ͂Ϊ͂\x166ῢΫ̀Ϋ̀\x166ΰΫ́Ϋ́\x14$ῤΡ̓Ρ̓" + + "\x14$ῦΥ͂Υ͂\x166ῧΫ͂Ϋ͂\x15-ὼιῺΙῺͅ\x14#ωιΩΙῼ\x14$ώιΏΙΏͅ\x14$ῶΩ͂Ω͂\x16" + + "6ῶιΩ͂Ιῼ͂\x14\x1cωιῳΩΙ\x12\x10ωω\x11\x08kk\x12\x10åå\x12\x10ɫɫ\x12\x10ɽ" + + "ɽ\x10\x12ȺȺ\x10\x12ȾȾ\x12\x10ɑɑ\x12\x10ɱɱ\x12\x10ɐɐ\x12\x10ɒɒ\x12\x10ȿȿ" + + "\x12\x10ɀɀ\x12\x10ɥɥ\x12\x10ɦɦ\x12\x10ɜɜ\x12\x10ɡɡ\x12\x10ɬɬ\x12\x10ɪɪ" + + "\x12\x10ʞʞ\x12\x10ʇʇ\x12\x10ʝʝ\x12\x12ffFFFf\x12\x12fiFIFi\x12\x12flFLFl" + + "\x13\x1bffiFFIFfi\x13\x1bfflFFLFfl\x12\x12stSTSt\x12\x12stSTSt\x14$մնՄՆՄ" + + "ն\x14$մեՄԵՄե\x14$միՄԻՄի\x14$վնՎՆՎն\x14$մխՄԽՄխ" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. 
len(s) must be greater than 0. +func (t *caseTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. 
+ } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// caseTrie. Total size: 12250 bytes (11.96 KiB). Checksum: 53ff6cb7321675e1. +type caseTrie struct{} + +func newCaseTrie(i int) *caseTrie { + return &caseTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *caseTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 20: + return uint16(caseValues[n<<6+uint32(b)]) + default: + n -= 20 + return uint16(sparse.lookup(n, b)) + } +} + +// caseValues: 22 blocks, 1408 entries, 2816 bytes +// The third block is the zero block. +var caseValues = [1408]uint16{ + // Block 0x0, offset 0x0 + 0x27: 0x0054, + 0x2e: 0x0054, + 0x30: 0x0010, 0x31: 0x0010, 0x32: 0x0010, 0x33: 0x0010, 0x34: 0x0010, 0x35: 0x0010, + 0x36: 0x0010, 0x37: 0x0010, 0x38: 0x0010, 0x39: 0x0010, 0x3a: 0x0054, + // Block 0x1, offset 0x40 + 0x41: 0x2013, 0x42: 0x2013, 0x43: 0x2013, 0x44: 0x2013, 0x45: 0x2013, + 0x46: 0x2013, 0x47: 0x2013, 0x48: 0x2013, 0x49: 0x2013, 0x4a: 0x2013, 0x4b: 0x2013, + 0x4c: 0x2013, 0x4d: 0x2013, 0x4e: 0x2013, 0x4f: 0x2013, 0x50: 0x2013, 0x51: 0x2013, + 0x52: 0x2013, 0x53: 0x2013, 0x54: 0x2013, 0x55: 0x2013, 0x56: 0x2013, 0x57: 0x2013, + 0x58: 0x2013, 0x59: 0x2013, 0x5a: 0x2013, + 0x5e: 0x0004, 0x5f: 0x0010, 0x60: 0x0004, 0x61: 0x2012, 0x62: 0x2012, 0x63: 0x2012, + 0x64: 0x2012, 0x65: 0x2012, 0x66: 0x2012, 0x67: 0x2012, 0x68: 0x2012, 0x69: 0x2012, + 0x6a: 0x2012, 0x6b: 0x2012, 0x6c: 0x2012, 0x6d: 0x2012, 0x6e: 0x2012, 0x6f: 0x2012, + 0x70: 0x2012, 0x71: 0x2012, 0x72: 0x2012, 0x73: 0x2012, 0x74: 0x2012, 0x75: 0x2012, + 0x76: 0x2012, 0x77: 0x2012, 0x78: 0x2012, 0x79: 0x2012, 0x7a: 0x2012, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x0852, 0xc1: 0x0b53, 0xc2: 0x0113, 0xc3: 0x0112, 0xc4: 0x0113, 0xc5: 0x0112, + 0xc6: 0x0b53, 0xc7: 0x0f13, 0xc8: 0x0f12, 0xc9: 0x0e53, 0xca: 0x1153, 0xcb: 0x0713, + 0xcc: 0x0712, 0xcd: 0x0012, 0xce: 0x1453, 0xcf: 0x1753, 0xd0: 0x1a53, 0xd1: 0x0313, + 0xd2: 0x0312, 0xd3: 0x1d53, 0xd4: 0x2053, 0xd5: 0x2352, 0xd6: 0x2653, 0xd7: 0x2653, + 0xd8: 0x0113, 0xd9: 0x0112, 0xda: 0x2952, 0xdb: 0x0012, 0xdc: 0x1d53, 0xdd: 0x2c53, + 0xde: 0x2f52, 0xdf: 0x3253, 0xe0: 0x0113, 0xe1: 0x0112, 0xe2: 0x0113, 0xe3: 0x0112, + 0xe4: 0x0113, 0xe5: 0x0112, 0xe6: 0x3553, 0xe7: 0x0f13, 0xe8: 0x0f12, 0xe9: 0x3853, + 0xea: 0x0012, 0xeb: 0x0012, 0xec: 0x0113, 0xed: 0x0112, 0xee: 0x3553, 0xef: 0x1f13, + 0xf0: 0x1f12, 0xf1: 0x3b53, 0xf2: 0x3e53, 0xf3: 0x0713, 0xf4: 0x0712, 0xf5: 0x0313, + 0xf6: 0x0312, 0xf7: 0x4153, 0xf8: 0x0113, 0xf9: 0x0112, 0xfa: 0x0012, 0xfb: 0x0010, + 0xfc: 0x0113, 0xfd: 0x0112, 0xfe: 0x0012, 0xff: 0x4452, + // Block 0x4, offset 0x100 + 0x100: 0x0010, 0x101: 0x0010, 0x102: 0x0010, 0x103: 0x0010, 0x104: 0x02db, 0x105: 
0x0359, + 0x106: 0x03da, 0x107: 0x043b, 0x108: 0x04b9, 0x109: 0x053a, 0x10a: 0x059b, 0x10b: 0x0619, + 0x10c: 0x069a, 0x10d: 0x0313, 0x10e: 0x0312, 0x10f: 0x1f13, 0x110: 0x1f12, 0x111: 0x0313, + 0x112: 0x0312, 0x113: 0x0713, 0x114: 0x0712, 0x115: 0x0313, 0x116: 0x0312, 0x117: 0x0f13, + 0x118: 0x0f12, 0x119: 0x0313, 0x11a: 0x0312, 0x11b: 0x0713, 0x11c: 0x0712, 0x11d: 0x1452, + 0x11e: 0x0113, 0x11f: 0x0112, 0x120: 0x0113, 0x121: 0x0112, 0x122: 0x0113, 0x123: 0x0112, + 0x124: 0x0113, 0x125: 0x0112, 0x126: 0x0113, 0x127: 0x0112, 0x128: 0x0113, 0x129: 0x0112, + 0x12a: 0x0113, 0x12b: 0x0112, 0x12c: 0x0113, 0x12d: 0x0112, 0x12e: 0x0113, 0x12f: 0x0112, + 0x130: 0x06fa, 0x131: 0x07ab, 0x132: 0x0829, 0x133: 0x08aa, 0x134: 0x0113, 0x135: 0x0112, + 0x136: 0x2353, 0x137: 0x4453, 0x138: 0x0113, 0x139: 0x0112, 0x13a: 0x0113, 0x13b: 0x0112, + 0x13c: 0x0113, 0x13d: 0x0112, 0x13e: 0x0113, 0x13f: 0x0112, + // Block 0x5, offset 0x140 + 0x140: 0x0a8a, 0x141: 0x0313, 0x142: 0x0312, 0x143: 0x0853, 0x144: 0x4753, 0x145: 0x4a53, + 0x146: 0x0113, 0x147: 0x0112, 0x148: 0x0113, 0x149: 0x0112, 0x14a: 0x0113, 0x14b: 0x0112, + 0x14c: 0x0113, 0x14d: 0x0112, 0x14e: 0x0113, 0x14f: 0x0112, 0x150: 0x0b0a, 0x151: 0x0b8a, + 0x152: 0x0c0a, 0x153: 0x0b52, 0x154: 0x0b52, 0x155: 0x0012, 0x156: 0x0e52, 0x157: 0x1152, + 0x158: 0x0012, 0x159: 0x1752, 0x15a: 0x0012, 0x15b: 0x1a52, 0x15c: 0x0c8a, 0x15d: 0x0012, + 0x15e: 0x0012, 0x15f: 0x0012, 0x160: 0x1d52, 0x161: 0x0d0a, 0x162: 0x0012, 0x163: 0x2052, + 0x164: 0x0012, 0x165: 0x0d8a, 0x166: 0x0e0a, 0x167: 0x0012, 0x168: 0x2652, 0x169: 0x2652, + 0x16a: 0x0e8a, 0x16b: 0x0f0a, 0x16c: 0x0f8a, 0x16d: 0x0012, 0x16e: 0x0012, 0x16f: 0x1d52, + 0x170: 0x0012, 0x171: 0x100a, 0x172: 0x2c52, 0x173: 0x0012, 0x174: 0x0012, 0x175: 0x3252, + 0x176: 0x0012, 0x177: 0x0012, 0x178: 0x0012, 0x179: 0x0012, 0x17a: 0x0012, 0x17b: 0x0012, + 0x17c: 0x0012, 0x17d: 0x108a, 0x17e: 0x0012, 0x17f: 0x0012, + // Block 0x6, offset 0x180 + 0x180: 0x3552, 0x181: 0x0012, 0x182: 0x0012, 0x183: 0x3852, 0x184: 0x0012, 0x185: 0x0012, + 0x186: 0x0012, 0x187: 0x110a, 0x188: 0x3552, 0x189: 0x4752, 0x18a: 0x3b52, 0x18b: 0x3e52, + 0x18c: 0x4a52, 0x18d: 0x0012, 0x18e: 0x0012, 0x18f: 0x0012, 0x190: 0x0012, 0x191: 0x0012, + 0x192: 0x4152, 0x193: 0x0012, 0x194: 0x0010, 0x195: 0x0012, 0x196: 0x0012, 0x197: 0x0012, + 0x198: 0x0012, 0x199: 0x0012, 0x19a: 0x0012, 0x19b: 0x0012, 0x19c: 0x0012, 0x19d: 0x118a, + 0x19e: 0x120a, 0x19f: 0x0012, 0x1a0: 0x0012, 0x1a1: 0x0012, 0x1a2: 0x0012, 0x1a3: 0x0012, + 0x1a4: 0x0012, 0x1a5: 0x0012, 0x1a6: 0x0012, 0x1a7: 0x0012, 0x1a8: 0x0012, 0x1a9: 0x0012, + 0x1aa: 0x0012, 0x1ab: 0x0012, 0x1ac: 0x0012, 0x1ad: 0x0012, 0x1ae: 0x0012, 0x1af: 0x0012, + 0x1b0: 0x0015, 0x1b1: 0x0015, 0x1b2: 0x0015, 0x1b3: 0x0015, 0x1b4: 0x0015, 0x1b5: 0x0015, + 0x1b6: 0x0015, 0x1b7: 0x0015, 0x1b8: 0x0015, 0x1b9: 0x0014, 0x1ba: 0x0014, 0x1bb: 0x0014, + 0x1bc: 0x0014, 0x1bd: 0x0014, 0x1be: 0x0014, 0x1bf: 0x0014, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0024, 0x1c1: 0x0024, 0x1c2: 0x0024, 0x1c3: 0x0024, 0x1c4: 0x0024, 0x1c5: 0x128d, + 0x1c6: 0x0024, 0x1c7: 0x0034, 0x1c8: 0x0034, 0x1c9: 0x0034, 0x1ca: 0x0024, 0x1cb: 0x0024, + 0x1cc: 0x0024, 0x1cd: 0x0034, 0x1ce: 0x0034, 0x1cf: 0x0014, 0x1d0: 0x0024, 0x1d1: 0x0024, + 0x1d2: 0x0024, 0x1d3: 0x0034, 0x1d4: 0x0034, 0x1d5: 0x0034, 0x1d6: 0x0034, 0x1d7: 0x0024, + 0x1d8: 0x0034, 0x1d9: 0x0034, 0x1da: 0x0034, 0x1db: 0x0024, 0x1dc: 0x0034, 0x1dd: 0x0034, + 0x1de: 0x0034, 0x1df: 0x0034, 0x1e0: 0x0034, 0x1e1: 0x0034, 0x1e2: 0x0034, 0x1e3: 0x0024, + 0x1e4: 0x0024, 0x1e5: 0x0024, 0x1e6: 0x0024, 0x1e7: 
0x0024, 0x1e8: 0x0024, 0x1e9: 0x0024, + 0x1ea: 0x0024, 0x1eb: 0x0024, 0x1ec: 0x0024, 0x1ed: 0x0024, 0x1ee: 0x0024, 0x1ef: 0x0024, + 0x1f0: 0x0113, 0x1f1: 0x0112, 0x1f2: 0x0113, 0x1f3: 0x0112, 0x1f4: 0x0014, 0x1f5: 0x0004, + 0x1f6: 0x0113, 0x1f7: 0x0112, 0x1fa: 0x0015, 0x1fb: 0x4d52, + 0x1fc: 0x5052, 0x1fd: 0x5052, 0x1ff: 0x5353, + // Block 0x8, offset 0x200 + 0x204: 0x0004, 0x205: 0x0004, + 0x206: 0x2a13, 0x207: 0x0054, 0x208: 0x2513, 0x209: 0x2713, 0x20a: 0x2513, + 0x20c: 0x5653, 0x20e: 0x5953, 0x20f: 0x5c53, 0x210: 0x130a, 0x211: 0x2013, + 0x212: 0x2013, 0x213: 0x2013, 0x214: 0x2013, 0x215: 0x2013, 0x216: 0x2013, 0x217: 0x2013, + 0x218: 0x2013, 0x219: 0x2013, 0x21a: 0x2013, 0x21b: 0x2013, 0x21c: 0x2013, 0x21d: 0x2013, + 0x21e: 0x2013, 0x21f: 0x2013, 0x220: 0x5f53, 0x221: 0x5f53, 0x223: 0x5f53, + 0x224: 0x5f53, 0x225: 0x5f53, 0x226: 0x5f53, 0x227: 0x5f53, 0x228: 0x5f53, 0x229: 0x5f53, + 0x22a: 0x5f53, 0x22b: 0x5f53, 0x22c: 0x2a12, 0x22d: 0x2512, 0x22e: 0x2712, 0x22f: 0x2512, + 0x230: 0x144a, 0x231: 0x2012, 0x232: 0x2012, 0x233: 0x2012, 0x234: 0x2012, 0x235: 0x2012, + 0x236: 0x2012, 0x237: 0x2012, 0x238: 0x2012, 0x239: 0x2012, 0x23a: 0x2012, 0x23b: 0x2012, + 0x23c: 0x2012, 0x23d: 0x2012, 0x23e: 0x2012, 0x23f: 0x2012, + // Block 0x9, offset 0x240 + 0x240: 0x5f52, 0x241: 0x5f52, 0x242: 0x158a, 0x243: 0x5f52, 0x244: 0x5f52, 0x245: 0x5f52, + 0x246: 0x5f52, 0x247: 0x5f52, 0x248: 0x5f52, 0x249: 0x5f52, 0x24a: 0x5f52, 0x24b: 0x5f52, + 0x24c: 0x5652, 0x24d: 0x5952, 0x24e: 0x5c52, 0x24f: 0x1813, 0x250: 0x160a, 0x251: 0x168a, + 0x252: 0x0013, 0x253: 0x0013, 0x254: 0x0013, 0x255: 0x170a, 0x256: 0x178a, 0x257: 0x1812, + 0x258: 0x0113, 0x259: 0x0112, 0x25a: 0x0113, 0x25b: 0x0112, 0x25c: 0x0113, 0x25d: 0x0112, + 0x25e: 0x0113, 0x25f: 0x0112, 0x260: 0x0113, 0x261: 0x0112, 0x262: 0x0113, 0x263: 0x0112, + 0x264: 0x0113, 0x265: 0x0112, 0x266: 0x0113, 0x267: 0x0112, 0x268: 0x0113, 0x269: 0x0112, + 0x26a: 0x0113, 0x26b: 0x0112, 0x26c: 0x0113, 0x26d: 0x0112, 0x26e: 0x0113, 0x26f: 0x0112, + 0x270: 0x180a, 0x271: 0x188a, 0x272: 0x0b12, 0x273: 0x5352, 0x274: 0x6253, 0x275: 0x190a, + 0x277: 0x0f13, 0x278: 0x0f12, 0x279: 0x0b13, 0x27a: 0x0113, 0x27b: 0x0112, + 0x27c: 0x0012, 0x27d: 0x4d53, 0x27e: 0x5053, 0x27f: 0x5053, + // Block 0xa, offset 0x280 + 0x280: 0x6852, 0x281: 0x6852, 0x282: 0x6852, 0x283: 0x6852, 0x284: 0x6852, 0x285: 0x6852, + 0x286: 0x6852, 0x287: 0x198a, 0x288: 0x0012, + 0x291: 0x0034, + 0x292: 0x0024, 0x293: 0x0024, 0x294: 0x0024, 0x295: 0x0024, 0x296: 0x0034, 0x297: 0x0024, + 0x298: 0x0024, 0x299: 0x0024, 0x29a: 0x0034, 0x29b: 0x0034, 0x29c: 0x0024, 0x29d: 0x0024, + 0x29e: 0x0024, 0x29f: 0x0024, 0x2a0: 0x0024, 0x2a1: 0x0024, 0x2a2: 0x0034, 0x2a3: 0x0034, + 0x2a4: 0x0034, 0x2a5: 0x0034, 0x2a6: 0x0034, 0x2a7: 0x0034, 0x2a8: 0x0024, 0x2a9: 0x0024, + 0x2aa: 0x0034, 0x2ab: 0x0024, 0x2ac: 0x0024, 0x2ad: 0x0034, 0x2ae: 0x0034, 0x2af: 0x0024, + 0x2b0: 0x0034, 0x2b1: 0x0034, 0x2b2: 0x0034, 0x2b3: 0x0034, 0x2b4: 0x0034, 0x2b5: 0x0034, + 0x2b6: 0x0034, 0x2b7: 0x0034, 0x2b8: 0x0034, 0x2b9: 0x0034, 0x2ba: 0x0034, 0x2bb: 0x0034, + 0x2bc: 0x0034, 0x2bd: 0x0034, 0x2bf: 0x0034, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x7053, 0x2c1: 0x7053, 0x2c2: 0x7053, 0x2c3: 0x7053, 0x2c4: 0x7053, 0x2c5: 0x7053, + 0x2c7: 0x7053, + 0x2cd: 0x7053, 0x2d0: 0x1a6a, 0x2d1: 0x1aea, + 0x2d2: 0x1b6a, 0x2d3: 0x1bea, 0x2d4: 0x1c6a, 0x2d5: 0x1cea, 0x2d6: 0x1d6a, 0x2d7: 0x1dea, + 0x2d8: 0x1e6a, 0x2d9: 0x1eea, 0x2da: 0x1f6a, 0x2db: 0x1fea, 0x2dc: 0x206a, 0x2dd: 0x20ea, + 0x2de: 0x216a, 0x2df: 0x21ea, 0x2e0: 0x226a, 0x2e1: 0x22ea, 0x2e2: 0x236a, 
0x2e3: 0x23ea, + 0x2e4: 0x246a, 0x2e5: 0x24ea, 0x2e6: 0x256a, 0x2e7: 0x25ea, 0x2e8: 0x266a, 0x2e9: 0x26ea, + 0x2ea: 0x276a, 0x2eb: 0x27ea, 0x2ec: 0x286a, 0x2ed: 0x28ea, 0x2ee: 0x296a, 0x2ef: 0x29ea, + 0x2f0: 0x2a6a, 0x2f1: 0x2aea, 0x2f2: 0x2b6a, 0x2f3: 0x2bea, 0x2f4: 0x2c6a, 0x2f5: 0x2cea, + 0x2f6: 0x2d6a, 0x2f7: 0x2dea, 0x2f8: 0x2e6a, 0x2f9: 0x2eea, 0x2fa: 0x2f6a, + 0x2fc: 0x0014, 0x2fd: 0x2fea, 0x2fe: 0x306a, 0x2ff: 0x30ea, + // Block 0xc, offset 0x300 + 0x300: 0x0812, 0x301: 0x0812, 0x302: 0x0812, 0x303: 0x0812, 0x304: 0x0812, 0x305: 0x0812, + 0x308: 0x0813, 0x309: 0x0813, 0x30a: 0x0813, 0x30b: 0x0813, + 0x30c: 0x0813, 0x30d: 0x0813, 0x310: 0x3a9a, 0x311: 0x0812, + 0x312: 0x3b7a, 0x313: 0x0812, 0x314: 0x3cba, 0x315: 0x0812, 0x316: 0x3dfa, 0x317: 0x0812, + 0x319: 0x0813, 0x31b: 0x0813, 0x31d: 0x0813, + 0x31f: 0x0813, 0x320: 0x0812, 0x321: 0x0812, 0x322: 0x0812, 0x323: 0x0812, + 0x324: 0x0812, 0x325: 0x0812, 0x326: 0x0812, 0x327: 0x0812, 0x328: 0x0813, 0x329: 0x0813, + 0x32a: 0x0813, 0x32b: 0x0813, 0x32c: 0x0813, 0x32d: 0x0813, 0x32e: 0x0813, 0x32f: 0x0813, + 0x330: 0x8e52, 0x331: 0x8e52, 0x332: 0x9152, 0x333: 0x9152, 0x334: 0x9452, 0x335: 0x9452, + 0x336: 0x9752, 0x337: 0x9752, 0x338: 0x9a52, 0x339: 0x9a52, 0x33a: 0x9d52, 0x33b: 0x9d52, + 0x33c: 0x4d52, 0x33d: 0x4d52, + // Block 0xd, offset 0x340 + 0x340: 0x3f3a, 0x341: 0x402a, 0x342: 0x411a, 0x343: 0x420a, 0x344: 0x42fa, 0x345: 0x43ea, + 0x346: 0x44da, 0x347: 0x45ca, 0x348: 0x46b9, 0x349: 0x47a9, 0x34a: 0x4899, 0x34b: 0x4989, + 0x34c: 0x4a79, 0x34d: 0x4b69, 0x34e: 0x4c59, 0x34f: 0x4d49, 0x350: 0x4e3a, 0x351: 0x4f2a, + 0x352: 0x501a, 0x353: 0x510a, 0x354: 0x51fa, 0x355: 0x52ea, 0x356: 0x53da, 0x357: 0x54ca, + 0x358: 0x55b9, 0x359: 0x56a9, 0x35a: 0x5799, 0x35b: 0x5889, 0x35c: 0x5979, 0x35d: 0x5a69, + 0x35e: 0x5b59, 0x35f: 0x5c49, 0x360: 0x5d3a, 0x361: 0x5e2a, 0x362: 0x5f1a, 0x363: 0x600a, + 0x364: 0x60fa, 0x365: 0x61ea, 0x366: 0x62da, 0x367: 0x63ca, 0x368: 0x64b9, 0x369: 0x65a9, + 0x36a: 0x6699, 0x36b: 0x6789, 0x36c: 0x6879, 0x36d: 0x6969, 0x36e: 0x6a59, 0x36f: 0x6b49, + 0x370: 0x0812, 0x371: 0x0812, 0x372: 0x6c3a, 0x373: 0x6d4a, 0x374: 0x6e1a, + 0x376: 0x6efa, 0x377: 0x6fda, 0x378: 0x0813, 0x379: 0x0813, 0x37a: 0x8e53, 0x37b: 0x8e53, + 0x37c: 0x7119, 0x37d: 0x0004, 0x37e: 0x71ea, 0x37f: 0x0004, + // Block 0xe, offset 0x380 + 0x380: 0x0004, 0x381: 0x0004, 0x382: 0x726a, 0x383: 0x737a, 0x384: 0x744a, + 0x386: 0x752a, 0x387: 0x760a, 0x388: 0x9153, 0x389: 0x9153, 0x38a: 0x9453, 0x38b: 0x9453, + 0x38c: 0x7749, 0x38d: 0x0004, 0x38e: 0x0004, 0x38f: 0x0004, 0x390: 0x0812, 0x391: 0x0812, + 0x392: 0x781a, 0x393: 0x795a, 0x396: 0x7a9a, 0x397: 0x7b7a, + 0x398: 0x0813, 0x399: 0x0813, 0x39a: 0x9753, 0x39b: 0x9753, 0x39d: 0x0004, + 0x39e: 0x0004, 0x39f: 0x0004, 0x3a0: 0x0812, 0x3a1: 0x0812, 0x3a2: 0x7cba, 0x3a3: 0x7dfa, + 0x3a4: 0x7f3a, 0x3a5: 0x0912, 0x3a6: 0x801a, 0x3a7: 0x80fa, 0x3a8: 0x0813, 0x3a9: 0x0813, + 0x3aa: 0x9d53, 0x3ab: 0x9d53, 0x3ac: 0x0913, 0x3ad: 0x0004, 0x3ae: 0x0004, 0x3af: 0x0004, + 0x3b2: 0x823a, 0x3b3: 0x834a, 0x3b4: 0x841a, + 0x3b6: 0x84fa, 0x3b7: 0x85da, 0x3b8: 0x9a53, 0x3b9: 0x9a53, 0x3ba: 0x4d53, 0x3bb: 0x4d53, + 0x3bc: 0x8719, 0x3bd: 0x0004, 0x3be: 0x0004, + // Block 0xf, offset 0x3c0 + 0x3c2: 0x0013, + 0x3c7: 0x0013, 0x3ca: 0x0012, 0x3cb: 0x0013, + 0x3cc: 0x0013, 0x3cd: 0x0013, 0x3ce: 0x0012, 0x3cf: 0x0012, 0x3d0: 0x0013, 0x3d1: 0x0013, + 0x3d2: 0x0013, 0x3d3: 0x0012, 0x3d5: 0x0013, + 0x3d9: 0x0013, 0x3da: 0x0013, 0x3db: 0x0013, 0x3dc: 0x0013, 0x3dd: 0x0013, + 0x3e4: 0x0013, 0x3e6: 0x87eb, 0x3e8: 0x0013, + 0x3ea: 0x884b, 
0x3eb: 0x888b, 0x3ec: 0x0013, 0x3ed: 0x0013, 0x3ef: 0x0012, + 0x3f0: 0x0013, 0x3f1: 0x0013, 0x3f2: 0xa053, 0x3f3: 0x0013, 0x3f4: 0x0012, 0x3f5: 0x0010, + 0x3f6: 0x0010, 0x3f7: 0x0010, 0x3f8: 0x0010, 0x3f9: 0x0012, + 0x3fc: 0x0012, 0x3fd: 0x0012, 0x3fe: 0x0013, 0x3ff: 0x0013, + // Block 0x10, offset 0x400 + 0x400: 0x1a13, 0x401: 0x1a13, 0x402: 0x1e13, 0x403: 0x1e13, 0x404: 0x1a13, 0x405: 0x1a13, + 0x406: 0x2613, 0x407: 0x2613, 0x408: 0x2a13, 0x409: 0x2a13, 0x40a: 0x2e13, 0x40b: 0x2e13, + 0x40c: 0x2a13, 0x40d: 0x2a13, 0x40e: 0x2613, 0x40f: 0x2613, 0x410: 0xa352, 0x411: 0xa352, + 0x412: 0xa652, 0x413: 0xa652, 0x414: 0xa952, 0x415: 0xa952, 0x416: 0xa652, 0x417: 0xa652, + 0x418: 0xa352, 0x419: 0xa352, 0x41a: 0x1a12, 0x41b: 0x1a12, 0x41c: 0x1e12, 0x41d: 0x1e12, + 0x41e: 0x1a12, 0x41f: 0x1a12, 0x420: 0x2612, 0x421: 0x2612, 0x422: 0x2a12, 0x423: 0x2a12, + 0x424: 0x2e12, 0x425: 0x2e12, 0x426: 0x2a12, 0x427: 0x2a12, 0x428: 0x2612, 0x429: 0x2612, + // Block 0x11, offset 0x440 + 0x440: 0x6552, 0x441: 0x6552, 0x442: 0x6552, 0x443: 0x6552, 0x444: 0x6552, 0x445: 0x6552, + 0x446: 0x6552, 0x447: 0x6552, 0x448: 0x6552, 0x449: 0x6552, 0x44a: 0x6552, 0x44b: 0x6552, + 0x44c: 0x6552, 0x44d: 0x6552, 0x44e: 0x6552, 0x44f: 0x6552, 0x450: 0xac52, 0x451: 0xac52, + 0x452: 0xac52, 0x453: 0xac52, 0x454: 0xac52, 0x455: 0xac52, 0x456: 0xac52, 0x457: 0xac52, + 0x458: 0xac52, 0x459: 0xac52, 0x45a: 0xac52, 0x45b: 0xac52, 0x45c: 0xac52, 0x45d: 0xac52, + 0x45e: 0xac52, 0x460: 0x0113, 0x461: 0x0112, 0x462: 0x88eb, 0x463: 0x8b53, + 0x464: 0x894b, 0x465: 0x89aa, 0x466: 0x8a0a, 0x467: 0x0f13, 0x468: 0x0f12, 0x469: 0x0313, + 0x46a: 0x0312, 0x46b: 0x0713, 0x46c: 0x0712, 0x46d: 0x8a6b, 0x46e: 0x8acb, 0x46f: 0x8b2b, + 0x470: 0x8b8b, 0x471: 0x0012, 0x472: 0x0113, 0x473: 0x0112, 0x474: 0x0012, 0x475: 0x0313, + 0x476: 0x0312, 0x477: 0x0012, 0x478: 0x0012, 0x479: 0x0012, 0x47a: 0x0012, 0x47b: 0x0012, + 0x47c: 0x0015, 0x47d: 0x0015, 0x47e: 0x8beb, 0x47f: 0x8c4b, + // Block 0x12, offset 0x480 + 0x480: 0x0113, 0x481: 0x0112, 0x482: 0x0113, 0x483: 0x0112, 0x484: 0x0113, 0x485: 0x0112, + 0x486: 0x0113, 0x487: 0x0112, 0x488: 0x0014, 0x489: 0x0014, 0x48a: 0x0014, 0x48b: 0x0713, + 0x48c: 0x0712, 0x48d: 0x8cab, 0x48e: 0x0012, 0x48f: 0x0010, 0x490: 0x0113, 0x491: 0x0112, + 0x492: 0x0113, 0x493: 0x0112, 0x494: 0x0012, 0x495: 0x0012, 0x496: 0x0113, 0x497: 0x0112, + 0x498: 0x0113, 0x499: 0x0112, 0x49a: 0x0113, 0x49b: 0x0112, 0x49c: 0x0113, 0x49d: 0x0112, + 0x49e: 0x0113, 0x49f: 0x0112, 0x4a0: 0x0113, 0x4a1: 0x0112, 0x4a2: 0x0113, 0x4a3: 0x0112, + 0x4a4: 0x0113, 0x4a5: 0x0112, 0x4a6: 0x0113, 0x4a7: 0x0112, 0x4a8: 0x0113, 0x4a9: 0x0112, + 0x4aa: 0x8d0b, 0x4ab: 0x8d6b, 0x4ac: 0x8dcb, 0x4ad: 0x8e2b, 0x4ae: 0x8e8b, 0x4af: 0x0012, + 0x4b0: 0x8eeb, 0x4b1: 0x8f4b, 0x4b2: 0x8fab, 0x4b3: 0xaf53, 0x4b4: 0x0113, 0x4b5: 0x0112, + 0x4b6: 0x0113, 0x4b7: 0x0112, 0x4b8: 0x0113, 0x4b9: 0x0112, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x900a, 0x4c1: 0x908a, 0x4c2: 0x910a, 0x4c3: 0x918a, 0x4c4: 0x923a, 0x4c5: 0x92ea, + 0x4c6: 0x936a, + 0x4d3: 0x93ea, 0x4d4: 0x94ca, 0x4d5: 0x95aa, 0x4d6: 0x968a, 0x4d7: 0x976a, + 0x4dd: 0x0010, + 0x4de: 0x0034, 0x4df: 0x0010, 0x4e0: 0x0010, 0x4e1: 0x0010, 0x4e2: 0x0010, 0x4e3: 0x0010, + 0x4e4: 0x0010, 0x4e5: 0x0010, 0x4e6: 0x0010, 0x4e7: 0x0010, 0x4e8: 0x0010, + 0x4ea: 0x0010, 0x4eb: 0x0010, 0x4ec: 0x0010, 0x4ed: 0x0010, 0x4ee: 0x0010, 0x4ef: 0x0010, + 0x4f0: 0x0010, 0x4f1: 0x0010, 0x4f2: 0x0010, 0x4f3: 0x0010, 0x4f4: 0x0010, 0x4f5: 0x0010, + 0x4f6: 0x0010, 0x4f8: 0x0010, 0x4f9: 0x0010, 0x4fa: 0x0010, 0x4fb: 0x0010, + 0x4fc: 0x0010, 0x4fe: 
0x0010, + // Block 0x14, offset 0x500 + 0x500: 0x2213, 0x501: 0x2213, 0x502: 0x2613, 0x503: 0x2613, 0x504: 0x2213, 0x505: 0x2213, + 0x506: 0x2e13, 0x507: 0x2e13, 0x508: 0x2213, 0x509: 0x2213, 0x50a: 0x2613, 0x50b: 0x2613, + 0x50c: 0x2213, 0x50d: 0x2213, 0x50e: 0x3e13, 0x50f: 0x3e13, 0x510: 0x2213, 0x511: 0x2213, + 0x512: 0x2613, 0x513: 0x2613, 0x514: 0x2213, 0x515: 0x2213, 0x516: 0x2e13, 0x517: 0x2e13, + 0x518: 0x2213, 0x519: 0x2213, 0x51a: 0x2613, 0x51b: 0x2613, 0x51c: 0x2213, 0x51d: 0x2213, + 0x51e: 0xb853, 0x51f: 0xb853, 0x520: 0xbb53, 0x521: 0xbb53, 0x522: 0x2212, 0x523: 0x2212, + 0x524: 0x2612, 0x525: 0x2612, 0x526: 0x2212, 0x527: 0x2212, 0x528: 0x2e12, 0x529: 0x2e12, + 0x52a: 0x2212, 0x52b: 0x2212, 0x52c: 0x2612, 0x52d: 0x2612, 0x52e: 0x2212, 0x52f: 0x2212, + 0x530: 0x3e12, 0x531: 0x3e12, 0x532: 0x2212, 0x533: 0x2212, 0x534: 0x2612, 0x535: 0x2612, + 0x536: 0x2212, 0x537: 0x2212, 0x538: 0x2e12, 0x539: 0x2e12, 0x53a: 0x2212, 0x53b: 0x2212, + 0x53c: 0x2612, 0x53d: 0x2612, 0x53e: 0x2212, 0x53f: 0x2212, + // Block 0x15, offset 0x540 + 0x542: 0x0010, + 0x547: 0x0010, 0x549: 0x0010, 0x54b: 0x0010, + 0x54d: 0x0010, 0x54e: 0x0010, 0x54f: 0x0010, 0x551: 0x0010, + 0x552: 0x0010, 0x554: 0x0010, 0x557: 0x0010, + 0x559: 0x0010, 0x55b: 0x0010, 0x55d: 0x0010, + 0x55f: 0x0010, 0x561: 0x0010, 0x562: 0x0010, + 0x564: 0x0010, 0x567: 0x0010, 0x568: 0x0010, 0x569: 0x0010, + 0x56a: 0x0010, 0x56c: 0x0010, 0x56d: 0x0010, 0x56e: 0x0010, 0x56f: 0x0010, + 0x570: 0x0010, 0x571: 0x0010, 0x572: 0x0010, 0x574: 0x0010, 0x575: 0x0010, + 0x576: 0x0010, 0x577: 0x0010, 0x579: 0x0010, 0x57a: 0x0010, 0x57b: 0x0010, + 0x57c: 0x0010, 0x57e: 0x0010, +} + +// caseIndex: 25 blocks, 1600 entries, 3200 bytes +// Block 0 is the zero block. +var caseIndex = [1600]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x14, 0xc3: 0x15, 0xc4: 0x16, 0xc5: 0x17, 0xc6: 0x01, 0xc7: 0x02, + 0xc8: 0x18, 0xc9: 0x03, 0xca: 0x04, 0xcb: 0x19, 0xcc: 0x1a, 0xcd: 0x05, 0xce: 0x06, 0xcf: 0x07, + 0xd0: 0x1b, 0xd1: 0x1c, 0xd2: 0x1d, 0xd3: 0x1e, 0xd4: 0x1f, 0xd5: 0x20, 0xd6: 0x08, 0xd7: 0x21, + 0xd8: 0x22, 0xd9: 0x23, 0xda: 0x24, 0xdb: 0x25, 0xdc: 0x26, 0xdd: 0x27, 0xde: 0x28, 0xdf: 0x29, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x07, 0xed: 0x08, 0xef: 0x09, + 0xf0: 0x14, 0xf3: 0x16, + // Block 0x4, offset 0x100 + 0x120: 0x2a, 0x121: 0x2b, 0x122: 0x2c, 0x123: 0x2d, 0x124: 0x2e, 0x125: 0x2f, 0x126: 0x30, 0x127: 0x31, + 0x128: 0x32, 0x129: 0x33, 0x12a: 0x34, 0x12b: 0x35, 0x12c: 0x36, 0x12d: 0x37, 0x12e: 0x38, 0x12f: 0x39, + 0x130: 0x3a, 0x131: 0x3b, 0x132: 0x3c, 0x133: 0x3d, 0x134: 0x3e, 0x135: 0x3f, 0x136: 0x40, 0x137: 0x41, + 0x138: 0x42, 0x139: 0x43, 0x13a: 0x44, 0x13b: 0x45, 0x13c: 0x46, 0x13d: 0x47, 0x13e: 0x48, 0x13f: 0x49, + // Block 0x5, offset 0x140 + 0x140: 0x4a, 0x141: 0x4b, 0x142: 0x4c, 0x143: 0x09, 0x144: 0x24, 0x145: 0x24, 0x146: 0x24, 0x147: 0x24, + 0x148: 0x24, 0x149: 0x4d, 0x14a: 0x4e, 0x14b: 0x4f, 0x14c: 0x50, 0x14d: 0x51, 0x14e: 0x52, 0x14f: 0x53, + 0x150: 0x54, 0x151: 0x24, 0x152: 0x24, 0x153: 0x24, 0x154: 0x24, 0x155: 0x24, 0x156: 0x24, 0x157: 0x24, + 0x158: 0x24, 0x159: 0x55, 0x15a: 0x56, 0x15b: 0x57, 0x15c: 0x58, 0x15d: 0x59, 0x15e: 0x5a, 0x15f: 0x5b, + 0x160: 0x5c, 0x161: 0x5d, 0x162: 0x5e, 0x163: 0x5f, 0x164: 0x60, 0x165: 0x61, 0x167: 0x62, + 0x168: 0x63, 0x169: 0x64, 0x16a: 0x65, 0x16c: 0x66, 0x16d: 0x67, 0x16e: 0x68, 0x16f: 0x69, + 0x170: 0x6a, 0x171: 0x6b, 0x172: 0x6c, 0x173: 0x6d, 0x174: 0x6e, 0x175: 0x6f, 
0x176: 0x70, 0x177: 0x71, + 0x178: 0x72, 0x179: 0x72, 0x17a: 0x73, 0x17b: 0x72, 0x17c: 0x74, 0x17d: 0x0a, 0x17e: 0x0b, 0x17f: 0x0c, + // Block 0x6, offset 0x180 + 0x180: 0x75, 0x181: 0x76, 0x182: 0x77, 0x183: 0x78, 0x184: 0x0d, 0x185: 0x79, 0x186: 0x7a, + 0x192: 0x7b, 0x193: 0x0e, + 0x1b0: 0x7c, 0x1b1: 0x0f, 0x1b2: 0x72, 0x1b3: 0x7d, 0x1b4: 0x7e, 0x1b5: 0x7f, 0x1b6: 0x80, 0x1b7: 0x81, + 0x1b8: 0x82, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x83, 0x1c2: 0x84, 0x1c3: 0x85, 0x1c4: 0x86, 0x1c5: 0x24, 0x1c6: 0x87, + // Block 0x8, offset 0x200 + 0x200: 0x88, 0x201: 0x24, 0x202: 0x24, 0x203: 0x24, 0x204: 0x24, 0x205: 0x24, 0x206: 0x24, 0x207: 0x24, + 0x208: 0x24, 0x209: 0x24, 0x20a: 0x24, 0x20b: 0x24, 0x20c: 0x24, 0x20d: 0x24, 0x20e: 0x24, 0x20f: 0x24, + 0x210: 0x24, 0x211: 0x24, 0x212: 0x89, 0x213: 0x8a, 0x214: 0x24, 0x215: 0x24, 0x216: 0x24, 0x217: 0x24, + 0x218: 0x8b, 0x219: 0x8c, 0x21a: 0x8d, 0x21b: 0x8e, 0x21c: 0x8f, 0x21d: 0x90, 0x21e: 0x10, 0x21f: 0x91, + 0x220: 0x92, 0x221: 0x93, 0x222: 0x24, 0x223: 0x94, 0x224: 0x95, 0x225: 0x96, 0x226: 0x97, 0x227: 0x98, + 0x228: 0x99, 0x229: 0x9a, 0x22a: 0x9b, 0x22b: 0x9c, 0x22c: 0x9d, 0x22d: 0x9e, 0x22e: 0x9f, 0x22f: 0xa0, + 0x230: 0x24, 0x231: 0x24, 0x232: 0x24, 0x233: 0x24, 0x234: 0x24, 0x235: 0x24, 0x236: 0x24, 0x237: 0x24, + 0x238: 0x24, 0x239: 0x24, 0x23a: 0x24, 0x23b: 0x24, 0x23c: 0x24, 0x23d: 0x24, 0x23e: 0x24, 0x23f: 0x24, + // Block 0x9, offset 0x240 + 0x240: 0x24, 0x241: 0x24, 0x242: 0x24, 0x243: 0x24, 0x244: 0x24, 0x245: 0x24, 0x246: 0x24, 0x247: 0x24, + 0x248: 0x24, 0x249: 0x24, 0x24a: 0x24, 0x24b: 0x24, 0x24c: 0x24, 0x24d: 0x24, 0x24e: 0x24, 0x24f: 0x24, + 0x250: 0x24, 0x251: 0x24, 0x252: 0x24, 0x253: 0x24, 0x254: 0x24, 0x255: 0x24, 0x256: 0x24, 0x257: 0x24, + 0x258: 0x24, 0x259: 0x24, 0x25a: 0x24, 0x25b: 0x24, 0x25c: 0x24, 0x25d: 0x24, 0x25e: 0x24, 0x25f: 0x24, + 0x260: 0x24, 0x261: 0x24, 0x262: 0x24, 0x263: 0x24, 0x264: 0x24, 0x265: 0x24, 0x266: 0x24, 0x267: 0x24, + 0x268: 0x24, 0x269: 0x24, 0x26a: 0x24, 0x26b: 0x24, 0x26c: 0x24, 0x26d: 0x24, 0x26e: 0x24, 0x26f: 0x24, + 0x270: 0x24, 0x271: 0x24, 0x272: 0x24, 0x273: 0x24, 0x274: 0x24, 0x275: 0x24, 0x276: 0x24, 0x277: 0x24, + 0x278: 0x24, 0x279: 0x24, 0x27a: 0x24, 0x27b: 0x24, 0x27c: 0x24, 0x27d: 0x24, 0x27e: 0x24, 0x27f: 0x24, + // Block 0xa, offset 0x280 + 0x280: 0x24, 0x281: 0x24, 0x282: 0x24, 0x283: 0x24, 0x284: 0x24, 0x285: 0x24, 0x286: 0x24, 0x287: 0x24, + 0x288: 0x24, 0x289: 0x24, 0x28a: 0x24, 0x28b: 0x24, 0x28c: 0x24, 0x28d: 0x24, 0x28e: 0x24, 0x28f: 0x24, + 0x290: 0x24, 0x291: 0x24, 0x292: 0x24, 0x293: 0x24, 0x294: 0x24, 0x295: 0x24, 0x296: 0x24, 0x297: 0x24, + 0x298: 0x24, 0x299: 0x24, 0x29a: 0x24, 0x29b: 0x24, 0x29c: 0x24, 0x29d: 0x24, 0x29e: 0xa1, 0x29f: 0xa2, + // Block 0xb, offset 0x2c0 + 0x2ec: 0x11, 0x2ed: 0xa3, 0x2ee: 0xa4, 0x2ef: 0xa5, + 0x2f0: 0x24, 0x2f1: 0x24, 0x2f2: 0x24, 0x2f3: 0x24, 0x2f4: 0xa6, 0x2f5: 0xa7, 0x2f6: 0xa8, 0x2f7: 0xa9, + 0x2f8: 0xaa, 0x2f9: 0xab, 0x2fa: 0x24, 0x2fb: 0xac, 0x2fc: 0xad, 0x2fd: 0xae, 0x2fe: 0xaf, 0x2ff: 0xb0, + // Block 0xc, offset 0x300 + 0x300: 0xb1, 0x301: 0xb2, 0x302: 0x24, 0x303: 0xb3, 0x305: 0xb4, 0x307: 0xb5, + 0x30a: 0xb6, 0x30b: 0xb7, 0x30c: 0xb8, 0x30d: 0xb9, 0x30e: 0xba, 0x30f: 0xbb, + 0x310: 0xbc, 0x311: 0xbd, 0x312: 0xbe, 0x313: 0xbf, 0x314: 0xc0, 0x315: 0xc1, + 0x318: 0x24, 0x319: 0x24, 0x31a: 0x24, 0x31b: 0x24, 0x31c: 0xc2, 0x31d: 0xc3, + 0x320: 0xc4, 0x321: 0xc5, 0x322: 0xc6, 0x323: 0xc7, 0x324: 0xc8, 0x326: 0xc9, + 0x328: 0xca, 0x329: 0xcb, 0x32a: 0xcc, 0x32b: 0xcd, 0x32c: 0x5f, 0x32d: 0xce, 0x32e: 0xcf, + 0x330: 0x24, 
0x331: 0xd0, 0x332: 0xd1, 0x333: 0xd2, 0x334: 0xd3, + 0x33c: 0xd4, 0x33d: 0xd5, + // Block 0xd, offset 0x340 + 0x340: 0xd6, 0x341: 0xd7, 0x342: 0xd8, 0x343: 0xd9, 0x344: 0xda, 0x345: 0xdb, 0x346: 0xdc, 0x347: 0xdd, + 0x348: 0xde, 0x34a: 0xdf, 0x34b: 0xe0, 0x34c: 0xe1, 0x34d: 0xe2, + 0x350: 0xe3, 0x351: 0xe4, 0x352: 0xe5, 0x353: 0xe6, 0x356: 0xe7, 0x357: 0xe8, + 0x358: 0xe9, 0x359: 0xea, 0x35a: 0xeb, 0x35b: 0xec, 0x35c: 0xed, + 0x360: 0xee, 0x362: 0xef, 0x363: 0xf0, + 0x368: 0xf1, 0x369: 0xf2, 0x36a: 0xf3, 0x36b: 0xf4, + 0x370: 0xf5, 0x371: 0xf6, 0x372: 0xf7, 0x374: 0xf8, 0x375: 0xf9, 0x376: 0xfa, + 0x37b: 0xfb, + // Block 0xe, offset 0x380 + 0x380: 0x24, 0x381: 0x24, 0x382: 0x24, 0x383: 0x24, 0x384: 0x24, 0x385: 0x24, 0x386: 0x24, 0x387: 0x24, + 0x388: 0x24, 0x389: 0x24, 0x38a: 0x24, 0x38b: 0x24, 0x38c: 0x24, 0x38d: 0x24, 0x38e: 0xfc, + 0x390: 0x24, 0x391: 0xfd, 0x392: 0x24, 0x393: 0x24, 0x394: 0x24, 0x395: 0xfe, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x24, 0x3c1: 0x24, 0x3c2: 0x24, 0x3c3: 0x24, 0x3c4: 0x24, 0x3c5: 0x24, 0x3c6: 0x24, 0x3c7: 0x24, + 0x3c8: 0x24, 0x3c9: 0x24, 0x3ca: 0x24, 0x3cb: 0x24, 0x3cc: 0x24, 0x3cd: 0x24, 0x3ce: 0x24, 0x3cf: 0x24, + 0x3d0: 0xfd, + // Block 0x10, offset 0x400 + 0x410: 0x24, 0x411: 0x24, 0x412: 0x24, 0x413: 0x24, 0x414: 0x24, 0x415: 0x24, 0x416: 0x24, 0x417: 0x24, + 0x418: 0x24, 0x419: 0xff, + // Block 0x11, offset 0x440 + 0x460: 0x24, 0x461: 0x24, 0x462: 0x24, 0x463: 0x24, 0x464: 0x24, 0x465: 0x24, 0x466: 0x24, 0x467: 0x24, + 0x468: 0xf4, 0x469: 0x100, 0x46b: 0x101, 0x46c: 0x102, 0x46d: 0x103, 0x46e: 0x104, + 0x479: 0x105, 0x47c: 0x24, 0x47d: 0x106, 0x47e: 0x107, 0x47f: 0x108, + // Block 0x12, offset 0x480 + 0x4b0: 0x24, 0x4b1: 0x109, 0x4b2: 0x10a, + // Block 0x13, offset 0x4c0 + 0x4c5: 0x10b, 0x4c6: 0x10c, + 0x4c9: 0x10d, + 0x4d0: 0x10e, 0x4d1: 0x10f, 0x4d2: 0x110, 0x4d3: 0x111, 0x4d4: 0x112, 0x4d5: 0x113, 0x4d6: 0x114, 0x4d7: 0x115, + 0x4d8: 0x116, 0x4d9: 0x117, 0x4da: 0x118, 0x4db: 0x119, 0x4dc: 0x11a, 0x4dd: 0x11b, 0x4de: 0x11c, 0x4df: 0x11d, + 0x4e8: 0x11e, 0x4e9: 0x11f, 0x4ea: 0x120, + // Block 0x14, offset 0x500 + 0x500: 0x121, + 0x520: 0x24, 0x521: 0x24, 0x522: 0x24, 0x523: 0x122, 0x524: 0x12, 0x525: 0x123, + 0x538: 0x124, 0x539: 0x13, 0x53a: 0x125, + // Block 0x15, offset 0x540 + 0x544: 0x126, 0x545: 0x127, 0x546: 0x128, + 0x54f: 0x129, + // Block 0x16, offset 0x580 + 0x590: 0x0a, 0x591: 0x0b, 0x592: 0x0c, 0x593: 0x0d, 0x594: 0x0e, 0x596: 0x0f, + 0x59b: 0x10, 0x59d: 0x11, 0x59e: 0x12, 0x59f: 0x13, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x12a, 0x5c1: 0x12b, 0x5c4: 0x12b, 0x5c5: 0x12b, 0x5c6: 0x12b, 0x5c7: 0x12c, + // Block 0x18, offset 0x600 + 0x620: 0x15, +} + +// sparseOffsets: 282 entries, 564 bytes +var sparseOffsets = []uint16{0x0, 0x9, 0xf, 0x18, 0x24, 0x2e, 0x35, 0x38, 0x3c, 0x3f, 0x43, 0x4d, 0x4f, 0x57, 0x5e, 0x63, 0x71, 0x72, 0x80, 0x8f, 0x99, 0x9c, 0xa3, 0xab, 0xae, 0xb0, 0xbf, 0xc5, 0xd3, 0xde, 0xeb, 0xf6, 0x102, 0x10c, 0x118, 0x123, 0x12f, 0x13b, 0x143, 0x14c, 0x156, 0x161, 0x16d, 0x174, 0x17f, 0x184, 0x18c, 0x18f, 0x194, 0x198, 0x19c, 0x1a3, 0x1ac, 0x1b4, 0x1b5, 0x1be, 0x1c5, 0x1cd, 0x1d3, 0x1d8, 0x1dc, 0x1df, 0x1e1, 0x1e4, 0x1e9, 0x1ea, 0x1ec, 0x1ee, 0x1f0, 0x1f7, 0x1fc, 0x200, 0x209, 0x20c, 0x20f, 0x215, 0x216, 0x221, 0x222, 0x223, 0x228, 0x235, 0x23d, 0x245, 0x24e, 0x257, 0x260, 0x265, 0x268, 0x273, 0x280, 0x282, 0x289, 0x28b, 0x297, 0x298, 0x2a3, 0x2ab, 0x2b3, 0x2b9, 0x2ba, 0x2c8, 0x2cd, 0x2d0, 0x2d5, 0x2d9, 0x2df, 0x2e4, 0x2e7, 0x2ec, 0x2f1, 0x2f2, 0x2f8, 0x2fa, 0x2fb, 0x2fd, 0x2ff, 0x302, 0x303, 0x305, 0x308, 0x30e, 0x312, 
0x314, 0x319, 0x320, 0x324, 0x32d, 0x32e, 0x337, 0x33b, 0x340, 0x348, 0x34e, 0x354, 0x35e, 0x363, 0x36c, 0x372, 0x379, 0x37d, 0x385, 0x387, 0x389, 0x38c, 0x38e, 0x390, 0x391, 0x392, 0x394, 0x396, 0x39c, 0x3a1, 0x3a3, 0x3a9, 0x3ac, 0x3ae, 0x3b4, 0x3b9, 0x3bb, 0x3bc, 0x3bd, 0x3be, 0x3c0, 0x3c2, 0x3c4, 0x3c7, 0x3c9, 0x3cc, 0x3d4, 0x3d7, 0x3db, 0x3e3, 0x3e5, 0x3e6, 0x3e7, 0x3e9, 0x3ef, 0x3f1, 0x3f2, 0x3f4, 0x3f6, 0x3f8, 0x405, 0x406, 0x407, 0x40b, 0x40d, 0x40e, 0x40f, 0x410, 0x411, 0x414, 0x417, 0x41d, 0x421, 0x425, 0x42b, 0x42e, 0x435, 0x439, 0x43d, 0x444, 0x44d, 0x453, 0x459, 0x463, 0x46d, 0x46f, 0x477, 0x47d, 0x483, 0x489, 0x48c, 0x492, 0x495, 0x49d, 0x49e, 0x4a5, 0x4a9, 0x4aa, 0x4ad, 0x4b5, 0x4bb, 0x4c2, 0x4c3, 0x4c9, 0x4cc, 0x4d4, 0x4db, 0x4e5, 0x4ed, 0x4f0, 0x4f1, 0x4f2, 0x4f3, 0x4f4, 0x4f6, 0x4f8, 0x4fa, 0x4fe, 0x4ff, 0x501, 0x503, 0x504, 0x505, 0x507, 0x50c, 0x511, 0x515, 0x516, 0x519, 0x51d, 0x528, 0x52c, 0x534, 0x539, 0x53d, 0x540, 0x544, 0x547, 0x54a, 0x54f, 0x553, 0x557, 0x55b, 0x55f, 0x561, 0x563, 0x566, 0x56b, 0x56d, 0x572, 0x57b, 0x580, 0x581, 0x584, 0x585, 0x586, 0x588, 0x589, 0x58a} + +// sparseValues: 1418 entries, 5672 bytes +var sparseValues = [1418]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0004, lo: 0xa8, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xaa}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0004, lo: 0xaf, hi: 0xaf}, + {value: 0x0004, lo: 0xb4, hi: 0xb4}, + {value: 0x001a, lo: 0xb5, hi: 0xb5}, + {value: 0x0054, lo: 0xb7, hi: 0xb7}, + {value: 0x0004, lo: 0xb8, hi: 0xb8}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + // Block 0x1, offset 0x9 + {value: 0x2013, lo: 0x80, hi: 0x96}, + {value: 0x2013, lo: 0x98, hi: 0x9e}, + {value: 0x009a, lo: 0x9f, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xb6}, + {value: 0x2012, lo: 0xb8, hi: 0xbe}, + {value: 0x0252, lo: 0xbf, hi: 0xbf}, + // Block 0x2, offset 0xf + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x011b, lo: 0xb0, hi: 0xb0}, + {value: 0x019a, lo: 0xb1, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb7}, + {value: 0x0012, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x0553, lo: 0xbf, hi: 0xbf}, + // Block 0x3, offset 0x18 + {value: 0x0552, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x01da, lo: 0x89, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xb7}, + {value: 0x0253, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x028a, lo: 0xbf, hi: 0xbf}, + // Block 0x4, offset 0x24 + {value: 0x0117, lo: 0x80, hi: 0x9f}, + {value: 0x2f53, lo: 0xa0, hi: 0xa0}, + {value: 0x0012, lo: 0xa1, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xb3}, + {value: 0x0012, lo: 0xb4, hi: 0xb9}, + {value: 0x090b, lo: 0xba, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x2953, lo: 0xbd, hi: 0xbd}, + {value: 0x098b, lo: 0xbe, hi: 0xbe}, + {value: 0x0a0a, lo: 0xbf, hi: 0xbf}, + // Block 0x5, offset 0x2e + {value: 0x0015, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x97}, + {value: 0x0004, lo: 0x98, hi: 0x9d}, + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0015, lo: 0xa0, hi: 0xa4}, + {value: 0x0004, lo: 0xa5, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xbf}, + // Block 0x6, offset 0x35 + {value: 0x0024, lo: 0x80, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0xbc}, + {value: 0x0024, 
lo: 0xbd, hi: 0xbf}, + // Block 0x7, offset 0x38 + {value: 0x6553, lo: 0x80, hi: 0x8f}, + {value: 0x2013, lo: 0x90, hi: 0x9f}, + {value: 0x5f53, lo: 0xa0, hi: 0xaf}, + {value: 0x2012, lo: 0xb0, hi: 0xbf}, + // Block 0x8, offset 0x3c + {value: 0x5f52, lo: 0x80, hi: 0x8f}, + {value: 0x6552, lo: 0x90, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x9, offset 0x3f + {value: 0x0117, lo: 0x80, hi: 0x81}, + {value: 0x0024, lo: 0x83, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xbf}, + // Block 0xa, offset 0x43 + {value: 0x0f13, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0716, lo: 0x8b, hi: 0x8c}, + {value: 0x0316, lo: 0x8d, hi: 0x8e}, + {value: 0x0f12, lo: 0x8f, hi: 0x8f}, + {value: 0x0117, lo: 0x90, hi: 0xbf}, + // Block 0xb, offset 0x4d + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x6553, lo: 0xb1, hi: 0xbf}, + // Block 0xc, offset 0x4f + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6853, lo: 0x90, hi: 0x96}, + {value: 0x0014, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9b, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0xa0, hi: 0xa0}, + {value: 0x6552, lo: 0xa1, hi: 0xaf}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0xd, offset 0x57 + {value: 0x0034, lo: 0x81, hi: 0x82}, + {value: 0x0024, lo: 0x84, hi: 0x84}, + {value: 0x0034, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xaa}, + {value: 0x0010, lo: 0xaf, hi: 0xb3}, + {value: 0x0054, lo: 0xb4, hi: 0xb4}, + // Block 0xe, offset 0x5e + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0024, lo: 0x90, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0014, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xf, offset 0x63 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9c}, + {value: 0x0024, lo: 0x9d, hi: 0x9e}, + {value: 0x0034, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x10, offset 0x71 + {value: 0x0010, lo: 0x80, hi: 0xbf}, + // Block 0x11, offset 0x72 + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0024, lo: 0x9f, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x12, offset 0x80 + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0034, lo: 0xb1, hi: 0xb1}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 
0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0024, lo: 0xbf, hi: 0xbf}, + // Block 0x13, offset 0x8f + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0024, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x88}, + {value: 0x0024, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8d, hi: 0xbf}, + // Block 0x14, offset 0x99 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x15, offset 0x9c + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xb1}, + {value: 0x0034, lo: 0xb2, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0x16, offset 0xa3 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x99}, + {value: 0x0014, lo: 0x9a, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0xa3}, + {value: 0x0014, lo: 0xa4, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xad}, + // Block 0x17, offset 0xab + {value: 0x0010, lo: 0x80, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0xa0, hi: 0xaa}, + // Block 0x18, offset 0xae + {value: 0x0010, lo: 0xa0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbd}, + // Block 0x19, offset 0xb0 + {value: 0x0034, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0024, lo: 0xaa, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbf}, + // Block 0x1a, offset 0xbf + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1b, offset 0xc5 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0x1c, offset 0xd3 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb6, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, 
lo: 0xbd, hi: 0xbf}, + // Block 0x1d, offset 0xde + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xb1}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + // Block 0x1e, offset 0xeb + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x1f, offset 0xf6 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x99, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x20, offset 0x102 + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x21, offset 0x10c + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x85}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xbf}, + // Block 0x22, offset 0x118 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x23, offset 0x123 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x24, offset 0x12f + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, 
lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0010, lo: 0xa8, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x25, offset 0x13b + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x26, offset 0x143 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb9}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbf}, + // Block 0x27, offset 0x14c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x88}, + {value: 0x0014, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x28, offset 0x156 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x29, offset 0x161 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb2}, + // Block 0x2a, offset 0x16d + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x2b, offset 0x174 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x94, hi: 0x97}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xba, hi: 0xbf}, + // Block 0x2c, offset 0x17f + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x96}, + {value: 0x0010, lo: 0x9a, hi: 0xb1}, + {value: 0x0010, lo: 0xb3, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x2d, offset 0x184 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 
0x8f, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x94}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9f}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + // Block 0x2e, offset 0x18c + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + // Block 0x2f, offset 0x18f + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x30, offset 0x194 + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xb9}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + // Block 0x31, offset 0x198 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x32, offset 0x19c + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0034, lo: 0x98, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0034, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x33, offset 0x1a3 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xac}, + {value: 0x0034, lo: 0xb1, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xba, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x34, offset 0x1ac + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0024, lo: 0x82, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x86, hi: 0x87}, + {value: 0x0010, lo: 0x88, hi: 0x8c}, + {value: 0x0014, lo: 0x8d, hi: 0x97}, + {value: 0x0014, lo: 0x99, hi: 0xbc}, + // Block 0x35, offset 0x1b4 + {value: 0x0034, lo: 0x86, hi: 0x86}, + // Block 0x36, offset 0x1b5 + {value: 0x0010, lo: 0xab, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbe}, + // Block 0x37, offset 0x1be + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x96, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x99}, + {value: 0x0014, lo: 0x9e, hi: 0xa0}, + {value: 0x0010, lo: 0xa2, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xad}, + {value: 0x0014, lo: 0xb1, hi: 0xb4}, + // Block 0x38, offset 0x1c5 + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x6c53, lo: 0xa0, hi: 0xbf}, + // Block 0x39, offset 0x1cd + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x9a, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x3a, offset 0x1d3 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, 
hi: 0xbe}, + // Block 0x3b, offset 0x1d8 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x82, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3c, offset 0x1dc + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3d, offset 0x1df + {value: 0x0010, lo: 0x80, hi: 0x9a}, + {value: 0x0024, lo: 0x9d, hi: 0x9f}, + // Block 0x3e, offset 0x1e1 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x7453, lo: 0xa0, hi: 0xaf}, + {value: 0x7853, lo: 0xb0, hi: 0xbf}, + // Block 0x3f, offset 0x1e4 + {value: 0x7c53, lo: 0x80, hi: 0x8f}, + {value: 0x8053, lo: 0x90, hi: 0x9f}, + {value: 0x7c53, lo: 0xa0, hi: 0xaf}, + {value: 0x0813, lo: 0xb0, hi: 0xb5}, + {value: 0x0892, lo: 0xb8, hi: 0xbd}, + // Block 0x40, offset 0x1e9 + {value: 0x0010, lo: 0x81, hi: 0xbf}, + // Block 0x41, offset 0x1ea + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0010, lo: 0xaf, hi: 0xbf}, + // Block 0x42, offset 0x1ec + {value: 0x0010, lo: 0x81, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x43, offset 0x1ee + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb8}, + // Block 0x44, offset 0x1f0 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0034, lo: 0x94, hi: 0x94}, + {value: 0x0010, lo: 0xa0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + // Block 0x45, offset 0x1f7 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xac}, + {value: 0x0010, lo: 0xae, hi: 0xb0}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + // Block 0x46, offset 0x1fc + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x47, offset 0x200 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0014, lo: 0x93, hi: 0x93}, + {value: 0x0004, lo: 0x97, hi: 0x97}, + {value: 0x0024, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0x48, offset 0x209 + {value: 0x0014, lo: 0x8b, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x49, offset 0x20c + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb8}, + // Block 0x4a, offset 0x20f + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x4b, offset 0x215 + {value: 0x0010, lo: 0x80, hi: 0xb5}, + // Block 0x4c, offset 0x216 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbb}, + // Block 0x4d, offset 0x221 + {value: 0x0010, lo: 0x86, hi: 0x8f}, + // Block 0x4e, offset 0x222 + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x4f, 
offset 0x223 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + // Block 0x50, offset 0x228 + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x9e}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xac}, + {value: 0x0010, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xbc}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x51, offset 0x235 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xa7, hi: 0xa7}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + {value: 0x0034, lo: 0xb5, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbc}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0x52, offset 0x23d + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x53, offset 0x245 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0030, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xab, hi: 0xab}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + {value: 0x0024, lo: 0xad, hi: 0xb3}, + // Block 0x54, offset 0x24e + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0030, lo: 0xaa, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbf}, + // Block 0x55, offset 0x257 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0030, lo: 0xb2, hi: 0xb3}, + // Block 0x56, offset 0x260 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0x57, offset 0x265 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8d, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x58, offset 0x268 + {value: 0x316a, lo: 0x80, hi: 0x80}, + {value: 0x31ea, lo: 0x81, hi: 0x81}, + {value: 0x326a, lo: 0x82, hi: 0x82}, + {value: 0x32ea, lo: 0x83, hi: 0x83}, + {value: 0x336a, lo: 0x84, hi: 0x84}, + {value: 0x33ea, lo: 0x85, hi: 0x85}, + {value: 0x346a, lo: 0x86, hi: 0x86}, + {value: 0x34ea, lo: 0x87, hi: 0x87}, + {value: 0x356a, lo: 0x88, hi: 0x88}, + {value: 0x8353, lo: 0x90, hi: 0xba}, + {value: 0x8353, lo: 0xbd, hi: 0xbf}, + // Block 0x59, offset 0x273 + {value: 0x0024, lo: 0x90, hi: 0x92}, + {value: 0x0034, lo: 0x94, hi: 0x99}, + {value: 0x0024, lo: 0x9a, hi: 0x9b}, + 
{value: 0x0034, lo: 0x9c, hi: 0x9f}, + {value: 0x0024, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xb3}, + {value: 0x0024, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb7}, + {value: 0x0024, lo: 0xb8, hi: 0xb9}, + // Block 0x5a, offset 0x280 + {value: 0x0012, lo: 0x80, hi: 0xab}, + {value: 0x0015, lo: 0xac, hi: 0xbf}, + // Block 0x5b, offset 0x282 + {value: 0x0015, lo: 0x80, hi: 0xaa}, + {value: 0x0012, lo: 0xab, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb8}, + {value: 0x8752, lo: 0xb9, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbc}, + {value: 0x8b52, lo: 0xbd, hi: 0xbd}, + {value: 0x0012, lo: 0xbe, hi: 0xbf}, + // Block 0x5c, offset 0x289 + {value: 0x0012, lo: 0x80, hi: 0x9a}, + {value: 0x0015, lo: 0x9b, hi: 0xbf}, + // Block 0x5d, offset 0x28b + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0024, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb9}, + {value: 0x0024, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbd}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x5e, offset 0x297 + {value: 0x0117, lo: 0x80, hi: 0xbf}, + // Block 0x5f, offset 0x298 + {value: 0x0117, lo: 0x80, hi: 0x95}, + {value: 0x361a, lo: 0x96, hi: 0x96}, + {value: 0x36ca, lo: 0x97, hi: 0x97}, + {value: 0x377a, lo: 0x98, hi: 0x98}, + {value: 0x382a, lo: 0x99, hi: 0x99}, + {value: 0x38da, lo: 0x9a, hi: 0x9a}, + {value: 0x398a, lo: 0x9b, hi: 0x9b}, + {value: 0x0012, lo: 0x9c, hi: 0x9d}, + {value: 0x3a3b, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0x9f, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x60, offset 0x2a3 + {value: 0x0812, lo: 0x80, hi: 0x87}, + {value: 0x0813, lo: 0x88, hi: 0x8f}, + {value: 0x0812, lo: 0x90, hi: 0x95}, + {value: 0x0813, lo: 0x98, hi: 0x9d}, + {value: 0x0812, lo: 0xa0, hi: 0xa7}, + {value: 0x0813, lo: 0xa8, hi: 0xaf}, + {value: 0x0812, lo: 0xb0, hi: 0xb7}, + {value: 0x0813, lo: 0xb8, hi: 0xbf}, + // Block 0x61, offset 0x2ab + {value: 0x0004, lo: 0x8b, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8f}, + {value: 0x0054, lo: 0x98, hi: 0x99}, + {value: 0x0054, lo: 0xa4, hi: 0xa4}, + {value: 0x0054, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xaa, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xaf}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x62, offset 0x2b3 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x94, hi: 0x94}, + {value: 0x0014, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa6, hi: 0xaf}, + {value: 0x0015, lo: 0xb1, hi: 0xb1}, + {value: 0x0015, lo: 0xbf, hi: 0xbf}, + // Block 0x63, offset 0x2b9 + {value: 0x0015, lo: 0x90, hi: 0x9c}, + // Block 0x64, offset 0x2ba + {value: 0x0024, lo: 0x90, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0xa0}, + {value: 0x0024, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa4}, + {value: 0x0034, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + // Block 0x65, offset 
0x2c8 + {value: 0x0016, lo: 0x85, hi: 0x86}, + {value: 0x0012, lo: 0x87, hi: 0x89}, + {value: 0xa052, lo: 0x8e, hi: 0x8e}, + {value: 0x1013, lo: 0xa0, hi: 0xaf}, + {value: 0x1012, lo: 0xb0, hi: 0xbf}, + // Block 0x66, offset 0x2cd + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x88}, + // Block 0x67, offset 0x2d0 + {value: 0xa353, lo: 0xb6, hi: 0xb7}, + {value: 0xa653, lo: 0xb8, hi: 0xb9}, + {value: 0xa953, lo: 0xba, hi: 0xbb}, + {value: 0xa653, lo: 0xbc, hi: 0xbd}, + {value: 0xa353, lo: 0xbe, hi: 0xbf}, + // Block 0x68, offset 0x2d5 + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6553, lo: 0x90, hi: 0x9f}, + {value: 0xac53, lo: 0xa0, hi: 0xae}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0x69, offset 0x2d9 + {value: 0x0117, lo: 0x80, hi: 0xa3}, + {value: 0x0012, lo: 0xa4, hi: 0xa4}, + {value: 0x0716, lo: 0xab, hi: 0xac}, + {value: 0x0316, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb3}, + // Block 0x6a, offset 0x2df + {value: 0x6c52, lo: 0x80, hi: 0x9f}, + {value: 0x7052, lo: 0xa0, hi: 0xa5}, + {value: 0x7052, lo: 0xa7, hi: 0xa7}, + {value: 0x7052, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x6b, offset 0x2e4 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x6c, offset 0x2e7 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0010, lo: 0xb0, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x6d, offset 0x2ec + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9e}, + {value: 0x0024, lo: 0xa0, hi: 0xbf}, + // Block 0x6e, offset 0x2f1 + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + // Block 0x6f, offset 0x2f2 + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0xaa, hi: 0xad}, + {value: 0x0030, lo: 0xae, hi: 0xaf}, + {value: 0x0004, lo: 0xb1, hi: 0xb5}, + {value: 0x0014, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x70, offset 0x2f8 + {value: 0x0034, lo: 0x99, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9e}, + // Block 0x71, offset 0x2fa + {value: 0x0004, lo: 0xbc, hi: 0xbe}, + // Block 0x72, offset 0x2fb + {value: 0x0010, lo: 0x85, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x73, offset 0x2fd + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0010, lo: 0xa0, hi: 0xba}, + // Block 0x74, offset 0x2ff + {value: 0x0010, lo: 0x80, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0xbf}, + // Block 0x75, offset 0x302 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + // Block 0x76, offset 0x303 + {value: 0x0010, lo: 0x90, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x77, offset 0x305 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0xab}, + // Block 0x78, offset 0x308 + {value: 0x0117, lo: 0x80, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb2}, + {value: 0x0024, lo: 0xb4, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x79, offset 0x30e + {value: 0x0117, lo: 0x80, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9d}, + {value: 0x0024, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x7a, offset 0x312 + {value: 0x0010, lo: 0x80, hi: 
0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb1}, + // Block 0x7b, offset 0x314 + {value: 0x0004, lo: 0x80, hi: 0x96}, + {value: 0x0014, lo: 0x97, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xaf}, + {value: 0x0012, lo: 0xb0, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xbf}, + // Block 0x7c, offset 0x319 + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x0015, lo: 0xb0, hi: 0xb0}, + {value: 0x0012, lo: 0xb1, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x8753, lo: 0xbd, hi: 0xbd}, + {value: 0x0117, lo: 0xbe, hi: 0xbf}, + // Block 0x7d, offset 0x320 + {value: 0x0010, lo: 0xb7, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbf}, + // Block 0x7e, offset 0x324 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x8c, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + // Block 0x7f, offset 0x32d + {value: 0x0010, lo: 0x80, hi: 0xb3}, + // Block 0x80, offset 0x32e + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xa0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb7}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x81, offset 0x337 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x82, offset 0x33b + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0x92}, + {value: 0x0030, lo: 0x93, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0x83, offset 0x340 + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x84, offset 0x348 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0004, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x85, offset 0x34e + {value: 0x0010, lo: 0x80, hi: 0xa8}, + {value: 0x0014, lo: 0xa9, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0x86, offset 0x354 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x87, offset 0x35e + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0024, lo: 0xbe, hi: 0xbf}, + // Block 0x88, offset 0x363 + 
{value: 0x0024, lo: 0x81, hi: 0x81}, + {value: 0x0004, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + // Block 0x89, offset 0x36c + {value: 0x0010, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x8e}, + {value: 0x0010, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x8a, offset 0x372 + {value: 0x0012, lo: 0x80, hi: 0x92}, + {value: 0xaf52, lo: 0x93, hi: 0x93}, + {value: 0x0012, lo: 0x94, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa5}, + {value: 0x74d2, lo: 0xb0, hi: 0xbf}, + // Block 0x8b, offset 0x379 + {value: 0x78d2, lo: 0x80, hi: 0x8f}, + {value: 0x7cd2, lo: 0x90, hi: 0x9f}, + {value: 0x80d2, lo: 0xa0, hi: 0xaf}, + {value: 0x7cd2, lo: 0xb0, hi: 0xbf}, + // Block 0x8c, offset 0x37d + {value: 0x0010, lo: 0x80, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x8d, offset 0x385 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x8e, offset 0x387 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x8b, hi: 0xbb}, + // Block 0x8f, offset 0x389 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0xbf}, + // Block 0x90, offset 0x38c + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0004, lo: 0xb2, hi: 0xbf}, + // Block 0x91, offset 0x38e + {value: 0x0004, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x93, hi: 0xbf}, + // Block 0x92, offset 0x390 + {value: 0x0010, lo: 0x80, hi: 0xbd}, + // Block 0x93, offset 0x391 + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0x94, offset 0x392 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0xbf}, + // Block 0x95, offset 0x394 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0xb0, hi: 0xbb}, + // Block 0x96, offset 0x396 + {value: 0x0014, lo: 0x80, hi: 0x8f}, + {value: 0x0054, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0xa0, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xad}, + {value: 0x0024, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + // Block 0x97, offset 0x39c + {value: 0x0010, lo: 0x8d, hi: 0x8f}, + {value: 0x0054, lo: 0x92, hi: 0x92}, + {value: 0x0054, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0xb0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x98, offset 0x3a1 + {value: 0x0010, lo: 0x80, hi: 0xbc}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x99, offset 0x3a3 + {value: 0x0054, lo: 0x87, hi: 0x87}, + {value: 0x0054, lo: 0x8e, hi: 0x8e}, + {value: 0x0054, lo: 0x9a, hi: 0x9a}, + {value: 0x5f53, lo: 0xa1, hi: 0xba}, + {value: 0x0004, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9a, offset 0x3a9 + {value: 0x0004, lo: 0x80, hi: 0x80}, + {value: 0x5f52, lo: 0x81, hi: 0x9a}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + // Block 0x9b, offset 0x3ac + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbe}, + // Block 0x9c, offset 0x3ae + {value: 0x0010, lo: 0x82, hi: 
0x87}, + {value: 0x0010, lo: 0x8a, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0x97}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0004, lo: 0xa3, hi: 0xa3}, + {value: 0x0014, lo: 0xb9, hi: 0xbb}, + // Block 0x9d, offset 0x3b4 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0010, lo: 0x8d, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xba}, + {value: 0x0010, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9e, offset 0x3b9 + {value: 0x0010, lo: 0x80, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x9d}, + // Block 0x9f, offset 0x3bb + {value: 0x0010, lo: 0x80, hi: 0xba}, + // Block 0xa0, offset 0x3bc + {value: 0x0010, lo: 0x80, hi: 0xb4}, + // Block 0xa1, offset 0x3bd + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0xa2, offset 0x3be + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa3, offset 0x3c0 + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + // Block 0xa4, offset 0x3c2 + {value: 0x0010, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xad, hi: 0xbf}, + // Block 0xa5, offset 0x3c4 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0xb5}, + {value: 0x0024, lo: 0xb6, hi: 0xba}, + // Block 0xa6, offset 0x3c7 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa7, offset 0x3c9 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x91, hi: 0x95}, + // Block 0xa8, offset 0x3cc + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x97}, + {value: 0xb253, lo: 0x98, hi: 0x9f}, + {value: 0xb553, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbf}, + // Block 0xa9, offset 0x3d4 + {value: 0xb252, lo: 0x80, hi: 0x87}, + {value: 0xb552, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xaa, offset 0x3d7 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0xb553, lo: 0xb0, hi: 0xb7}, + {value: 0xb253, lo: 0xb8, hi: 0xbf}, + // Block 0xab, offset 0x3db + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x93}, + {value: 0xb552, lo: 0x98, hi: 0x9f}, + {value: 0xb252, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbb}, + // Block 0xac, offset 0x3e3 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xad, offset 0x3e5 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + // Block 0xae, offset 0x3e6 + {value: 0x0010, lo: 0x80, hi: 0xb6}, + // Block 0xaf, offset 0x3e7 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + // Block 0xb0, offset 0x3e9 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xb1, offset 0x3ef + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xb2, offset 0x3f1 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + // Block 0xb3, offset 0x3f2 + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + // Block 0xb4, offset 0x3f4 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb9}, + // Block 0xb5, offset 0x3f6 + {value: 0x0010, lo: 0x80, 
hi: 0xb7}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0xb6, offset 0x3f8 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x8e, hi: 0x8e}, + {value: 0x0024, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x99, hi: 0xb5}, + {value: 0x0024, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xb7, offset 0x405 + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0xb8, offset 0x406 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + // Block 0xb9, offset 0x407 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + // Block 0xba, offset 0x40b + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + // Block 0xbb, offset 0x40d + {value: 0x0010, lo: 0x80, hi: 0x91}, + // Block 0xbc, offset 0x40e + {value: 0x0010, lo: 0x80, hi: 0x88}, + // Block 0xbd, offset 0x40f + {value: 0x5653, lo: 0x80, hi: 0xb2}, + // Block 0xbe, offset 0x410 + {value: 0x5652, lo: 0x80, hi: 0xb2}, + // Block 0xbf, offset 0x411 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc0, offset 0x414 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xc1, offset 0x417 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x87}, + {value: 0x0024, lo: 0x88, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x8b}, + {value: 0x0024, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + // Block 0xc2, offset 0x41d + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xc3, offset 0x421 + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xc4, offset 0x425 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + // Block 0xc5, offset 0x42b + {value: 0x0014, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc6, offset 0x42e + {value: 0x0024, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0xc7, offset 0x435 + {value: 0x0010, lo: 0x84, hi: 0x86}, + {value: 0x0010, lo: 0x90, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + // Block 0xc8, offset 0x439 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xc9, offset 0x43d + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x89, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0010, 
lo: 0x90, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + // Block 0xca, offset 0x444 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb4}, + {value: 0x0030, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xb7}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0xcb, offset 0x44d + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xcc, offset 0x453 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa2}, + {value: 0x0014, lo: 0xa3, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xcd, offset 0x459 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xce, offset 0x463 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0030, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9d, hi: 0xa3}, + {value: 0x0024, lo: 0xa6, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + // Block 0xcf, offset 0x46d + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xd0, offset 0x46f + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0x9e, hi: 0x9e}, + // Block 0xd1, offset 0x477 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xd2, offset 0x47d + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd3, offset 0x483 + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd4, offset 0x489 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x98, hi: 0x9b}, + {value: 0x0014, lo: 0x9c, hi: 0x9d}, + // Block 0xd5, offset 0x48c + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd6, offset 0x492 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 
0x84, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd7, offset 0x495 + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0014, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb5}, + {value: 0x0030, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0xd8, offset 0x49d + {value: 0x0010, lo: 0x80, hi: 0x89}, + // Block 0xd9, offset 0x49e + {value: 0x0014, lo: 0x9d, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xda, offset 0x4a5 + {value: 0x0010, lo: 0x80, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + // Block 0xdb, offset 0x4a9 + {value: 0x5f53, lo: 0xa0, hi: 0xbf}, + // Block 0xdc, offset 0x4aa + {value: 0x5f52, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xdd, offset 0x4ad + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x8a}, + {value: 0x0010, lo: 0x8b, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbe}, + // Block 0xde, offset 0x4b5 + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0014, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x98}, + {value: 0x0014, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0xbf}, + // Block 0xdf, offset 0x4bb + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x86, hi: 0x89}, + {value: 0x0014, lo: 0x8a, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9d, hi: 0x9d}, + // Block 0xe0, offset 0x4c2 + {value: 0x0010, lo: 0x80, hi: 0xb8}, + // Block 0xe1, offset 0x4c3 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xe2, offset 0x4c9 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0xe3, offset 0x4cc + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0014, lo: 0x92, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xa9}, + {value: 0x0014, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0xe4, offset 0x4d4 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb6}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xe5, offset 0x4db + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa5}, + 
{value: 0x0010, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xbf}, + // Block 0xe6, offset 0x4e5 + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0014, lo: 0x90, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0x96}, + {value: 0x0034, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xe7, offset 0x4ed + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + // Block 0xe8, offset 0x4f0 + {value: 0x0010, lo: 0x80, hi: 0x99}, + // Block 0xe9, offset 0x4f1 + {value: 0x0010, lo: 0x80, hi: 0xae}, + // Block 0xea, offset 0x4f2 + {value: 0x0010, lo: 0x80, hi: 0x83}, + // Block 0xeb, offset 0x4f3 + {value: 0x0010, lo: 0x80, hi: 0x86}, + // Block 0xec, offset 0x4f4 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xed, offset 0x4f6 + {value: 0x0010, lo: 0x90, hi: 0xad}, + {value: 0x0034, lo: 0xb0, hi: 0xb4}, + // Block 0xee, offset 0x4f8 + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + // Block 0xef, offset 0x4fa + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa3, hi: 0xb7}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xf0, offset 0x4fe + {value: 0x0010, lo: 0x80, hi: 0x8f}, + // Block 0xf1, offset 0x4ff + {value: 0x2013, lo: 0x80, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xbf}, + // Block 0xf2, offset 0x501 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0xbe}, + // Block 0xf3, offset 0x503 + {value: 0x0014, lo: 0x8f, hi: 0x9f}, + // Block 0xf4, offset 0x504 + {value: 0x0014, lo: 0xa0, hi: 0xa1}, + // Block 0xf5, offset 0x505 + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbc}, + // Block 0xf6, offset 0x507 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0034, lo: 0x9e, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa3}, + // Block 0xf7, offset 0x50c + {value: 0x0030, lo: 0xa5, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xa9}, + {value: 0x0030, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbf}, + // Block 0xf8, offset 0x511 + {value: 0x0034, lo: 0x80, hi: 0x82}, + {value: 0x0024, lo: 0x85, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8b}, + {value: 0x0024, lo: 0xaa, hi: 0xad}, + // Block 0xf9, offset 0x515 + {value: 0x0024, lo: 0x82, hi: 0x84}, + // Block 0xfa, offset 0x516 + {value: 0x0013, lo: 0x80, hi: 0x99}, + {value: 0x0012, lo: 0x9a, hi: 0xb3}, + {value: 0x0013, lo: 0xb4, hi: 0xbf}, + // Block 0xfb, offset 0x519 + {value: 0x0013, lo: 0x80, hi: 0x8d}, + {value: 0x0012, lo: 0x8e, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0xa7}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0xfc, offset 0x51d + {value: 0x0013, lo: 0x80, hi: 0x81}, + {value: 0x0012, lo: 0x82, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0x9c}, + {value: 0x0013, lo: 0x9e, hi: 0x9f}, + {value: 0x0013, lo: 0xa2, hi: 0xa2}, + {value: 0x0013, lo: 0xa5, hi: 0xa6}, + {value: 0x0013, lo: 0xa9, hi: 0xac}, + {value: 0x0013, lo: 0xae, hi: 0xb5}, + {value: 0x0012, lo: 0xb6, hi: 0xb9}, + {value: 0x0012, lo: 0xbb, hi: 0xbb}, + {value: 0x0012, lo: 0xbd, hi: 0xbf}, + // Block 0xfd, offset 0x528 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0012, lo: 0x85, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0012, lo: 0xaa, 
hi: 0xbf}, + // Block 0xfe, offset 0x52c + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0013, lo: 0x84, hi: 0x85}, + {value: 0x0013, lo: 0x87, hi: 0x8a}, + {value: 0x0013, lo: 0x8d, hi: 0x94}, + {value: 0x0013, lo: 0x96, hi: 0x9c}, + {value: 0x0012, lo: 0x9e, hi: 0xb7}, + {value: 0x0013, lo: 0xb8, hi: 0xb9}, + {value: 0x0013, lo: 0xbb, hi: 0xbe}, + // Block 0xff, offset 0x534 + {value: 0x0013, lo: 0x80, hi: 0x84}, + {value: 0x0013, lo: 0x86, hi: 0x86}, + {value: 0x0013, lo: 0x8a, hi: 0x90}, + {value: 0x0012, lo: 0x92, hi: 0xab}, + {value: 0x0013, lo: 0xac, hi: 0xbf}, + // Block 0x100, offset 0x539 + {value: 0x0013, lo: 0x80, hi: 0x85}, + {value: 0x0012, lo: 0x86, hi: 0x9f}, + {value: 0x0013, lo: 0xa0, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbf}, + // Block 0x101, offset 0x53d + {value: 0x0012, lo: 0x80, hi: 0x93}, + {value: 0x0013, lo: 0x94, hi: 0xad}, + {value: 0x0012, lo: 0xae, hi: 0xbf}, + // Block 0x102, offset 0x540 + {value: 0x0012, lo: 0x80, hi: 0x87}, + {value: 0x0013, lo: 0x88, hi: 0xa1}, + {value: 0x0012, lo: 0xa2, hi: 0xbb}, + {value: 0x0013, lo: 0xbc, hi: 0xbf}, + // Block 0x103, offset 0x544 + {value: 0x0013, lo: 0x80, hi: 0x95}, + {value: 0x0012, lo: 0x96, hi: 0xaf}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x104, offset 0x547 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0012, lo: 0x8a, hi: 0xa5}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x105, offset 0x54a + {value: 0x0013, lo: 0x80, hi: 0x80}, + {value: 0x0012, lo: 0x82, hi: 0x9a}, + {value: 0x0012, lo: 0x9c, hi: 0xa1}, + {value: 0x0013, lo: 0xa2, hi: 0xba}, + {value: 0x0012, lo: 0xbc, hi: 0xbf}, + // Block 0x106, offset 0x54f + {value: 0x0012, lo: 0x80, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0xb4}, + {value: 0x0012, lo: 0xb6, hi: 0xbf}, + // Block 0x107, offset 0x553 + {value: 0x0012, lo: 0x80, hi: 0x8e}, + {value: 0x0012, lo: 0x90, hi: 0x95}, + {value: 0x0013, lo: 0x96, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x108, offset 0x557 + {value: 0x0012, lo: 0x80, hi: 0x88}, + {value: 0x0012, lo: 0x8a, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x109, offset 0x55b + {value: 0x0012, lo: 0x80, hi: 0x82}, + {value: 0x0012, lo: 0x84, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8b}, + {value: 0x0010, lo: 0x8e, hi: 0xbf}, + // Block 0x10a, offset 0x55f + {value: 0x0014, lo: 0x80, hi: 0xb6}, + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x10b, offset 0x561 + {value: 0x0014, lo: 0x80, hi: 0xac}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x10c, offset 0x563 + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x9b, hi: 0x9f}, + {value: 0x0014, lo: 0xa1, hi: 0xaf}, + // Block 0x10d, offset 0x566 + {value: 0x0024, lo: 0x80, hi: 0x86}, + {value: 0x0024, lo: 0x88, hi: 0x98}, + {value: 0x0024, lo: 0x9b, hi: 0xa1}, + {value: 0x0024, lo: 0xa3, hi: 0xa4}, + {value: 0x0024, lo: 0xa6, hi: 0xaa}, + // Block 0x10e, offset 0x56b + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0034, lo: 0x90, hi: 0x96}, + // Block 0x10f, offset 0x56d + {value: 0xb852, lo: 0x80, hi: 0x81}, + {value: 0xbb52, lo: 0x82, hi: 0x83}, + {value: 0x0024, lo: 0x84, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x110, offset 0x572 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x9f}, + {value: 0x0010, lo: 0xa1, hi: 0xa2}, + {value: 0x0010, lo: 0xa4, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 
0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb7}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + // Block 0x111, offset 0x57b + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x9b}, + {value: 0x0010, lo: 0xa1, hi: 0xa3}, + {value: 0x0010, lo: 0xa5, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xbb}, + // Block 0x112, offset 0x580 + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x113, offset 0x581 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x114, offset 0x584 + {value: 0x0013, lo: 0x80, hi: 0x89}, + // Block 0x115, offset 0x585 + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x116, offset 0x586 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0014, lo: 0xa0, hi: 0xbf}, + // Block 0x117, offset 0x588 + {value: 0x0014, lo: 0x80, hi: 0xbf}, + // Block 0x118, offset 0x589 + {value: 0x0014, lo: 0x80, hi: 0xaf}, +} + +// Total table size 14906 bytes (14KiB); checksum: 362795C7 diff --git a/vendor/golang.org/x/text/cases/tables12.0.0.go b/vendor/golang.org/x/text/cases/tables12.0.0.go new file mode 100644 index 000000000..ae7dc2407 --- /dev/null +++ b/vendor/golang.org/x/text/cases/tables12.0.0.go @@ -0,0 +1,2360 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.14 && !go1.16 +// +build go1.14,!go1.16 + +package cases + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "12.0.0" + +var xorData string = "" + // Size: 192 bytes + "\x00\x06\x07\x00\x01?\x00\x0f\x03\x00\x0f\x12\x00\x0f\x1f\x00\x0f\x1d" + + "\x00\x01\x13\x00\x0f\x16\x00\x0f\x0b\x00\x0f3\x00\x0f7\x00\x01#\x00\x0f?" + + "\x00\x0e'\x00\x0f/\x00\x0e>\x00\x0f*\x00\x0c&\x00\x0c*\x00\x0c;\x00\x0c9" + + "\x00\x0c%\x00\x01\x08\x00\x03\x0d\x00\x03\x09\x00\x02\x06\x00\x02\x02" + + "\x00\x02\x0c\x00\x01\x00\x00\x01\x03\x00\x01\x01\x00\x01 \x00\x01\x0c" + + "\x00\x01\x10\x00\x03\x10\x00\x036 \x00\x037 \x00\x0b#\x10\x00\x0b 0\x00" + + "\x0b!\x10\x00\x0b!0\x001\x00\x00\x0b(\x04\x00\x03\x04\x1e\x00\x0b)\x08" + + "\x00\x03\x0a\x00\x02:\x00\x02>\x00\x02,\x00\x02\x00\x00\x02\x10\x00\x01<" + + "\x00\x01&\x00\x01*\x00\x01.\x00\x010\x003 \x00\x01\x18\x00\x01(\x00\x01" + + "\x1e\x00\x01\x22" + +var exceptions string = "" + // Size: 2450 bytes + "\x00\x12\x12μΜΜ\x12\x12ssSSSs\x13\x18i̇i̇\x10\x09II\x13\x1bʼnʼNʼN\x11" + + "\x09sSS\x12\x12dždžDž\x12\x12dždžDŽ\x10\x12DŽDž\x12\x12ljljLj\x12\x12ljljLJ\x10\x12LJLj" + + "\x12\x12njnjNj\x12\x12njnjNJ\x10\x12NJNj\x13\x1bǰJ̌J̌\x12\x12dzdzDz\x12\x12dzdzDZ\x10" + + "\x12DZDz\x13\x18ⱥⱥ\x13\x18ⱦⱦ\x10\x1bⱾⱾ\x10\x1bⱿⱿ\x10\x1bⱯⱯ\x10\x1bⱭⱭ\x10" + + "\x1bⱰⱰ\x10\x1bꞫꞫ\x10\x1bꞬꞬ\x10\x1bꞍꞍ\x10\x1bꞪꞪ\x10\x1bꞮꞮ\x10\x1bⱢⱢ\x10" + + "\x1bꞭꞭ\x10\x1bⱮⱮ\x10\x1bⱤⱤ\x10\x1bꟅꟅ\x10\x1bꞱꞱ\x10\x1bꞲꞲ\x10\x1bꞰꞰ2\x12ι" + + "ΙΙ\x166ΐΪ́Ϊ́\x166ΰΫ́Ϋ́\x12\x12σΣΣ\x12\x12βΒΒ\x12\x12θΘΘ\x12\x12" + + "φΦΦ\x12\x12πΠΠ\x12\x12κΚΚ\x12\x12ρΡΡ\x12\x12εΕΕ\x14$եւԵՒԵւ\x10\x1bᲐა" + + "\x10\x1bᲑბ\x10\x1bᲒგ\x10\x1bᲓდ\x10\x1bᲔე\x10\x1bᲕვ\x10\x1bᲖზ\x10\x1bᲗთ" + + "\x10\x1bᲘი\x10\x1bᲙკ\x10\x1bᲚლ\x10\x1bᲛმ\x10\x1bᲜნ\x10\x1bᲝო\x10\x1bᲞპ" + + "\x10\x1bᲟჟ\x10\x1bᲠრ\x10\x1bᲡს\x10\x1bᲢტ\x10\x1bᲣუ\x10\x1bᲤფ\x10\x1bᲥქ" + + "\x10\x1bᲦღ\x10\x1bᲧყ\x10\x1bᲨშ\x10\x1bᲩჩ\x10\x1bᲪც\x10\x1bᲫძ\x10\x1bᲬწ" + + "\x10\x1bᲭჭ\x10\x1bᲮხ\x10\x1bᲯჯ\x10\x1bᲰჰ\x10\x1bᲱჱ\x10\x1bᲲჲ\x10\x1bᲳჳ" + + "\x10\x1bᲴჴ\x10\x1bᲵჵ\x10\x1bᲶჶ\x10\x1bᲷჷ\x10\x1bᲸჸ\x10\x1bᲹჹ\x10\x1bᲺჺ" + + "\x10\x1bᲽჽ\x10\x1bᲾჾ\x10\x1bᲿჿ\x12\x12вВВ\x12\x12дДД\x12\x12оОО\x12\x12с" + + 
"СС\x12\x12тТТ\x12\x12тТТ\x12\x12ъЪЪ\x12\x12ѣѢѢ\x13\x1bꙋꙊꙊ\x13\x1bẖH̱H̱" + + "\x13\x1bẗT̈T̈\x13\x1bẘW̊W̊\x13\x1bẙY̊Y̊\x13\x1baʾAʾAʾ\x13\x1bṡṠṠ\x12" + + "\x10ssß\x14$ὐΥ̓Υ̓\x166ὒΥ̓̀Υ̓̀\x166ὔΥ̓́Υ̓́\x166ὖΥ̓͂Υ̓͂\x15+ἀιἈΙᾈ" + + "\x15+ἁιἉΙᾉ\x15+ἂιἊΙᾊ\x15+ἃιἋΙᾋ\x15+ἄιἌΙᾌ\x15+ἅιἍΙᾍ\x15+ἆιἎΙᾎ\x15+ἇιἏΙᾏ" + + "\x15\x1dἀιᾀἈΙ\x15\x1dἁιᾁἉΙ\x15\x1dἂιᾂἊΙ\x15\x1dἃιᾃἋΙ\x15\x1dἄιᾄἌΙ\x15" + + "\x1dἅιᾅἍΙ\x15\x1dἆιᾆἎΙ\x15\x1dἇιᾇἏΙ\x15+ἠιἨΙᾘ\x15+ἡιἩΙᾙ\x15+ἢιἪΙᾚ\x15+ἣι" + + "ἫΙᾛ\x15+ἤιἬΙᾜ\x15+ἥιἭΙᾝ\x15+ἦιἮΙᾞ\x15+ἧιἯΙᾟ\x15\x1dἠιᾐἨΙ\x15\x1dἡιᾑἩΙ" + + "\x15\x1dἢιᾒἪΙ\x15\x1dἣιᾓἫΙ\x15\x1dἤιᾔἬΙ\x15\x1dἥιᾕἭΙ\x15\x1dἦιᾖἮΙ\x15" + + "\x1dἧιᾗἯΙ\x15+ὠιὨΙᾨ\x15+ὡιὩΙᾩ\x15+ὢιὪΙᾪ\x15+ὣιὫΙᾫ\x15+ὤιὬΙᾬ\x15+ὥιὭΙᾭ" + + "\x15+ὦιὮΙᾮ\x15+ὧιὯΙᾯ\x15\x1dὠιᾠὨΙ\x15\x1dὡιᾡὩΙ\x15\x1dὢιᾢὪΙ\x15\x1dὣιᾣὫΙ" + + "\x15\x1dὤιᾤὬΙ\x15\x1dὥιᾥὭΙ\x15\x1dὦιᾦὮΙ\x15\x1dὧιᾧὯΙ\x15-ὰιᾺΙᾺͅ\x14#αιΑΙ" + + "ᾼ\x14$άιΆΙΆͅ\x14$ᾶΑ͂Α͂\x166ᾶιΑ͂Ιᾼ͂\x14\x1cαιᾳΑΙ\x12\x12ιΙΙ\x15-ὴιῊΙ" + + "Ὴͅ\x14#ηιΗΙῌ\x14$ήιΉΙΉͅ\x14$ῆΗ͂Η͂\x166ῆιΗ͂Ιῌ͂\x14\x1cηιῃΗΙ\x166ῒΙ" + + "̈̀Ϊ̀\x166ΐΪ́Ϊ́\x14$ῖΙ͂Ι͂\x166ῗΪ͂Ϊ͂\x166ῢΫ̀Ϋ̀\x166ΰΫ́Ϋ" + + "́\x14$ῤΡ̓Ρ̓\x14$ῦΥ͂Υ͂\x166ῧΫ͂Ϋ͂\x15-ὼιῺΙῺͅ\x14#ωιΩΙῼ\x14$ώιΏΙΏͅ" + + "\x14$ῶΩ͂Ω͂\x166ῶιΩ͂Ιῼ͂\x14\x1cωιῳΩΙ\x12\x10ωω\x11\x08kk\x12\x10åå\x12" + + "\x10ɫɫ\x12\x10ɽɽ\x10\x12ȺȺ\x10\x12ȾȾ\x12\x10ɑɑ\x12\x10ɱɱ\x12\x10ɐɐ\x12" + + "\x10ɒɒ\x12\x10ȿȿ\x12\x10ɀɀ\x12\x10ɥɥ\x12\x10ɦɦ\x12\x10ɜɜ\x12\x10ɡɡ\x12" + + "\x10ɬɬ\x12\x10ɪɪ\x12\x10ʞʞ\x12\x10ʇʇ\x12\x10ʝʝ\x12\x10ʂʂ\x12\x12ffFFFf" + + "\x12\x12fiFIFi\x12\x12flFLFl\x13\x1bffiFFIFfi\x13\x1bfflFFLFfl\x12\x12st" + + "STSt\x12\x12stSTSt\x14$մնՄՆՄն\x14$մեՄԵՄե\x14$միՄԻՄի\x14$վնՎՆՎն\x14$մխՄԽՄ" + + "խ" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *caseTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// caseTrie. Total size: 12396 bytes (12.11 KiB). Checksum: c0656238384c3da1. +type caseTrie struct{} + +func newCaseTrie(i int) *caseTrie { + return &caseTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *caseTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 20: + return uint16(caseValues[n<<6+uint32(b)]) + default: + n -= 20 + return uint16(sparse.lookup(n, b)) + } +} + +// caseValues: 22 blocks, 1408 entries, 2816 bytes +// The third block is the zero block. 
+var caseValues = [1408]uint16{ + // Block 0x0, offset 0x0 + 0x27: 0x0054, + 0x2e: 0x0054, + 0x30: 0x0010, 0x31: 0x0010, 0x32: 0x0010, 0x33: 0x0010, 0x34: 0x0010, 0x35: 0x0010, + 0x36: 0x0010, 0x37: 0x0010, 0x38: 0x0010, 0x39: 0x0010, 0x3a: 0x0054, + // Block 0x1, offset 0x40 + 0x41: 0x2013, 0x42: 0x2013, 0x43: 0x2013, 0x44: 0x2013, 0x45: 0x2013, + 0x46: 0x2013, 0x47: 0x2013, 0x48: 0x2013, 0x49: 0x2013, 0x4a: 0x2013, 0x4b: 0x2013, + 0x4c: 0x2013, 0x4d: 0x2013, 0x4e: 0x2013, 0x4f: 0x2013, 0x50: 0x2013, 0x51: 0x2013, + 0x52: 0x2013, 0x53: 0x2013, 0x54: 0x2013, 0x55: 0x2013, 0x56: 0x2013, 0x57: 0x2013, + 0x58: 0x2013, 0x59: 0x2013, 0x5a: 0x2013, + 0x5e: 0x0004, 0x5f: 0x0010, 0x60: 0x0004, 0x61: 0x2012, 0x62: 0x2012, 0x63: 0x2012, + 0x64: 0x2012, 0x65: 0x2012, 0x66: 0x2012, 0x67: 0x2012, 0x68: 0x2012, 0x69: 0x2012, + 0x6a: 0x2012, 0x6b: 0x2012, 0x6c: 0x2012, 0x6d: 0x2012, 0x6e: 0x2012, 0x6f: 0x2012, + 0x70: 0x2012, 0x71: 0x2012, 0x72: 0x2012, 0x73: 0x2012, 0x74: 0x2012, 0x75: 0x2012, + 0x76: 0x2012, 0x77: 0x2012, 0x78: 0x2012, 0x79: 0x2012, 0x7a: 0x2012, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x0852, 0xc1: 0x0b53, 0xc2: 0x0113, 0xc3: 0x0112, 0xc4: 0x0113, 0xc5: 0x0112, + 0xc6: 0x0b53, 0xc7: 0x0f13, 0xc8: 0x0f12, 0xc9: 0x0e53, 0xca: 0x1153, 0xcb: 0x0713, + 0xcc: 0x0712, 0xcd: 0x0012, 0xce: 0x1453, 0xcf: 0x1753, 0xd0: 0x1a53, 0xd1: 0x0313, + 0xd2: 0x0312, 0xd3: 0x1d53, 0xd4: 0x2053, 0xd5: 0x2352, 0xd6: 0x2653, 0xd7: 0x2653, + 0xd8: 0x0113, 0xd9: 0x0112, 0xda: 0x2952, 0xdb: 0x0012, 0xdc: 0x1d53, 0xdd: 0x2c53, + 0xde: 0x2f52, 0xdf: 0x3253, 0xe0: 0x0113, 0xe1: 0x0112, 0xe2: 0x0113, 0xe3: 0x0112, + 0xe4: 0x0113, 0xe5: 0x0112, 0xe6: 0x3553, 0xe7: 0x0f13, 0xe8: 0x0f12, 0xe9: 0x3853, + 0xea: 0x0012, 0xeb: 0x0012, 0xec: 0x0113, 0xed: 0x0112, 0xee: 0x3553, 0xef: 0x1f13, + 0xf0: 0x1f12, 0xf1: 0x3b53, 0xf2: 0x3e53, 0xf3: 0x0713, 0xf4: 0x0712, 0xf5: 0x0313, + 0xf6: 0x0312, 0xf7: 0x4153, 0xf8: 0x0113, 0xf9: 0x0112, 0xfa: 0x0012, 0xfb: 0x0010, + 0xfc: 0x0113, 0xfd: 0x0112, 0xfe: 0x0012, 0xff: 0x4452, + // Block 0x4, offset 0x100 + 0x100: 0x0010, 0x101: 0x0010, 0x102: 0x0010, 0x103: 0x0010, 0x104: 0x02db, 0x105: 0x0359, + 0x106: 0x03da, 0x107: 0x043b, 0x108: 0x04b9, 0x109: 0x053a, 0x10a: 0x059b, 0x10b: 0x0619, + 0x10c: 0x069a, 0x10d: 0x0313, 0x10e: 0x0312, 0x10f: 0x1f13, 0x110: 0x1f12, 0x111: 0x0313, + 0x112: 0x0312, 0x113: 0x0713, 0x114: 0x0712, 0x115: 0x0313, 0x116: 0x0312, 0x117: 0x0f13, + 0x118: 0x0f12, 0x119: 0x0313, 0x11a: 0x0312, 0x11b: 0x0713, 0x11c: 0x0712, 0x11d: 0x1452, + 0x11e: 0x0113, 0x11f: 0x0112, 0x120: 0x0113, 0x121: 0x0112, 0x122: 0x0113, 0x123: 0x0112, + 0x124: 0x0113, 0x125: 0x0112, 0x126: 0x0113, 0x127: 0x0112, 0x128: 0x0113, 0x129: 0x0112, + 0x12a: 0x0113, 0x12b: 0x0112, 0x12c: 0x0113, 0x12d: 0x0112, 0x12e: 0x0113, 0x12f: 0x0112, + 0x130: 0x06fa, 0x131: 0x07ab, 0x132: 0x0829, 0x133: 0x08aa, 0x134: 0x0113, 0x135: 0x0112, + 0x136: 0x2353, 0x137: 0x4453, 0x138: 0x0113, 0x139: 0x0112, 0x13a: 0x0113, 0x13b: 0x0112, + 0x13c: 0x0113, 0x13d: 0x0112, 0x13e: 0x0113, 0x13f: 0x0112, + // Block 0x5, offset 0x140 + 0x140: 0x0a8a, 0x141: 0x0313, 0x142: 0x0312, 0x143: 0x0853, 0x144: 0x4753, 0x145: 0x4a53, + 0x146: 0x0113, 0x147: 0x0112, 0x148: 0x0113, 0x149: 0x0112, 0x14a: 0x0113, 0x14b: 0x0112, + 0x14c: 0x0113, 0x14d: 0x0112, 0x14e: 0x0113, 0x14f: 0x0112, 0x150: 0x0b0a, 0x151: 0x0b8a, + 0x152: 0x0c0a, 0x153: 0x0b52, 0x154: 0x0b52, 0x155: 0x0012, 0x156: 0x0e52, 0x157: 0x1152, + 0x158: 0x0012, 0x159: 0x1752, 0x15a: 0x0012, 0x15b: 0x1a52, 0x15c: 0x0c8a, 0x15d: 0x0012, + 0x15e: 
0x0012, 0x15f: 0x0012, 0x160: 0x1d52, 0x161: 0x0d0a, 0x162: 0x0012, 0x163: 0x2052, + 0x164: 0x0012, 0x165: 0x0d8a, 0x166: 0x0e0a, 0x167: 0x0012, 0x168: 0x2652, 0x169: 0x2652, + 0x16a: 0x0e8a, 0x16b: 0x0f0a, 0x16c: 0x0f8a, 0x16d: 0x0012, 0x16e: 0x0012, 0x16f: 0x1d52, + 0x170: 0x0012, 0x171: 0x100a, 0x172: 0x2c52, 0x173: 0x0012, 0x174: 0x0012, 0x175: 0x3252, + 0x176: 0x0012, 0x177: 0x0012, 0x178: 0x0012, 0x179: 0x0012, 0x17a: 0x0012, 0x17b: 0x0012, + 0x17c: 0x0012, 0x17d: 0x108a, 0x17e: 0x0012, 0x17f: 0x0012, + // Block 0x6, offset 0x180 + 0x180: 0x3552, 0x181: 0x0012, 0x182: 0x110a, 0x183: 0x3852, 0x184: 0x0012, 0x185: 0x0012, + 0x186: 0x0012, 0x187: 0x118a, 0x188: 0x3552, 0x189: 0x4752, 0x18a: 0x3b52, 0x18b: 0x3e52, + 0x18c: 0x4a52, 0x18d: 0x0012, 0x18e: 0x0012, 0x18f: 0x0012, 0x190: 0x0012, 0x191: 0x0012, + 0x192: 0x4152, 0x193: 0x0012, 0x194: 0x0010, 0x195: 0x0012, 0x196: 0x0012, 0x197: 0x0012, + 0x198: 0x0012, 0x199: 0x0012, 0x19a: 0x0012, 0x19b: 0x0012, 0x19c: 0x0012, 0x19d: 0x120a, + 0x19e: 0x128a, 0x19f: 0x0012, 0x1a0: 0x0012, 0x1a1: 0x0012, 0x1a2: 0x0012, 0x1a3: 0x0012, + 0x1a4: 0x0012, 0x1a5: 0x0012, 0x1a6: 0x0012, 0x1a7: 0x0012, 0x1a8: 0x0012, 0x1a9: 0x0012, + 0x1aa: 0x0012, 0x1ab: 0x0012, 0x1ac: 0x0012, 0x1ad: 0x0012, 0x1ae: 0x0012, 0x1af: 0x0012, + 0x1b0: 0x0015, 0x1b1: 0x0015, 0x1b2: 0x0015, 0x1b3: 0x0015, 0x1b4: 0x0015, 0x1b5: 0x0015, + 0x1b6: 0x0015, 0x1b7: 0x0015, 0x1b8: 0x0015, 0x1b9: 0x0014, 0x1ba: 0x0014, 0x1bb: 0x0014, + 0x1bc: 0x0014, 0x1bd: 0x0014, 0x1be: 0x0014, 0x1bf: 0x0014, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0024, 0x1c1: 0x0024, 0x1c2: 0x0024, 0x1c3: 0x0024, 0x1c4: 0x0024, 0x1c5: 0x130d, + 0x1c6: 0x0024, 0x1c7: 0x0034, 0x1c8: 0x0034, 0x1c9: 0x0034, 0x1ca: 0x0024, 0x1cb: 0x0024, + 0x1cc: 0x0024, 0x1cd: 0x0034, 0x1ce: 0x0034, 0x1cf: 0x0014, 0x1d0: 0x0024, 0x1d1: 0x0024, + 0x1d2: 0x0024, 0x1d3: 0x0034, 0x1d4: 0x0034, 0x1d5: 0x0034, 0x1d6: 0x0034, 0x1d7: 0x0024, + 0x1d8: 0x0034, 0x1d9: 0x0034, 0x1da: 0x0034, 0x1db: 0x0024, 0x1dc: 0x0034, 0x1dd: 0x0034, + 0x1de: 0x0034, 0x1df: 0x0034, 0x1e0: 0x0034, 0x1e1: 0x0034, 0x1e2: 0x0034, 0x1e3: 0x0024, + 0x1e4: 0x0024, 0x1e5: 0x0024, 0x1e6: 0x0024, 0x1e7: 0x0024, 0x1e8: 0x0024, 0x1e9: 0x0024, + 0x1ea: 0x0024, 0x1eb: 0x0024, 0x1ec: 0x0024, 0x1ed: 0x0024, 0x1ee: 0x0024, 0x1ef: 0x0024, + 0x1f0: 0x0113, 0x1f1: 0x0112, 0x1f2: 0x0113, 0x1f3: 0x0112, 0x1f4: 0x0014, 0x1f5: 0x0004, + 0x1f6: 0x0113, 0x1f7: 0x0112, 0x1fa: 0x0015, 0x1fb: 0x4d52, + 0x1fc: 0x5052, 0x1fd: 0x5052, 0x1ff: 0x5353, + // Block 0x8, offset 0x200 + 0x204: 0x0004, 0x205: 0x0004, + 0x206: 0x2a13, 0x207: 0x0054, 0x208: 0x2513, 0x209: 0x2713, 0x20a: 0x2513, + 0x20c: 0x5653, 0x20e: 0x5953, 0x20f: 0x5c53, 0x210: 0x138a, 0x211: 0x2013, + 0x212: 0x2013, 0x213: 0x2013, 0x214: 0x2013, 0x215: 0x2013, 0x216: 0x2013, 0x217: 0x2013, + 0x218: 0x2013, 0x219: 0x2013, 0x21a: 0x2013, 0x21b: 0x2013, 0x21c: 0x2013, 0x21d: 0x2013, + 0x21e: 0x2013, 0x21f: 0x2013, 0x220: 0x5f53, 0x221: 0x5f53, 0x223: 0x5f53, + 0x224: 0x5f53, 0x225: 0x5f53, 0x226: 0x5f53, 0x227: 0x5f53, 0x228: 0x5f53, 0x229: 0x5f53, + 0x22a: 0x5f53, 0x22b: 0x5f53, 0x22c: 0x2a12, 0x22d: 0x2512, 0x22e: 0x2712, 0x22f: 0x2512, + 0x230: 0x14ca, 0x231: 0x2012, 0x232: 0x2012, 0x233: 0x2012, 0x234: 0x2012, 0x235: 0x2012, + 0x236: 0x2012, 0x237: 0x2012, 0x238: 0x2012, 0x239: 0x2012, 0x23a: 0x2012, 0x23b: 0x2012, + 0x23c: 0x2012, 0x23d: 0x2012, 0x23e: 0x2012, 0x23f: 0x2012, + // Block 0x9, offset 0x240 + 0x240: 0x5f52, 0x241: 0x5f52, 0x242: 0x160a, 0x243: 0x5f52, 0x244: 0x5f52, 0x245: 0x5f52, + 0x246: 0x5f52, 0x247: 0x5f52, 
0x248: 0x5f52, 0x249: 0x5f52, 0x24a: 0x5f52, 0x24b: 0x5f52, + 0x24c: 0x5652, 0x24d: 0x5952, 0x24e: 0x5c52, 0x24f: 0x1813, 0x250: 0x168a, 0x251: 0x170a, + 0x252: 0x0013, 0x253: 0x0013, 0x254: 0x0013, 0x255: 0x178a, 0x256: 0x180a, 0x257: 0x1812, + 0x258: 0x0113, 0x259: 0x0112, 0x25a: 0x0113, 0x25b: 0x0112, 0x25c: 0x0113, 0x25d: 0x0112, + 0x25e: 0x0113, 0x25f: 0x0112, 0x260: 0x0113, 0x261: 0x0112, 0x262: 0x0113, 0x263: 0x0112, + 0x264: 0x0113, 0x265: 0x0112, 0x266: 0x0113, 0x267: 0x0112, 0x268: 0x0113, 0x269: 0x0112, + 0x26a: 0x0113, 0x26b: 0x0112, 0x26c: 0x0113, 0x26d: 0x0112, 0x26e: 0x0113, 0x26f: 0x0112, + 0x270: 0x188a, 0x271: 0x190a, 0x272: 0x0b12, 0x273: 0x5352, 0x274: 0x6253, 0x275: 0x198a, + 0x277: 0x0f13, 0x278: 0x0f12, 0x279: 0x0b13, 0x27a: 0x0113, 0x27b: 0x0112, + 0x27c: 0x0012, 0x27d: 0x4d53, 0x27e: 0x5053, 0x27f: 0x5053, + // Block 0xa, offset 0x280 + 0x280: 0x6852, 0x281: 0x6852, 0x282: 0x6852, 0x283: 0x6852, 0x284: 0x6852, 0x285: 0x6852, + 0x286: 0x6852, 0x287: 0x1a0a, 0x288: 0x0012, + 0x291: 0x0034, + 0x292: 0x0024, 0x293: 0x0024, 0x294: 0x0024, 0x295: 0x0024, 0x296: 0x0034, 0x297: 0x0024, + 0x298: 0x0024, 0x299: 0x0024, 0x29a: 0x0034, 0x29b: 0x0034, 0x29c: 0x0024, 0x29d: 0x0024, + 0x29e: 0x0024, 0x29f: 0x0024, 0x2a0: 0x0024, 0x2a1: 0x0024, 0x2a2: 0x0034, 0x2a3: 0x0034, + 0x2a4: 0x0034, 0x2a5: 0x0034, 0x2a6: 0x0034, 0x2a7: 0x0034, 0x2a8: 0x0024, 0x2a9: 0x0024, + 0x2aa: 0x0034, 0x2ab: 0x0024, 0x2ac: 0x0024, 0x2ad: 0x0034, 0x2ae: 0x0034, 0x2af: 0x0024, + 0x2b0: 0x0034, 0x2b1: 0x0034, 0x2b2: 0x0034, 0x2b3: 0x0034, 0x2b4: 0x0034, 0x2b5: 0x0034, + 0x2b6: 0x0034, 0x2b7: 0x0034, 0x2b8: 0x0034, 0x2b9: 0x0034, 0x2ba: 0x0034, 0x2bb: 0x0034, + 0x2bc: 0x0034, 0x2bd: 0x0034, 0x2bf: 0x0034, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x7053, 0x2c1: 0x7053, 0x2c2: 0x7053, 0x2c3: 0x7053, 0x2c4: 0x7053, 0x2c5: 0x7053, + 0x2c7: 0x7053, + 0x2cd: 0x7053, 0x2d0: 0x1aea, 0x2d1: 0x1b6a, + 0x2d2: 0x1bea, 0x2d3: 0x1c6a, 0x2d4: 0x1cea, 0x2d5: 0x1d6a, 0x2d6: 0x1dea, 0x2d7: 0x1e6a, + 0x2d8: 0x1eea, 0x2d9: 0x1f6a, 0x2da: 0x1fea, 0x2db: 0x206a, 0x2dc: 0x20ea, 0x2dd: 0x216a, + 0x2de: 0x21ea, 0x2df: 0x226a, 0x2e0: 0x22ea, 0x2e1: 0x236a, 0x2e2: 0x23ea, 0x2e3: 0x246a, + 0x2e4: 0x24ea, 0x2e5: 0x256a, 0x2e6: 0x25ea, 0x2e7: 0x266a, 0x2e8: 0x26ea, 0x2e9: 0x276a, + 0x2ea: 0x27ea, 0x2eb: 0x286a, 0x2ec: 0x28ea, 0x2ed: 0x296a, 0x2ee: 0x29ea, 0x2ef: 0x2a6a, + 0x2f0: 0x2aea, 0x2f1: 0x2b6a, 0x2f2: 0x2bea, 0x2f3: 0x2c6a, 0x2f4: 0x2cea, 0x2f5: 0x2d6a, + 0x2f6: 0x2dea, 0x2f7: 0x2e6a, 0x2f8: 0x2eea, 0x2f9: 0x2f6a, 0x2fa: 0x2fea, + 0x2fc: 0x0014, 0x2fd: 0x306a, 0x2fe: 0x30ea, 0x2ff: 0x316a, + // Block 0xc, offset 0x300 + 0x300: 0x0812, 0x301: 0x0812, 0x302: 0x0812, 0x303: 0x0812, 0x304: 0x0812, 0x305: 0x0812, + 0x308: 0x0813, 0x309: 0x0813, 0x30a: 0x0813, 0x30b: 0x0813, + 0x30c: 0x0813, 0x30d: 0x0813, 0x310: 0x3b1a, 0x311: 0x0812, + 0x312: 0x3bfa, 0x313: 0x0812, 0x314: 0x3d3a, 0x315: 0x0812, 0x316: 0x3e7a, 0x317: 0x0812, + 0x319: 0x0813, 0x31b: 0x0813, 0x31d: 0x0813, + 0x31f: 0x0813, 0x320: 0x0812, 0x321: 0x0812, 0x322: 0x0812, 0x323: 0x0812, + 0x324: 0x0812, 0x325: 0x0812, 0x326: 0x0812, 0x327: 0x0812, 0x328: 0x0813, 0x329: 0x0813, + 0x32a: 0x0813, 0x32b: 0x0813, 0x32c: 0x0813, 0x32d: 0x0813, 0x32e: 0x0813, 0x32f: 0x0813, + 0x330: 0x9252, 0x331: 0x9252, 0x332: 0x9552, 0x333: 0x9552, 0x334: 0x9852, 0x335: 0x9852, + 0x336: 0x9b52, 0x337: 0x9b52, 0x338: 0x9e52, 0x339: 0x9e52, 0x33a: 0xa152, 0x33b: 0xa152, + 0x33c: 0x4d52, 0x33d: 0x4d52, + // Block 0xd, offset 0x340 + 0x340: 0x3fba, 0x341: 0x40aa, 0x342: 0x419a, 0x343: 0x428a, 0x344: 
0x437a, 0x345: 0x446a, + 0x346: 0x455a, 0x347: 0x464a, 0x348: 0x4739, 0x349: 0x4829, 0x34a: 0x4919, 0x34b: 0x4a09, + 0x34c: 0x4af9, 0x34d: 0x4be9, 0x34e: 0x4cd9, 0x34f: 0x4dc9, 0x350: 0x4eba, 0x351: 0x4faa, + 0x352: 0x509a, 0x353: 0x518a, 0x354: 0x527a, 0x355: 0x536a, 0x356: 0x545a, 0x357: 0x554a, + 0x358: 0x5639, 0x359: 0x5729, 0x35a: 0x5819, 0x35b: 0x5909, 0x35c: 0x59f9, 0x35d: 0x5ae9, + 0x35e: 0x5bd9, 0x35f: 0x5cc9, 0x360: 0x5dba, 0x361: 0x5eaa, 0x362: 0x5f9a, 0x363: 0x608a, + 0x364: 0x617a, 0x365: 0x626a, 0x366: 0x635a, 0x367: 0x644a, 0x368: 0x6539, 0x369: 0x6629, + 0x36a: 0x6719, 0x36b: 0x6809, 0x36c: 0x68f9, 0x36d: 0x69e9, 0x36e: 0x6ad9, 0x36f: 0x6bc9, + 0x370: 0x0812, 0x371: 0x0812, 0x372: 0x6cba, 0x373: 0x6dca, 0x374: 0x6e9a, + 0x376: 0x6f7a, 0x377: 0x705a, 0x378: 0x0813, 0x379: 0x0813, 0x37a: 0x9253, 0x37b: 0x9253, + 0x37c: 0x7199, 0x37d: 0x0004, 0x37e: 0x726a, 0x37f: 0x0004, + // Block 0xe, offset 0x380 + 0x380: 0x0004, 0x381: 0x0004, 0x382: 0x72ea, 0x383: 0x73fa, 0x384: 0x74ca, + 0x386: 0x75aa, 0x387: 0x768a, 0x388: 0x9553, 0x389: 0x9553, 0x38a: 0x9853, 0x38b: 0x9853, + 0x38c: 0x77c9, 0x38d: 0x0004, 0x38e: 0x0004, 0x38f: 0x0004, 0x390: 0x0812, 0x391: 0x0812, + 0x392: 0x789a, 0x393: 0x79da, 0x396: 0x7b1a, 0x397: 0x7bfa, + 0x398: 0x0813, 0x399: 0x0813, 0x39a: 0x9b53, 0x39b: 0x9b53, 0x39d: 0x0004, + 0x39e: 0x0004, 0x39f: 0x0004, 0x3a0: 0x0812, 0x3a1: 0x0812, 0x3a2: 0x7d3a, 0x3a3: 0x7e7a, + 0x3a4: 0x7fba, 0x3a5: 0x0912, 0x3a6: 0x809a, 0x3a7: 0x817a, 0x3a8: 0x0813, 0x3a9: 0x0813, + 0x3aa: 0xa153, 0x3ab: 0xa153, 0x3ac: 0x0913, 0x3ad: 0x0004, 0x3ae: 0x0004, 0x3af: 0x0004, + 0x3b2: 0x82ba, 0x3b3: 0x83ca, 0x3b4: 0x849a, + 0x3b6: 0x857a, 0x3b7: 0x865a, 0x3b8: 0x9e53, 0x3b9: 0x9e53, 0x3ba: 0x4d53, 0x3bb: 0x4d53, + 0x3bc: 0x8799, 0x3bd: 0x0004, 0x3be: 0x0004, + // Block 0xf, offset 0x3c0 + 0x3c2: 0x0013, + 0x3c7: 0x0013, 0x3ca: 0x0012, 0x3cb: 0x0013, + 0x3cc: 0x0013, 0x3cd: 0x0013, 0x3ce: 0x0012, 0x3cf: 0x0012, 0x3d0: 0x0013, 0x3d1: 0x0013, + 0x3d2: 0x0013, 0x3d3: 0x0012, 0x3d5: 0x0013, + 0x3d9: 0x0013, 0x3da: 0x0013, 0x3db: 0x0013, 0x3dc: 0x0013, 0x3dd: 0x0013, + 0x3e4: 0x0013, 0x3e6: 0x886b, 0x3e8: 0x0013, + 0x3ea: 0x88cb, 0x3eb: 0x890b, 0x3ec: 0x0013, 0x3ed: 0x0013, 0x3ef: 0x0012, + 0x3f0: 0x0013, 0x3f1: 0x0013, 0x3f2: 0xa453, 0x3f3: 0x0013, 0x3f4: 0x0012, 0x3f5: 0x0010, + 0x3f6: 0x0010, 0x3f7: 0x0010, 0x3f8: 0x0010, 0x3f9: 0x0012, + 0x3fc: 0x0012, 0x3fd: 0x0012, 0x3fe: 0x0013, 0x3ff: 0x0013, + // Block 0x10, offset 0x400 + 0x400: 0x1a13, 0x401: 0x1a13, 0x402: 0x1e13, 0x403: 0x1e13, 0x404: 0x1a13, 0x405: 0x1a13, + 0x406: 0x2613, 0x407: 0x2613, 0x408: 0x2a13, 0x409: 0x2a13, 0x40a: 0x2e13, 0x40b: 0x2e13, + 0x40c: 0x2a13, 0x40d: 0x2a13, 0x40e: 0x2613, 0x40f: 0x2613, 0x410: 0xa752, 0x411: 0xa752, + 0x412: 0xaa52, 0x413: 0xaa52, 0x414: 0xad52, 0x415: 0xad52, 0x416: 0xaa52, 0x417: 0xaa52, + 0x418: 0xa752, 0x419: 0xa752, 0x41a: 0x1a12, 0x41b: 0x1a12, 0x41c: 0x1e12, 0x41d: 0x1e12, + 0x41e: 0x1a12, 0x41f: 0x1a12, 0x420: 0x2612, 0x421: 0x2612, 0x422: 0x2a12, 0x423: 0x2a12, + 0x424: 0x2e12, 0x425: 0x2e12, 0x426: 0x2a12, 0x427: 0x2a12, 0x428: 0x2612, 0x429: 0x2612, + // Block 0x11, offset 0x440 + 0x440: 0x6552, 0x441: 0x6552, 0x442: 0x6552, 0x443: 0x6552, 0x444: 0x6552, 0x445: 0x6552, + 0x446: 0x6552, 0x447: 0x6552, 0x448: 0x6552, 0x449: 0x6552, 0x44a: 0x6552, 0x44b: 0x6552, + 0x44c: 0x6552, 0x44d: 0x6552, 0x44e: 0x6552, 0x44f: 0x6552, 0x450: 0xb052, 0x451: 0xb052, + 0x452: 0xb052, 0x453: 0xb052, 0x454: 0xb052, 0x455: 0xb052, 0x456: 0xb052, 0x457: 0xb052, + 0x458: 0xb052, 0x459: 0xb052, 0x45a: 
0xb052, 0x45b: 0xb052, 0x45c: 0xb052, 0x45d: 0xb052, + 0x45e: 0xb052, 0x460: 0x0113, 0x461: 0x0112, 0x462: 0x896b, 0x463: 0x8b53, + 0x464: 0x89cb, 0x465: 0x8a2a, 0x466: 0x8a8a, 0x467: 0x0f13, 0x468: 0x0f12, 0x469: 0x0313, + 0x46a: 0x0312, 0x46b: 0x0713, 0x46c: 0x0712, 0x46d: 0x8aeb, 0x46e: 0x8b4b, 0x46f: 0x8bab, + 0x470: 0x8c0b, 0x471: 0x0012, 0x472: 0x0113, 0x473: 0x0112, 0x474: 0x0012, 0x475: 0x0313, + 0x476: 0x0312, 0x477: 0x0012, 0x478: 0x0012, 0x479: 0x0012, 0x47a: 0x0012, 0x47b: 0x0012, + 0x47c: 0x0015, 0x47d: 0x0015, 0x47e: 0x8c6b, 0x47f: 0x8ccb, + // Block 0x12, offset 0x480 + 0x480: 0x0113, 0x481: 0x0112, 0x482: 0x0113, 0x483: 0x0112, 0x484: 0x0113, 0x485: 0x0112, + 0x486: 0x0113, 0x487: 0x0112, 0x488: 0x0014, 0x489: 0x0014, 0x48a: 0x0014, 0x48b: 0x0713, + 0x48c: 0x0712, 0x48d: 0x8d2b, 0x48e: 0x0012, 0x48f: 0x0010, 0x490: 0x0113, 0x491: 0x0112, + 0x492: 0x0113, 0x493: 0x0112, 0x494: 0x6552, 0x495: 0x0012, 0x496: 0x0113, 0x497: 0x0112, + 0x498: 0x0113, 0x499: 0x0112, 0x49a: 0x0113, 0x49b: 0x0112, 0x49c: 0x0113, 0x49d: 0x0112, + 0x49e: 0x0113, 0x49f: 0x0112, 0x4a0: 0x0113, 0x4a1: 0x0112, 0x4a2: 0x0113, 0x4a3: 0x0112, + 0x4a4: 0x0113, 0x4a5: 0x0112, 0x4a6: 0x0113, 0x4a7: 0x0112, 0x4a8: 0x0113, 0x4a9: 0x0112, + 0x4aa: 0x8d8b, 0x4ab: 0x8deb, 0x4ac: 0x8e4b, 0x4ad: 0x8eab, 0x4ae: 0x8f0b, 0x4af: 0x0012, + 0x4b0: 0x8f6b, 0x4b1: 0x8fcb, 0x4b2: 0x902b, 0x4b3: 0xb353, 0x4b4: 0x0113, 0x4b5: 0x0112, + 0x4b6: 0x0113, 0x4b7: 0x0112, 0x4b8: 0x0113, 0x4b9: 0x0112, 0x4ba: 0x0113, 0x4bb: 0x0112, + 0x4bc: 0x0113, 0x4bd: 0x0112, 0x4be: 0x0113, 0x4bf: 0x0112, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x90ea, 0x4c1: 0x916a, 0x4c2: 0x91ea, 0x4c3: 0x926a, 0x4c4: 0x931a, 0x4c5: 0x93ca, + 0x4c6: 0x944a, + 0x4d3: 0x94ca, 0x4d4: 0x95aa, 0x4d5: 0x968a, 0x4d6: 0x976a, 0x4d7: 0x984a, + 0x4dd: 0x0010, + 0x4de: 0x0034, 0x4df: 0x0010, 0x4e0: 0x0010, 0x4e1: 0x0010, 0x4e2: 0x0010, 0x4e3: 0x0010, + 0x4e4: 0x0010, 0x4e5: 0x0010, 0x4e6: 0x0010, 0x4e7: 0x0010, 0x4e8: 0x0010, + 0x4ea: 0x0010, 0x4eb: 0x0010, 0x4ec: 0x0010, 0x4ed: 0x0010, 0x4ee: 0x0010, 0x4ef: 0x0010, + 0x4f0: 0x0010, 0x4f1: 0x0010, 0x4f2: 0x0010, 0x4f3: 0x0010, 0x4f4: 0x0010, 0x4f5: 0x0010, + 0x4f6: 0x0010, 0x4f8: 0x0010, 0x4f9: 0x0010, 0x4fa: 0x0010, 0x4fb: 0x0010, + 0x4fc: 0x0010, 0x4fe: 0x0010, + // Block 0x14, offset 0x500 + 0x500: 0x2213, 0x501: 0x2213, 0x502: 0x2613, 0x503: 0x2613, 0x504: 0x2213, 0x505: 0x2213, + 0x506: 0x2e13, 0x507: 0x2e13, 0x508: 0x2213, 0x509: 0x2213, 0x50a: 0x2613, 0x50b: 0x2613, + 0x50c: 0x2213, 0x50d: 0x2213, 0x50e: 0x3e13, 0x50f: 0x3e13, 0x510: 0x2213, 0x511: 0x2213, + 0x512: 0x2613, 0x513: 0x2613, 0x514: 0x2213, 0x515: 0x2213, 0x516: 0x2e13, 0x517: 0x2e13, + 0x518: 0x2213, 0x519: 0x2213, 0x51a: 0x2613, 0x51b: 0x2613, 0x51c: 0x2213, 0x51d: 0x2213, + 0x51e: 0xbc53, 0x51f: 0xbc53, 0x520: 0xbf53, 0x521: 0xbf53, 0x522: 0x2212, 0x523: 0x2212, + 0x524: 0x2612, 0x525: 0x2612, 0x526: 0x2212, 0x527: 0x2212, 0x528: 0x2e12, 0x529: 0x2e12, + 0x52a: 0x2212, 0x52b: 0x2212, 0x52c: 0x2612, 0x52d: 0x2612, 0x52e: 0x2212, 0x52f: 0x2212, + 0x530: 0x3e12, 0x531: 0x3e12, 0x532: 0x2212, 0x533: 0x2212, 0x534: 0x2612, 0x535: 0x2612, + 0x536: 0x2212, 0x537: 0x2212, 0x538: 0x2e12, 0x539: 0x2e12, 0x53a: 0x2212, 0x53b: 0x2212, + 0x53c: 0x2612, 0x53d: 0x2612, 0x53e: 0x2212, 0x53f: 0x2212, + // Block 0x15, offset 0x540 + 0x542: 0x0010, + 0x547: 0x0010, 0x549: 0x0010, 0x54b: 0x0010, + 0x54d: 0x0010, 0x54e: 0x0010, 0x54f: 0x0010, 0x551: 0x0010, + 0x552: 0x0010, 0x554: 0x0010, 0x557: 0x0010, + 0x559: 0x0010, 0x55b: 0x0010, 0x55d: 0x0010, + 0x55f: 0x0010, 0x561: 
0x0010, 0x562: 0x0010, + 0x564: 0x0010, 0x567: 0x0010, 0x568: 0x0010, 0x569: 0x0010, + 0x56a: 0x0010, 0x56c: 0x0010, 0x56d: 0x0010, 0x56e: 0x0010, 0x56f: 0x0010, + 0x570: 0x0010, 0x571: 0x0010, 0x572: 0x0010, 0x574: 0x0010, 0x575: 0x0010, + 0x576: 0x0010, 0x577: 0x0010, 0x579: 0x0010, 0x57a: 0x0010, 0x57b: 0x0010, + 0x57c: 0x0010, 0x57e: 0x0010, +} + +// caseIndex: 25 blocks, 1600 entries, 3200 bytes +// Block 0 is the zero block. +var caseIndex = [1600]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x14, 0xc3: 0x15, 0xc4: 0x16, 0xc5: 0x17, 0xc6: 0x01, 0xc7: 0x02, + 0xc8: 0x18, 0xc9: 0x03, 0xca: 0x04, 0xcb: 0x19, 0xcc: 0x1a, 0xcd: 0x05, 0xce: 0x06, 0xcf: 0x07, + 0xd0: 0x1b, 0xd1: 0x1c, 0xd2: 0x1d, 0xd3: 0x1e, 0xd4: 0x1f, 0xd5: 0x20, 0xd6: 0x08, 0xd7: 0x21, + 0xd8: 0x22, 0xd9: 0x23, 0xda: 0x24, 0xdb: 0x25, 0xdc: 0x26, 0xdd: 0x27, 0xde: 0x28, 0xdf: 0x29, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x07, 0xed: 0x08, 0xef: 0x09, + 0xf0: 0x14, 0xf3: 0x16, + // Block 0x4, offset 0x100 + 0x120: 0x2a, 0x121: 0x2b, 0x122: 0x2c, 0x123: 0x2d, 0x124: 0x2e, 0x125: 0x2f, 0x126: 0x30, 0x127: 0x31, + 0x128: 0x32, 0x129: 0x33, 0x12a: 0x34, 0x12b: 0x35, 0x12c: 0x36, 0x12d: 0x37, 0x12e: 0x38, 0x12f: 0x39, + 0x130: 0x3a, 0x131: 0x3b, 0x132: 0x3c, 0x133: 0x3d, 0x134: 0x3e, 0x135: 0x3f, 0x136: 0x40, 0x137: 0x41, + 0x138: 0x42, 0x139: 0x43, 0x13a: 0x44, 0x13b: 0x45, 0x13c: 0x46, 0x13d: 0x47, 0x13e: 0x48, 0x13f: 0x49, + // Block 0x5, offset 0x140 + 0x140: 0x4a, 0x141: 0x4b, 0x142: 0x4c, 0x143: 0x09, 0x144: 0x24, 0x145: 0x24, 0x146: 0x24, 0x147: 0x24, + 0x148: 0x24, 0x149: 0x4d, 0x14a: 0x4e, 0x14b: 0x4f, 0x14c: 0x50, 0x14d: 0x51, 0x14e: 0x52, 0x14f: 0x53, + 0x150: 0x54, 0x151: 0x24, 0x152: 0x24, 0x153: 0x24, 0x154: 0x24, 0x155: 0x24, 0x156: 0x24, 0x157: 0x24, + 0x158: 0x24, 0x159: 0x55, 0x15a: 0x56, 0x15b: 0x57, 0x15c: 0x58, 0x15d: 0x59, 0x15e: 0x5a, 0x15f: 0x5b, + 0x160: 0x5c, 0x161: 0x5d, 0x162: 0x5e, 0x163: 0x5f, 0x164: 0x60, 0x165: 0x61, 0x167: 0x62, + 0x168: 0x63, 0x169: 0x64, 0x16a: 0x65, 0x16c: 0x66, 0x16d: 0x67, 0x16e: 0x68, 0x16f: 0x69, + 0x170: 0x6a, 0x171: 0x6b, 0x172: 0x6c, 0x173: 0x6d, 0x174: 0x6e, 0x175: 0x6f, 0x176: 0x70, 0x177: 0x71, + 0x178: 0x72, 0x179: 0x72, 0x17a: 0x73, 0x17b: 0x72, 0x17c: 0x74, 0x17d: 0x0a, 0x17e: 0x0b, 0x17f: 0x0c, + // Block 0x6, offset 0x180 + 0x180: 0x75, 0x181: 0x76, 0x182: 0x77, 0x183: 0x78, 0x184: 0x0d, 0x185: 0x79, 0x186: 0x7a, + 0x192: 0x7b, 0x193: 0x0e, + 0x1b0: 0x7c, 0x1b1: 0x0f, 0x1b2: 0x72, 0x1b3: 0x7d, 0x1b4: 0x7e, 0x1b5: 0x7f, 0x1b6: 0x80, 0x1b7: 0x81, + 0x1b8: 0x82, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x83, 0x1c2: 0x84, 0x1c3: 0x85, 0x1c4: 0x86, 0x1c5: 0x24, 0x1c6: 0x87, + // Block 0x8, offset 0x200 + 0x200: 0x88, 0x201: 0x24, 0x202: 0x24, 0x203: 0x24, 0x204: 0x24, 0x205: 0x24, 0x206: 0x24, 0x207: 0x24, + 0x208: 0x24, 0x209: 0x24, 0x20a: 0x24, 0x20b: 0x24, 0x20c: 0x24, 0x20d: 0x24, 0x20e: 0x24, 0x20f: 0x24, + 0x210: 0x24, 0x211: 0x24, 0x212: 0x89, 0x213: 0x8a, 0x214: 0x24, 0x215: 0x24, 0x216: 0x24, 0x217: 0x24, + 0x218: 0x8b, 0x219: 0x8c, 0x21a: 0x8d, 0x21b: 0x8e, 0x21c: 0x8f, 0x21d: 0x90, 0x21e: 0x10, 0x21f: 0x91, + 0x220: 0x92, 0x221: 0x93, 0x222: 0x24, 0x223: 0x94, 0x224: 0x95, 0x225: 0x96, 0x226: 0x97, 0x227: 0x98, + 0x228: 0x99, 0x229: 0x9a, 0x22a: 0x9b, 0x22b: 0x9c, 0x22c: 0x9d, 0x22d: 0x9e, 0x22e: 0x9f, 0x22f: 0xa0, + 0x230: 0x24, 0x231: 0x24, 0x232: 0x24, 0x233: 0x24, 0x234: 0x24, 0x235: 0x24, 0x236: 0x24, 0x237: 0x24, + 0x238: 0x24, 
0x239: 0x24, 0x23a: 0x24, 0x23b: 0x24, 0x23c: 0x24, 0x23d: 0x24, 0x23e: 0x24, 0x23f: 0x24, + // Block 0x9, offset 0x240 + 0x240: 0x24, 0x241: 0x24, 0x242: 0x24, 0x243: 0x24, 0x244: 0x24, 0x245: 0x24, 0x246: 0x24, 0x247: 0x24, + 0x248: 0x24, 0x249: 0x24, 0x24a: 0x24, 0x24b: 0x24, 0x24c: 0x24, 0x24d: 0x24, 0x24e: 0x24, 0x24f: 0x24, + 0x250: 0x24, 0x251: 0x24, 0x252: 0x24, 0x253: 0x24, 0x254: 0x24, 0x255: 0x24, 0x256: 0x24, 0x257: 0x24, + 0x258: 0x24, 0x259: 0x24, 0x25a: 0x24, 0x25b: 0x24, 0x25c: 0x24, 0x25d: 0x24, 0x25e: 0x24, 0x25f: 0x24, + 0x260: 0x24, 0x261: 0x24, 0x262: 0x24, 0x263: 0x24, 0x264: 0x24, 0x265: 0x24, 0x266: 0x24, 0x267: 0x24, + 0x268: 0x24, 0x269: 0x24, 0x26a: 0x24, 0x26b: 0x24, 0x26c: 0x24, 0x26d: 0x24, 0x26e: 0x24, 0x26f: 0x24, + 0x270: 0x24, 0x271: 0x24, 0x272: 0x24, 0x273: 0x24, 0x274: 0x24, 0x275: 0x24, 0x276: 0x24, 0x277: 0x24, + 0x278: 0x24, 0x279: 0x24, 0x27a: 0x24, 0x27b: 0x24, 0x27c: 0x24, 0x27d: 0x24, 0x27e: 0x24, 0x27f: 0x24, + // Block 0xa, offset 0x280 + 0x280: 0x24, 0x281: 0x24, 0x282: 0x24, 0x283: 0x24, 0x284: 0x24, 0x285: 0x24, 0x286: 0x24, 0x287: 0x24, + 0x288: 0x24, 0x289: 0x24, 0x28a: 0x24, 0x28b: 0x24, 0x28c: 0x24, 0x28d: 0x24, 0x28e: 0x24, 0x28f: 0x24, + 0x290: 0x24, 0x291: 0x24, 0x292: 0x24, 0x293: 0x24, 0x294: 0x24, 0x295: 0x24, 0x296: 0x24, 0x297: 0x24, + 0x298: 0x24, 0x299: 0x24, 0x29a: 0x24, 0x29b: 0x24, 0x29c: 0x24, 0x29d: 0x24, 0x29e: 0xa1, 0x29f: 0xa2, + // Block 0xb, offset 0x2c0 + 0x2ec: 0x11, 0x2ed: 0xa3, 0x2ee: 0xa4, 0x2ef: 0xa5, + 0x2f0: 0x24, 0x2f1: 0x24, 0x2f2: 0x24, 0x2f3: 0x24, 0x2f4: 0xa6, 0x2f5: 0xa7, 0x2f6: 0xa8, 0x2f7: 0xa9, + 0x2f8: 0xaa, 0x2f9: 0xab, 0x2fa: 0x24, 0x2fb: 0xac, 0x2fc: 0xad, 0x2fd: 0xae, 0x2fe: 0xaf, 0x2ff: 0xb0, + // Block 0xc, offset 0x300 + 0x300: 0xb1, 0x301: 0xb2, 0x302: 0x24, 0x303: 0xb3, 0x305: 0xb4, 0x307: 0xb5, + 0x30a: 0xb6, 0x30b: 0xb7, 0x30c: 0xb8, 0x30d: 0xb9, 0x30e: 0xba, 0x30f: 0xbb, + 0x310: 0xbc, 0x311: 0xbd, 0x312: 0xbe, 0x313: 0xbf, 0x314: 0xc0, 0x315: 0xc1, + 0x318: 0x24, 0x319: 0x24, 0x31a: 0x24, 0x31b: 0x24, 0x31c: 0xc2, 0x31d: 0xc3, + 0x320: 0xc4, 0x321: 0xc5, 0x322: 0xc6, 0x323: 0xc7, 0x324: 0xc8, 0x326: 0xc9, + 0x328: 0xca, 0x329: 0xcb, 0x32a: 0xcc, 0x32b: 0xcd, 0x32c: 0x5f, 0x32d: 0xce, 0x32e: 0xcf, + 0x330: 0x24, 0x331: 0xd0, 0x332: 0xd1, 0x333: 0xd2, 0x334: 0xd3, + 0x33c: 0xd4, 0x33d: 0xd5, 0x33f: 0xd6, + // Block 0xd, offset 0x340 + 0x340: 0xd7, 0x341: 0xd8, 0x342: 0xd9, 0x343: 0xda, 0x344: 0xdb, 0x345: 0xdc, 0x346: 0xdd, 0x347: 0xde, + 0x348: 0xdf, 0x34a: 0xe0, 0x34b: 0xe1, 0x34c: 0xe2, 0x34d: 0xe3, + 0x350: 0xe4, 0x351: 0xe5, 0x352: 0xe6, 0x353: 0xe7, 0x356: 0xe8, 0x357: 0xe9, + 0x358: 0xea, 0x359: 0xeb, 0x35a: 0xec, 0x35b: 0xed, 0x35c: 0xee, + 0x360: 0xef, 0x362: 0xf0, 0x363: 0xf1, 0x366: 0xf2, 0x367: 0xf3, + 0x368: 0xf4, 0x369: 0xf5, 0x36a: 0xf6, 0x36b: 0xf7, + 0x370: 0xf8, 0x371: 0xf9, 0x372: 0xfa, 0x374: 0xfb, 0x375: 0xfc, 0x376: 0xfd, + 0x37b: 0xfe, + // Block 0xe, offset 0x380 + 0x380: 0x24, 0x381: 0x24, 0x382: 0x24, 0x383: 0x24, 0x384: 0x24, 0x385: 0x24, 0x386: 0x24, 0x387: 0x24, + 0x388: 0x24, 0x389: 0x24, 0x38a: 0x24, 0x38b: 0x24, 0x38c: 0x24, 0x38d: 0x24, 0x38e: 0xff, + 0x390: 0x24, 0x391: 0x100, 0x392: 0x24, 0x393: 0x24, 0x394: 0x24, 0x395: 0x101, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x24, 0x3c1: 0x24, 0x3c2: 0x24, 0x3c3: 0x24, 0x3c4: 0x24, 0x3c5: 0x24, 0x3c6: 0x24, 0x3c7: 0x24, + 0x3c8: 0x24, 0x3c9: 0x24, 0x3ca: 0x24, 0x3cb: 0x24, 0x3cc: 0x24, 0x3cd: 0x24, 0x3ce: 0x24, 0x3cf: 0x24, + 0x3d0: 0x102, + // Block 0x10, offset 0x400 + 0x410: 0x24, 0x411: 0x24, 0x412: 0x24, 
0x413: 0x24, 0x414: 0x24, 0x415: 0x24, 0x416: 0x24, 0x417: 0x24, + 0x418: 0x24, 0x419: 0x103, + // Block 0x11, offset 0x440 + 0x460: 0x24, 0x461: 0x24, 0x462: 0x24, 0x463: 0x24, 0x464: 0x24, 0x465: 0x24, 0x466: 0x24, 0x467: 0x24, + 0x468: 0xf7, 0x469: 0x104, 0x46b: 0x105, 0x46c: 0x106, 0x46d: 0x107, 0x46e: 0x108, + 0x479: 0x109, 0x47c: 0x24, 0x47d: 0x10a, 0x47e: 0x10b, 0x47f: 0x10c, + // Block 0x12, offset 0x480 + 0x4b0: 0x24, 0x4b1: 0x10d, 0x4b2: 0x10e, + // Block 0x13, offset 0x4c0 + 0x4c5: 0x10f, 0x4c6: 0x110, + 0x4c9: 0x111, + 0x4d0: 0x112, 0x4d1: 0x113, 0x4d2: 0x114, 0x4d3: 0x115, 0x4d4: 0x116, 0x4d5: 0x117, 0x4d6: 0x118, 0x4d7: 0x119, + 0x4d8: 0x11a, 0x4d9: 0x11b, 0x4da: 0x11c, 0x4db: 0x11d, 0x4dc: 0x11e, 0x4dd: 0x11f, 0x4de: 0x120, 0x4df: 0x121, + 0x4e8: 0x122, 0x4e9: 0x123, 0x4ea: 0x124, + // Block 0x14, offset 0x500 + 0x500: 0x125, 0x504: 0x126, 0x505: 0x127, + 0x50b: 0x128, + 0x520: 0x24, 0x521: 0x24, 0x522: 0x24, 0x523: 0x129, 0x524: 0x12, 0x525: 0x12a, + 0x538: 0x12b, 0x539: 0x13, 0x53a: 0x12c, + // Block 0x15, offset 0x540 + 0x544: 0x12d, 0x545: 0x12e, 0x546: 0x12f, + 0x54f: 0x130, + // Block 0x16, offset 0x580 + 0x590: 0x0a, 0x591: 0x0b, 0x592: 0x0c, 0x593: 0x0d, 0x594: 0x0e, 0x596: 0x0f, + 0x59b: 0x10, 0x59d: 0x11, 0x59e: 0x12, 0x59f: 0x13, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x131, 0x5c1: 0x132, 0x5c4: 0x132, 0x5c5: 0x132, 0x5c6: 0x132, 0x5c7: 0x133, + // Block 0x18, offset 0x600 + 0x620: 0x15, +} + +// sparseOffsets: 289 entries, 578 bytes +var sparseOffsets = []uint16{0x0, 0x9, 0xf, 0x18, 0x24, 0x2e, 0x35, 0x38, 0x3c, 0x3f, 0x43, 0x4d, 0x4f, 0x57, 0x5e, 0x63, 0x71, 0x72, 0x80, 0x8f, 0x99, 0x9c, 0xa3, 0xab, 0xae, 0xb0, 0xbf, 0xc5, 0xd3, 0xde, 0xeb, 0xf6, 0x102, 0x10c, 0x118, 0x123, 0x12f, 0x13b, 0x143, 0x14c, 0x156, 0x161, 0x16d, 0x174, 0x17f, 0x184, 0x18c, 0x18f, 0x194, 0x198, 0x19c, 0x1a3, 0x1ac, 0x1b4, 0x1b5, 0x1be, 0x1c5, 0x1cd, 0x1d3, 0x1d8, 0x1dc, 0x1df, 0x1e1, 0x1e4, 0x1e9, 0x1ea, 0x1ec, 0x1ee, 0x1f0, 0x1f7, 0x1fc, 0x200, 0x209, 0x20c, 0x20f, 0x215, 0x216, 0x221, 0x222, 0x223, 0x228, 0x235, 0x23d, 0x245, 0x24e, 0x257, 0x260, 0x265, 0x268, 0x273, 0x281, 0x283, 0x28a, 0x28e, 0x29a, 0x29b, 0x2a6, 0x2ae, 0x2b6, 0x2bc, 0x2bd, 0x2cb, 0x2d0, 0x2d3, 0x2d8, 0x2dc, 0x2e2, 0x2e7, 0x2ea, 0x2ef, 0x2f4, 0x2f5, 0x2fb, 0x2fd, 0x2fe, 0x300, 0x302, 0x305, 0x306, 0x308, 0x30b, 0x311, 0x315, 0x317, 0x31c, 0x323, 0x32b, 0x334, 0x335, 0x33e, 0x342, 0x347, 0x34f, 0x355, 0x35b, 0x365, 0x36a, 0x373, 0x379, 0x380, 0x384, 0x38c, 0x38e, 0x390, 0x393, 0x395, 0x397, 0x398, 0x399, 0x39b, 0x39d, 0x3a3, 0x3a8, 0x3aa, 0x3b1, 0x3b4, 0x3b6, 0x3bc, 0x3c1, 0x3c3, 0x3c4, 0x3c5, 0x3c6, 0x3c8, 0x3ca, 0x3cc, 0x3cf, 0x3d1, 0x3d4, 0x3dc, 0x3df, 0x3e3, 0x3eb, 0x3ed, 0x3ee, 0x3ef, 0x3f1, 0x3f7, 0x3f9, 0x3fa, 0x3fc, 0x3fe, 0x400, 0x40d, 0x40e, 0x40f, 0x413, 0x415, 0x416, 0x417, 0x418, 0x419, 0x41c, 0x41f, 0x425, 0x426, 0x42a, 0x42e, 0x434, 0x437, 0x43e, 0x442, 0x446, 0x44d, 0x456, 0x45c, 0x462, 0x46c, 0x476, 0x478, 0x481, 0x487, 0x48d, 0x493, 0x496, 0x49c, 0x49f, 0x4a8, 0x4a9, 0x4b0, 0x4b4, 0x4b5, 0x4b8, 0x4ba, 0x4c1, 0x4c9, 0x4cf, 0x4d5, 0x4d6, 0x4dc, 0x4df, 0x4e7, 0x4ee, 0x4f8, 0x500, 0x503, 0x504, 0x505, 0x506, 0x508, 0x509, 0x50b, 0x50d, 0x50f, 0x513, 0x514, 0x516, 0x519, 0x51b, 0x51d, 0x51f, 0x524, 0x529, 0x52d, 0x52e, 0x531, 0x535, 0x540, 0x544, 0x54c, 0x551, 0x555, 0x558, 0x55c, 0x55f, 0x562, 0x567, 0x56b, 0x56f, 0x573, 0x577, 0x579, 0x57b, 0x57e, 0x583, 0x586, 0x588, 0x58b, 0x58d, 0x593, 0x59c, 0x5a1, 0x5a2, 0x5a5, 0x5a6, 0x5a7, 0x5a9, 0x5aa, 0x5ab} + +// sparseValues: 1451 entries, 5804 bytes +var 
sparseValues = [1451]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0004, lo: 0xa8, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xaa}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0004, lo: 0xaf, hi: 0xaf}, + {value: 0x0004, lo: 0xb4, hi: 0xb4}, + {value: 0x001a, lo: 0xb5, hi: 0xb5}, + {value: 0x0054, lo: 0xb7, hi: 0xb7}, + {value: 0x0004, lo: 0xb8, hi: 0xb8}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + // Block 0x1, offset 0x9 + {value: 0x2013, lo: 0x80, hi: 0x96}, + {value: 0x2013, lo: 0x98, hi: 0x9e}, + {value: 0x009a, lo: 0x9f, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xb6}, + {value: 0x2012, lo: 0xb8, hi: 0xbe}, + {value: 0x0252, lo: 0xbf, hi: 0xbf}, + // Block 0x2, offset 0xf + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x011b, lo: 0xb0, hi: 0xb0}, + {value: 0x019a, lo: 0xb1, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb7}, + {value: 0x0012, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x0553, lo: 0xbf, hi: 0xbf}, + // Block 0x3, offset 0x18 + {value: 0x0552, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x01da, lo: 0x89, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xb7}, + {value: 0x0253, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x028a, lo: 0xbf, hi: 0xbf}, + // Block 0x4, offset 0x24 + {value: 0x0117, lo: 0x80, hi: 0x9f}, + {value: 0x2f53, lo: 0xa0, hi: 0xa0}, + {value: 0x0012, lo: 0xa1, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xb3}, + {value: 0x0012, lo: 0xb4, hi: 0xb9}, + {value: 0x090b, lo: 0xba, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x2953, lo: 0xbd, hi: 0xbd}, + {value: 0x098b, lo: 0xbe, hi: 0xbe}, + {value: 0x0a0a, lo: 0xbf, hi: 0xbf}, + // Block 0x5, offset 0x2e + {value: 0x0015, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x97}, + {value: 0x0004, lo: 0x98, hi: 0x9d}, + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0015, lo: 0xa0, hi: 0xa4}, + {value: 0x0004, lo: 0xa5, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xbf}, + // Block 0x6, offset 0x35 + {value: 0x0024, lo: 0x80, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbf}, + // Block 0x7, offset 0x38 + {value: 0x6553, lo: 0x80, hi: 0x8f}, + {value: 0x2013, lo: 0x90, hi: 0x9f}, + {value: 0x5f53, lo: 0xa0, hi: 0xaf}, + {value: 0x2012, lo: 0xb0, hi: 0xbf}, + // Block 0x8, offset 0x3c + {value: 0x5f52, lo: 0x80, hi: 0x8f}, + {value: 0x6552, lo: 0x90, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x9, offset 0x3f + {value: 0x0117, lo: 0x80, hi: 0x81}, + {value: 0x0024, lo: 0x83, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xbf}, + // Block 0xa, offset 0x43 + {value: 0x0f13, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0716, lo: 0x8b, hi: 0x8c}, + {value: 0x0316, lo: 0x8d, hi: 0x8e}, + {value: 0x0f12, lo: 0x8f, hi: 0x8f}, + {value: 0x0117, lo: 0x90, hi: 0xbf}, + // Block 0xb, offset 0x4d + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x6553, lo: 0xb1, hi: 0xbf}, + // Block 0xc, offset 0x4f + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6853, lo: 0x90, hi: 0x96}, + 
{value: 0x0014, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9b, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0xa0, hi: 0xa0}, + {value: 0x6552, lo: 0xa1, hi: 0xaf}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0xd, offset 0x57 + {value: 0x0034, lo: 0x81, hi: 0x82}, + {value: 0x0024, lo: 0x84, hi: 0x84}, + {value: 0x0034, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xaa}, + {value: 0x0010, lo: 0xaf, hi: 0xb3}, + {value: 0x0054, lo: 0xb4, hi: 0xb4}, + // Block 0xe, offset 0x5e + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0024, lo: 0x90, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0014, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xf, offset 0x63 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9c}, + {value: 0x0024, lo: 0x9d, hi: 0x9e}, + {value: 0x0034, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x10, offset 0x71 + {value: 0x0010, lo: 0x80, hi: 0xbf}, + // Block 0x11, offset 0x72 + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0024, lo: 0x9f, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x12, offset 0x80 + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0034, lo: 0xb1, hi: 0xb1}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0024, lo: 0xbf, hi: 0xbf}, + // Block 0x13, offset 0x8f + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0024, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x88}, + {value: 0x0024, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8d, hi: 0xbf}, + // Block 0x14, offset 0x99 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x15, offset 0x9c + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xb1}, + {value: 0x0034, lo: 0xb2, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0x16, offset 0xa3 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 
0x99}, + {value: 0x0014, lo: 0x9a, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0xa3}, + {value: 0x0014, lo: 0xa4, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xad}, + // Block 0x17, offset 0xab + {value: 0x0010, lo: 0x80, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0xa0, hi: 0xaa}, + // Block 0x18, offset 0xae + {value: 0x0010, lo: 0xa0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbd}, + // Block 0x19, offset 0xb0 + {value: 0x0034, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0024, lo: 0xaa, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbf}, + // Block 0x1a, offset 0xbf + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1b, offset 0xc5 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0x1c, offset 0xd3 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb6, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1d, offset 0xde + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xb1}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + // Block 0x1e, offset 0xeb + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x1f, offset 0xf6 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 
0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x99, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x20, offset 0x102 + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x21, offset 0x10c + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x85}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xbf}, + // Block 0x22, offset 0x118 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x23, offset 0x123 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x24, offset 0x12f + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0010, lo: 0xa8, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x25, offset 0x13b + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x26, offset 0x143 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb9}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbf}, + // Block 0x27, offset 0x14c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 
0x0014, lo: 0x86, hi: 0x88}, + {value: 0x0014, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x28, offset 0x156 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x29, offset 0x161 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb2}, + // Block 0x2a, offset 0x16d + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x2b, offset 0x174 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x94, hi: 0x97}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xba, hi: 0xbf}, + // Block 0x2c, offset 0x17f + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x96}, + {value: 0x0010, lo: 0x9a, hi: 0xb1}, + {value: 0x0010, lo: 0xb3, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x2d, offset 0x184 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x94}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9f}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + // Block 0x2e, offset 0x18c + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + // Block 0x2f, offset 0x18f + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x30, offset 0x194 + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + // Block 0x31, offset 0x198 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x32, offset 0x19c + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0034, lo: 0x98, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0034, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb7, hi: 
0xb7}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x33, offset 0x1a3 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xac}, + {value: 0x0034, lo: 0xb1, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xba, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x34, offset 0x1ac + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0024, lo: 0x82, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x86, hi: 0x87}, + {value: 0x0010, lo: 0x88, hi: 0x8c}, + {value: 0x0014, lo: 0x8d, hi: 0x97}, + {value: 0x0014, lo: 0x99, hi: 0xbc}, + // Block 0x35, offset 0x1b4 + {value: 0x0034, lo: 0x86, hi: 0x86}, + // Block 0x36, offset 0x1b5 + {value: 0x0010, lo: 0xab, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbe}, + // Block 0x37, offset 0x1be + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x96, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x99}, + {value: 0x0014, lo: 0x9e, hi: 0xa0}, + {value: 0x0010, lo: 0xa2, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xad}, + {value: 0x0014, lo: 0xb1, hi: 0xb4}, + // Block 0x38, offset 0x1c5 + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x6c53, lo: 0xa0, hi: 0xbf}, + // Block 0x39, offset 0x1cd + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x9a, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x3a, offset 0x1d3 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x3b, offset 0x1d8 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x82, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3c, offset 0x1dc + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3d, offset 0x1df + {value: 0x0010, lo: 0x80, hi: 0x9a}, + {value: 0x0024, lo: 0x9d, hi: 0x9f}, + // Block 0x3e, offset 0x1e1 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x7453, lo: 0xa0, hi: 0xaf}, + {value: 0x7853, lo: 0xb0, hi: 0xbf}, + // Block 0x3f, offset 0x1e4 + {value: 0x7c53, lo: 0x80, hi: 0x8f}, + {value: 0x8053, lo: 0x90, hi: 0x9f}, + {value: 0x7c53, lo: 0xa0, hi: 0xaf}, + {value: 0x0813, lo: 0xb0, hi: 0xb5}, + {value: 0x0892, lo: 0xb8, hi: 0xbd}, + // Block 0x40, offset 0x1e9 + {value: 0x0010, lo: 0x81, hi: 0xbf}, + // Block 0x41, offset 0x1ea + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0010, lo: 0xaf, hi: 0xbf}, + // Block 0x42, offset 0x1ec + {value: 0x0010, lo: 0x81, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x43, offset 0x1ee + {value: 0x0010, lo: 0x80, hi: 
0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb8}, + // Block 0x44, offset 0x1f0 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0034, lo: 0x94, hi: 0x94}, + {value: 0x0010, lo: 0xa0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + // Block 0x45, offset 0x1f7 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xac}, + {value: 0x0010, lo: 0xae, hi: 0xb0}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + // Block 0x46, offset 0x1fc + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x47, offset 0x200 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0014, lo: 0x93, hi: 0x93}, + {value: 0x0004, lo: 0x97, hi: 0x97}, + {value: 0x0024, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0x48, offset 0x209 + {value: 0x0014, lo: 0x8b, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x49, offset 0x20c + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb8}, + // Block 0x4a, offset 0x20f + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x4b, offset 0x215 + {value: 0x0010, lo: 0x80, hi: 0xb5}, + // Block 0x4c, offset 0x216 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbb}, + // Block 0x4d, offset 0x221 + {value: 0x0010, lo: 0x86, hi: 0x8f}, + // Block 0x4e, offset 0x222 + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x4f, offset 0x223 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + // Block 0x50, offset 0x228 + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x9e}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xac}, + {value: 0x0010, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xbc}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x51, offset 0x235 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xa7, hi: 0xa7}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + {value: 0x0034, lo: 0xb5, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbc}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0x52, offset 0x23d + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 
0x0010, lo: 0x84, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x53, offset 0x245 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0030, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xab, hi: 0xab}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + {value: 0x0024, lo: 0xad, hi: 0xb3}, + // Block 0x54, offset 0x24e + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0030, lo: 0xaa, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbf}, + // Block 0x55, offset 0x257 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0030, lo: 0xb2, hi: 0xb3}, + // Block 0x56, offset 0x260 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0x57, offset 0x265 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8d, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x58, offset 0x268 + {value: 0x31ea, lo: 0x80, hi: 0x80}, + {value: 0x326a, lo: 0x81, hi: 0x81}, + {value: 0x32ea, lo: 0x82, hi: 0x82}, + {value: 0x336a, lo: 0x83, hi: 0x83}, + {value: 0x33ea, lo: 0x84, hi: 0x84}, + {value: 0x346a, lo: 0x85, hi: 0x85}, + {value: 0x34ea, lo: 0x86, hi: 0x86}, + {value: 0x356a, lo: 0x87, hi: 0x87}, + {value: 0x35ea, lo: 0x88, hi: 0x88}, + {value: 0x8353, lo: 0x90, hi: 0xba}, + {value: 0x8353, lo: 0xbd, hi: 0xbf}, + // Block 0x59, offset 0x273 + {value: 0x0024, lo: 0x90, hi: 0x92}, + {value: 0x0034, lo: 0x94, hi: 0x99}, + {value: 0x0024, lo: 0x9a, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9f}, + {value: 0x0024, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xb3}, + {value: 0x0024, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb7}, + {value: 0x0024, lo: 0xb8, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xba}, + // Block 0x5a, offset 0x281 + {value: 0x0012, lo: 0x80, hi: 0xab}, + {value: 0x0015, lo: 0xac, hi: 0xbf}, + // Block 0x5b, offset 0x283 + {value: 0x0015, lo: 0x80, hi: 0xaa}, + {value: 0x0012, lo: 0xab, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb8}, + {value: 0x8752, lo: 0xb9, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbc}, + {value: 0x8b52, lo: 0xbd, hi: 0xbd}, + {value: 0x0012, lo: 0xbe, hi: 0xbf}, + // Block 0x5c, offset 0x28a + {value: 0x0012, lo: 0x80, hi: 0x8d}, + {value: 0x8f52, lo: 0x8e, hi: 0x8e}, + {value: 0x0012, lo: 0x8f, hi: 0x9a}, + {value: 0x0015, lo: 0x9b, hi: 0xbf}, + // Block 0x5d, offset 0x28e + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 
0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0024, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb9}, + {value: 0x0024, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbd}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x5e, offset 0x29a + {value: 0x0117, lo: 0x80, hi: 0xbf}, + // Block 0x5f, offset 0x29b + {value: 0x0117, lo: 0x80, hi: 0x95}, + {value: 0x369a, lo: 0x96, hi: 0x96}, + {value: 0x374a, lo: 0x97, hi: 0x97}, + {value: 0x37fa, lo: 0x98, hi: 0x98}, + {value: 0x38aa, lo: 0x99, hi: 0x99}, + {value: 0x395a, lo: 0x9a, hi: 0x9a}, + {value: 0x3a0a, lo: 0x9b, hi: 0x9b}, + {value: 0x0012, lo: 0x9c, hi: 0x9d}, + {value: 0x3abb, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0x9f, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x60, offset 0x2a6 + {value: 0x0812, lo: 0x80, hi: 0x87}, + {value: 0x0813, lo: 0x88, hi: 0x8f}, + {value: 0x0812, lo: 0x90, hi: 0x95}, + {value: 0x0813, lo: 0x98, hi: 0x9d}, + {value: 0x0812, lo: 0xa0, hi: 0xa7}, + {value: 0x0813, lo: 0xa8, hi: 0xaf}, + {value: 0x0812, lo: 0xb0, hi: 0xb7}, + {value: 0x0813, lo: 0xb8, hi: 0xbf}, + // Block 0x61, offset 0x2ae + {value: 0x0004, lo: 0x8b, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8f}, + {value: 0x0054, lo: 0x98, hi: 0x99}, + {value: 0x0054, lo: 0xa4, hi: 0xa4}, + {value: 0x0054, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xaa, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xaf}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x62, offset 0x2b6 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x94, hi: 0x94}, + {value: 0x0014, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa6, hi: 0xaf}, + {value: 0x0015, lo: 0xb1, hi: 0xb1}, + {value: 0x0015, lo: 0xbf, hi: 0xbf}, + // Block 0x63, offset 0x2bc + {value: 0x0015, lo: 0x90, hi: 0x9c}, + // Block 0x64, offset 0x2bd + {value: 0x0024, lo: 0x90, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0xa0}, + {value: 0x0024, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa4}, + {value: 0x0034, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + // Block 0x65, offset 0x2cb + {value: 0x0016, lo: 0x85, hi: 0x86}, + {value: 0x0012, lo: 0x87, hi: 0x89}, + {value: 0xa452, lo: 0x8e, hi: 0x8e}, + {value: 0x1013, lo: 0xa0, hi: 0xaf}, + {value: 0x1012, lo: 0xb0, hi: 0xbf}, + // Block 0x66, offset 0x2d0 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x88}, + // Block 0x67, offset 0x2d3 + {value: 0xa753, lo: 0xb6, hi: 0xb7}, + {value: 0xaa53, lo: 0xb8, hi: 0xb9}, + {value: 0xad53, lo: 0xba, hi: 0xbb}, + {value: 0xaa53, lo: 0xbc, hi: 0xbd}, + {value: 0xa753, lo: 0xbe, hi: 0xbf}, + // Block 0x68, offset 0x2d8 + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6553, lo: 0x90, hi: 0x9f}, + {value: 0xb053, lo: 0xa0, hi: 0xae}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0x69, offset 0x2dc + {value: 0x0117, lo: 0x80, hi: 0xa3}, + {value: 0x0012, lo: 0xa4, hi: 0xa4}, + {value: 0x0716, lo: 0xab, hi: 0xac}, + {value: 0x0316, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb3}, + // Block 0x6a, offset 
0x2e2 + {value: 0x6c52, lo: 0x80, hi: 0x9f}, + {value: 0x7052, lo: 0xa0, hi: 0xa5}, + {value: 0x7052, lo: 0xa7, hi: 0xa7}, + {value: 0x7052, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x6b, offset 0x2e7 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x6c, offset 0x2ea + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0010, lo: 0xb0, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x6d, offset 0x2ef + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9e}, + {value: 0x0024, lo: 0xa0, hi: 0xbf}, + // Block 0x6e, offset 0x2f4 + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + // Block 0x6f, offset 0x2f5 + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0xaa, hi: 0xad}, + {value: 0x0030, lo: 0xae, hi: 0xaf}, + {value: 0x0004, lo: 0xb1, hi: 0xb5}, + {value: 0x0014, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x70, offset 0x2fb + {value: 0x0034, lo: 0x99, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9e}, + // Block 0x71, offset 0x2fd + {value: 0x0004, lo: 0xbc, hi: 0xbe}, + // Block 0x72, offset 0x2fe + {value: 0x0010, lo: 0x85, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x73, offset 0x300 + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0010, lo: 0xa0, hi: 0xba}, + // Block 0x74, offset 0x302 + {value: 0x0010, lo: 0x80, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0xbf}, + // Block 0x75, offset 0x305 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + // Block 0x76, offset 0x306 + {value: 0x0010, lo: 0x90, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x77, offset 0x308 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0xab}, + // Block 0x78, offset 0x30b + {value: 0x0117, lo: 0x80, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb2}, + {value: 0x0024, lo: 0xb4, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x79, offset 0x311 + {value: 0x0117, lo: 0x80, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9d}, + {value: 0x0024, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x7a, offset 0x315 + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb1}, + // Block 0x7b, offset 0x317 + {value: 0x0004, lo: 0x80, hi: 0x96}, + {value: 0x0014, lo: 0x97, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xaf}, + {value: 0x0012, lo: 0xb0, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xbf}, + // Block 0x7c, offset 0x31c + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x0015, lo: 0xb0, hi: 0xb0}, + {value: 0x0012, lo: 0xb1, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x8753, lo: 0xbd, hi: 0xbd}, + {value: 0x0117, lo: 0xbe, hi: 0xbf}, + // Block 0x7d, offset 0x323 + {value: 0x0117, lo: 0x82, hi: 0x83}, + {value: 0x6553, lo: 0x84, hi: 0x84}, + {value: 0x908b, lo: 0x85, hi: 0x85}, + {value: 0x8f53, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xb7, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbf}, + // Block 0x7e, offset 0x32b + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 
0x83, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x8c, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + // Block 0x7f, offset 0x334 + {value: 0x0010, lo: 0x80, hi: 0xb3}, + // Block 0x80, offset 0x335 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xa0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb7}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x81, offset 0x33e + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x82, offset 0x342 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0x92}, + {value: 0x0030, lo: 0x93, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0x83, offset 0x347 + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x84, offset 0x34f + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0004, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x85, offset 0x355 + {value: 0x0010, lo: 0x80, hi: 0xa8}, + {value: 0x0014, lo: 0xa9, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0x86, offset 0x35b + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x87, offset 0x365 + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0024, lo: 0xbe, hi: 0xbf}, + // Block 0x88, offset 0x36a + {value: 0x0024, lo: 0x81, hi: 0x81}, + {value: 0x0004, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + // Block 0x89, offset 0x373 + {value: 0x0010, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x8e}, + {value: 0x0010, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x8a, offset 0x379 + {value: 0x0012, lo: 0x80, hi: 0x92}, + {value: 0xb352, lo: 0x93, hi: 0x93}, + {value: 0x0012, lo: 0x94, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa7}, + {value: 0x74d2, 
lo: 0xb0, hi: 0xbf}, + // Block 0x8b, offset 0x380 + {value: 0x78d2, lo: 0x80, hi: 0x8f}, + {value: 0x7cd2, lo: 0x90, hi: 0x9f}, + {value: 0x80d2, lo: 0xa0, hi: 0xaf}, + {value: 0x7cd2, lo: 0xb0, hi: 0xbf}, + // Block 0x8c, offset 0x384 + {value: 0x0010, lo: 0x80, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x8d, offset 0x38c + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x8e, offset 0x38e + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x8b, hi: 0xbb}, + // Block 0x8f, offset 0x390 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0xbf}, + // Block 0x90, offset 0x393 + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0004, lo: 0xb2, hi: 0xbf}, + // Block 0x91, offset 0x395 + {value: 0x0004, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x93, hi: 0xbf}, + // Block 0x92, offset 0x397 + {value: 0x0010, lo: 0x80, hi: 0xbd}, + // Block 0x93, offset 0x398 + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0x94, offset 0x399 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0xbf}, + // Block 0x95, offset 0x39b + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0xb0, hi: 0xbb}, + // Block 0x96, offset 0x39d + {value: 0x0014, lo: 0x80, hi: 0x8f}, + {value: 0x0054, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0xa0, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xad}, + {value: 0x0024, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + // Block 0x97, offset 0x3a3 + {value: 0x0010, lo: 0x8d, hi: 0x8f}, + {value: 0x0054, lo: 0x92, hi: 0x92}, + {value: 0x0054, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0xb0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x98, offset 0x3a8 + {value: 0x0010, lo: 0x80, hi: 0xbc}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x99, offset 0x3aa + {value: 0x0054, lo: 0x87, hi: 0x87}, + {value: 0x0054, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0054, lo: 0x9a, hi: 0x9a}, + {value: 0x5f53, lo: 0xa1, hi: 0xba}, + {value: 0x0004, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9a, offset 0x3b1 + {value: 0x0004, lo: 0x80, hi: 0x80}, + {value: 0x5f52, lo: 0x81, hi: 0x9a}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + // Block 0x9b, offset 0x3b4 + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbe}, + // Block 0x9c, offset 0x3b6 + {value: 0x0010, lo: 0x82, hi: 0x87}, + {value: 0x0010, lo: 0x8a, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0x97}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0004, lo: 0xa3, hi: 0xa3}, + {value: 0x0014, lo: 0xb9, hi: 0xbb}, + // Block 0x9d, offset 0x3bc + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0010, lo: 0x8d, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xba}, + {value: 0x0010, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9e, offset 0x3c1 + {value: 0x0010, lo: 0x80, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x9d}, + // Block 0x9f, offset 0x3c3 + {value: 0x0010, lo: 0x80, hi: 0xba}, + // Block 0xa0, offset 0x3c4 + {value: 0x0010, lo: 0x80, hi: 0xb4}, + // Block 0xa1, offset 0x3c5 + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0xa2, offset 0x3c6 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // 
Block 0xa3, offset 0x3c8 + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + // Block 0xa4, offset 0x3ca + {value: 0x0010, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xad, hi: 0xbf}, + // Block 0xa5, offset 0x3cc + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0xb5}, + {value: 0x0024, lo: 0xb6, hi: 0xba}, + // Block 0xa6, offset 0x3cf + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa7, offset 0x3d1 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x91, hi: 0x95}, + // Block 0xa8, offset 0x3d4 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x97}, + {value: 0xb653, lo: 0x98, hi: 0x9f}, + {value: 0xb953, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbf}, + // Block 0xa9, offset 0x3dc + {value: 0xb652, lo: 0x80, hi: 0x87}, + {value: 0xb952, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xaa, offset 0x3df + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0xb953, lo: 0xb0, hi: 0xb7}, + {value: 0xb653, lo: 0xb8, hi: 0xbf}, + // Block 0xab, offset 0x3e3 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x93}, + {value: 0xb952, lo: 0x98, hi: 0x9f}, + {value: 0xb652, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbb}, + // Block 0xac, offset 0x3eb + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xad, offset 0x3ed + {value: 0x0010, lo: 0x80, hi: 0xa3}, + // Block 0xae, offset 0x3ee + {value: 0x0010, lo: 0x80, hi: 0xb6}, + // Block 0xaf, offset 0x3ef + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + // Block 0xb0, offset 0x3f1 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xb1, offset 0x3f7 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xb2, offset 0x3f9 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + // Block 0xb3, offset 0x3fa + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + // Block 0xb4, offset 0x3fc + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb9}, + // Block 0xb5, offset 0x3fe + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0xb6, offset 0x400 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x8e, hi: 0x8e}, + {value: 0x0024, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x99, hi: 0xb5}, + {value: 0x0024, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xb7, offset 0x40d + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0xb8, offset 0x40e + {value: 0x0010, lo: 0x80, hi: 0x9c}, + // Block 0xb9, offset 0x40f + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xa4}, + {value: 0x0024, lo: 
0xa5, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + // Block 0xba, offset 0x413 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + // Block 0xbb, offset 0x415 + {value: 0x0010, lo: 0x80, hi: 0x91}, + // Block 0xbc, offset 0x416 + {value: 0x0010, lo: 0x80, hi: 0x88}, + // Block 0xbd, offset 0x417 + {value: 0x5653, lo: 0x80, hi: 0xb2}, + // Block 0xbe, offset 0x418 + {value: 0x5652, lo: 0x80, hi: 0xb2}, + // Block 0xbf, offset 0x419 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc0, offset 0x41c + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xc1, offset 0x41f + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x87}, + {value: 0x0024, lo: 0x88, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x8b}, + {value: 0x0024, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + // Block 0xc2, offset 0x425 + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xc3, offset 0x426 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xc4, offset 0x42a + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xc5, offset 0x42e + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + // Block 0xc6, offset 0x434 + {value: 0x0014, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc7, offset 0x437 + {value: 0x0024, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0xc8, offset 0x43e + {value: 0x0010, lo: 0x84, hi: 0x86}, + {value: 0x0010, lo: 0x90, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + // Block 0xc9, offset 0x442 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xca, offset 0x446 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x89, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + // Block 0xcb, offset 0x44d + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb4}, + {value: 0x0030, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xb7}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0xcc, offset 0x456 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xcd, offset 0x45c + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 
0x0014, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa2}, + {value: 0x0014, lo: 0xa3, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xce, offset 0x462 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xcf, offset 0x46c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0030, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9d, hi: 0xa3}, + {value: 0x0024, lo: 0xa6, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + // Block 0xd0, offset 0x476 + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xd1, offset 0x478 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0x9f, hi: 0x9f}, + // Block 0xd2, offset 0x481 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xd3, offset 0x487 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd4, offset 0x48d + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd5, offset 0x493 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x98, hi: 0x9b}, + {value: 0x0014, lo: 0x9c, hi: 0x9d}, + // Block 0xd6, offset 0x496 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd7, offset 0x49c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd8, offset 0x49f + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0014, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb5}, + {value: 0x0030, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + // Block 0xd9, offset 0x4a8 + {value: 0x0010, lo: 0x80, hi: 0x89}, + // Block 0xda, offset 0x4a9 + {value: 0x0014, lo: 0x9d, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xaa}, + {value: 
0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xdb, offset 0x4b0 + {value: 0x0010, lo: 0x80, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + // Block 0xdc, offset 0x4b4 + {value: 0x5f53, lo: 0xa0, hi: 0xbf}, + // Block 0xdd, offset 0x4b5 + {value: 0x5f52, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xde, offset 0x4b8 + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + {value: 0x0010, lo: 0xaa, hi: 0xbf}, + // Block 0xdf, offset 0x4ba + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0014, lo: 0x94, hi: 0x97}, + {value: 0x0014, lo: 0x9a, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0x9f}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + // Block 0xe0, offset 0x4c1 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x8a}, + {value: 0x0010, lo: 0x8b, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbe}, + // Block 0xe1, offset 0x4c9 + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0014, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x98}, + {value: 0x0014, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0xbf}, + // Block 0xe2, offset 0x4cf + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0014, lo: 0x8a, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9d, hi: 0x9d}, + // Block 0xe3, offset 0x4d5 + {value: 0x0010, lo: 0x80, hi: 0xb8}, + // Block 0xe4, offset 0x4d6 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xe5, offset 0x4dc + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0xe6, offset 0x4df + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0014, lo: 0x92, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xa9}, + {value: 0x0014, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0xe7, offset 0x4e7 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb6}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xe8, offset 0x4ee + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa5}, + {value: 0x0010, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xbf}, + // Block 0xe9, offset 0x4f8 + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0014, lo: 0x90, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0x96}, + {value: 0x0034, lo: 0x97, hi: 
0x97}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xea, offset 0x500 + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + // Block 0xeb, offset 0x503 + {value: 0x0010, lo: 0x80, hi: 0x99}, + // Block 0xec, offset 0x504 + {value: 0x0010, lo: 0x80, hi: 0xae}, + // Block 0xed, offset 0x505 + {value: 0x0010, lo: 0x80, hi: 0x83}, + // Block 0xee, offset 0x506 + {value: 0x0010, lo: 0x80, hi: 0xae}, + {value: 0x0014, lo: 0xb0, hi: 0xb8}, + // Block 0xef, offset 0x508 + {value: 0x0010, lo: 0x80, hi: 0x86}, + // Block 0xf0, offset 0x509 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xf1, offset 0x50b + {value: 0x0010, lo: 0x90, hi: 0xad}, + {value: 0x0034, lo: 0xb0, hi: 0xb4}, + // Block 0xf2, offset 0x50d + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + // Block 0xf3, offset 0x50f + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa3, hi: 0xb7}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xf4, offset 0x513 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + // Block 0xf5, offset 0x514 + {value: 0x2013, lo: 0x80, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xbf}, + // Block 0xf6, offset 0x516 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xf7, offset 0x519 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0014, lo: 0x8f, hi: 0x9f}, + // Block 0xf8, offset 0x51b + {value: 0x0014, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa3, hi: 0xa3}, + // Block 0xf9, offset 0x51d + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbc}, + // Block 0xfa, offset 0x51f + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0034, lo: 0x9e, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa3}, + // Block 0xfb, offset 0x524 + {value: 0x0030, lo: 0xa5, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xa9}, + {value: 0x0030, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbf}, + // Block 0xfc, offset 0x529 + {value: 0x0034, lo: 0x80, hi: 0x82}, + {value: 0x0024, lo: 0x85, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8b}, + {value: 0x0024, lo: 0xaa, hi: 0xad}, + // Block 0xfd, offset 0x52d + {value: 0x0024, lo: 0x82, hi: 0x84}, + // Block 0xfe, offset 0x52e + {value: 0x0013, lo: 0x80, hi: 0x99}, + {value: 0x0012, lo: 0x9a, hi: 0xb3}, + {value: 0x0013, lo: 0xb4, hi: 0xbf}, + // Block 0xff, offset 0x531 + {value: 0x0013, lo: 0x80, hi: 0x8d}, + {value: 0x0012, lo: 0x8e, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0xa7}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x100, offset 0x535 + {value: 0x0013, lo: 0x80, hi: 0x81}, + {value: 0x0012, lo: 0x82, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0x9c}, + {value: 0x0013, lo: 0x9e, hi: 0x9f}, + {value: 0x0013, lo: 0xa2, hi: 0xa2}, + {value: 0x0013, lo: 0xa5, hi: 0xa6}, + {value: 0x0013, lo: 0xa9, hi: 0xac}, + {value: 0x0013, lo: 0xae, hi: 0xb5}, + {value: 0x0012, lo: 0xb6, hi: 0xb9}, + {value: 0x0012, lo: 0xbb, hi: 0xbb}, + {value: 0x0012, lo: 0xbd, hi: 0xbf}, + // Block 0x101, offset 0x540 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0012, lo: 0x85, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x102, offset 0x544 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0013, lo: 
0x84, hi: 0x85}, + {value: 0x0013, lo: 0x87, hi: 0x8a}, + {value: 0x0013, lo: 0x8d, hi: 0x94}, + {value: 0x0013, lo: 0x96, hi: 0x9c}, + {value: 0x0012, lo: 0x9e, hi: 0xb7}, + {value: 0x0013, lo: 0xb8, hi: 0xb9}, + {value: 0x0013, lo: 0xbb, hi: 0xbe}, + // Block 0x103, offset 0x54c + {value: 0x0013, lo: 0x80, hi: 0x84}, + {value: 0x0013, lo: 0x86, hi: 0x86}, + {value: 0x0013, lo: 0x8a, hi: 0x90}, + {value: 0x0012, lo: 0x92, hi: 0xab}, + {value: 0x0013, lo: 0xac, hi: 0xbf}, + // Block 0x104, offset 0x551 + {value: 0x0013, lo: 0x80, hi: 0x85}, + {value: 0x0012, lo: 0x86, hi: 0x9f}, + {value: 0x0013, lo: 0xa0, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbf}, + // Block 0x105, offset 0x555 + {value: 0x0012, lo: 0x80, hi: 0x93}, + {value: 0x0013, lo: 0x94, hi: 0xad}, + {value: 0x0012, lo: 0xae, hi: 0xbf}, + // Block 0x106, offset 0x558 + {value: 0x0012, lo: 0x80, hi: 0x87}, + {value: 0x0013, lo: 0x88, hi: 0xa1}, + {value: 0x0012, lo: 0xa2, hi: 0xbb}, + {value: 0x0013, lo: 0xbc, hi: 0xbf}, + // Block 0x107, offset 0x55c + {value: 0x0013, lo: 0x80, hi: 0x95}, + {value: 0x0012, lo: 0x96, hi: 0xaf}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x108, offset 0x55f + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0012, lo: 0x8a, hi: 0xa5}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x109, offset 0x562 + {value: 0x0013, lo: 0x80, hi: 0x80}, + {value: 0x0012, lo: 0x82, hi: 0x9a}, + {value: 0x0012, lo: 0x9c, hi: 0xa1}, + {value: 0x0013, lo: 0xa2, hi: 0xba}, + {value: 0x0012, lo: 0xbc, hi: 0xbf}, + // Block 0x10a, offset 0x567 + {value: 0x0012, lo: 0x80, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0xb4}, + {value: 0x0012, lo: 0xb6, hi: 0xbf}, + // Block 0x10b, offset 0x56b + {value: 0x0012, lo: 0x80, hi: 0x8e}, + {value: 0x0012, lo: 0x90, hi: 0x95}, + {value: 0x0013, lo: 0x96, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x10c, offset 0x56f + {value: 0x0012, lo: 0x80, hi: 0x88}, + {value: 0x0012, lo: 0x8a, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x10d, offset 0x573 + {value: 0x0012, lo: 0x80, hi: 0x82}, + {value: 0x0012, lo: 0x84, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8b}, + {value: 0x0010, lo: 0x8e, hi: 0xbf}, + // Block 0x10e, offset 0x577 + {value: 0x0014, lo: 0x80, hi: 0xb6}, + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x10f, offset 0x579 + {value: 0x0014, lo: 0x80, hi: 0xac}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x110, offset 0x57b + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x9b, hi: 0x9f}, + {value: 0x0014, lo: 0xa1, hi: 0xaf}, + // Block 0x111, offset 0x57e + {value: 0x0024, lo: 0x80, hi: 0x86}, + {value: 0x0024, lo: 0x88, hi: 0x98}, + {value: 0x0024, lo: 0x9b, hi: 0xa1}, + {value: 0x0024, lo: 0xa3, hi: 0xa4}, + {value: 0x0024, lo: 0xa6, hi: 0xaa}, + // Block 0x112, offset 0x583 + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + // Block 0x113, offset 0x586 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + // Block 0x114, offset 0x588 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0024, lo: 0xac, hi: 0xaf}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x115, offset 0x58b + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0034, lo: 0x90, hi: 0x96}, + // Block 0x116, offset 0x58d + {value: 0xbc52, lo: 0x80, hi: 0x81}, + {value: 0xbf52, lo: 0x82, hi: 0x83}, + {value: 0x0024, lo: 0x84, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 
0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x117, offset 0x593 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x9f}, + {value: 0x0010, lo: 0xa1, hi: 0xa2}, + {value: 0x0010, lo: 0xa4, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb7}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + // Block 0x118, offset 0x59c + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x9b}, + {value: 0x0010, lo: 0xa1, hi: 0xa3}, + {value: 0x0010, lo: 0xa5, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xbb}, + // Block 0x119, offset 0x5a1 + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x11a, offset 0x5a2 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x11b, offset 0x5a5 + {value: 0x0013, lo: 0x80, hi: 0x89}, + // Block 0x11c, offset 0x5a6 + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x11d, offset 0x5a7 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0014, lo: 0xa0, hi: 0xbf}, + // Block 0x11e, offset 0x5a9 + {value: 0x0014, lo: 0x80, hi: 0xbf}, + // Block 0x11f, offset 0x5aa + {value: 0x0014, lo: 0x80, hi: 0xaf}, +} + +// Total table size 15070 bytes (14KiB); checksum: 1EB13752 diff --git a/vendor/golang.org/x/text/cases/tables13.0.0.go b/vendor/golang.org/x/text/cases/tables13.0.0.go new file mode 100644 index 000000000..cd874775b --- /dev/null +++ b/vendor/golang.org/x/text/cases/tables13.0.0.go @@ -0,0 +1,2400 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.16 +// +build go1.16 + +package cases + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "13.0.0" + +var xorData string = "" + // Size: 192 bytes + "\x00\x06\x07\x00\x01?\x00\x0f\x03\x00\x0f\x12\x00\x0f\x1f\x00\x0f\x1d" + + "\x00\x01\x13\x00\x0f\x16\x00\x0f\x0b\x00\x0f3\x00\x0f7\x00\x01#\x00\x0f?" 
+ + "\x00\x0e'\x00\x0f/\x00\x0e>\x00\x0f*\x00\x0c&\x00\x0c*\x00\x0c;\x00\x0c9" + + "\x00\x0c%\x00\x01\x08\x00\x03\x0d\x00\x03\x09\x00\x02\x06\x00\x02\x02" + + "\x00\x02\x0c\x00\x01\x00\x00\x01\x03\x00\x01\x01\x00\x01 \x00\x01\x0c" + + "\x00\x01\x10\x00\x03\x10\x00\x036 \x00\x037 \x00\x0b#\x10\x00\x0b 0\x00" + + "\x0b!\x10\x00\x0b!0\x001\x00\x00\x0b(\x04\x00\x03\x04\x1e\x00\x0b)\x08" + + "\x00\x03\x0a\x00\x02:\x00\x02>\x00\x02,\x00\x02\x00\x00\x02\x10\x00\x01<" + + "\x00\x01&\x00\x01*\x00\x01.\x00\x010\x003 \x00\x01\x18\x00\x01(\x00\x01" + + "\x1e\x00\x01\x22" + +var exceptions string = "" + // Size: 2450 bytes + "\x00\x12\x12μΜΜ\x12\x12ssSSSs\x13\x18i̇i̇\x10\x09II\x13\x1bʼnʼNʼN\x11" + + "\x09sSS\x12\x12dždžDž\x12\x12dždžDŽ\x10\x12DŽDž\x12\x12ljljLj\x12\x12ljljLJ\x10\x12LJLj" + + "\x12\x12njnjNj\x12\x12njnjNJ\x10\x12NJNj\x13\x1bǰJ̌J̌\x12\x12dzdzDz\x12\x12dzdzDZ\x10" + + "\x12DZDz\x13\x18ⱥⱥ\x13\x18ⱦⱦ\x10\x1bⱾⱾ\x10\x1bⱿⱿ\x10\x1bⱯⱯ\x10\x1bⱭⱭ\x10" + + "\x1bⱰⱰ\x10\x1bꞫꞫ\x10\x1bꞬꞬ\x10\x1bꞍꞍ\x10\x1bꞪꞪ\x10\x1bꞮꞮ\x10\x1bⱢⱢ\x10" + + "\x1bꞭꞭ\x10\x1bⱮⱮ\x10\x1bⱤⱤ\x10\x1bꟅꟅ\x10\x1bꞱꞱ\x10\x1bꞲꞲ\x10\x1bꞰꞰ2\x12ι" + + "ΙΙ\x166ΐΪ́Ϊ́\x166ΰΫ́Ϋ́\x12\x12σΣΣ\x12\x12βΒΒ\x12\x12θΘΘ\x12\x12" + + "φΦΦ\x12\x12πΠΠ\x12\x12κΚΚ\x12\x12ρΡΡ\x12\x12εΕΕ\x14$եւԵՒԵւ\x10\x1bᲐა" + + "\x10\x1bᲑბ\x10\x1bᲒგ\x10\x1bᲓდ\x10\x1bᲔე\x10\x1bᲕვ\x10\x1bᲖზ\x10\x1bᲗთ" + + "\x10\x1bᲘი\x10\x1bᲙკ\x10\x1bᲚლ\x10\x1bᲛმ\x10\x1bᲜნ\x10\x1bᲝო\x10\x1bᲞპ" + + "\x10\x1bᲟჟ\x10\x1bᲠრ\x10\x1bᲡს\x10\x1bᲢტ\x10\x1bᲣუ\x10\x1bᲤფ\x10\x1bᲥქ" + + "\x10\x1bᲦღ\x10\x1bᲧყ\x10\x1bᲨშ\x10\x1bᲩჩ\x10\x1bᲪც\x10\x1bᲫძ\x10\x1bᲬწ" + + "\x10\x1bᲭჭ\x10\x1bᲮხ\x10\x1bᲯჯ\x10\x1bᲰჰ\x10\x1bᲱჱ\x10\x1bᲲჲ\x10\x1bᲳჳ" + + "\x10\x1bᲴჴ\x10\x1bᲵჵ\x10\x1bᲶჶ\x10\x1bᲷჷ\x10\x1bᲸჸ\x10\x1bᲹჹ\x10\x1bᲺჺ" + + "\x10\x1bᲽჽ\x10\x1bᲾჾ\x10\x1bᲿჿ\x12\x12вВВ\x12\x12дДД\x12\x12оОО\x12\x12с" + + "СС\x12\x12тТТ\x12\x12тТТ\x12\x12ъЪЪ\x12\x12ѣѢѢ\x13\x1bꙋꙊꙊ\x13\x1bẖH̱H̱" + + "\x13\x1bẗT̈T̈\x13\x1bẘW̊W̊\x13\x1bẙY̊Y̊\x13\x1baʾAʾAʾ\x13\x1bṡṠṠ\x12" + + "\x10ssß\x14$ὐΥ̓Υ̓\x166ὒΥ̓̀Υ̓̀\x166ὔΥ̓́Υ̓́\x166ὖΥ̓͂Υ̓͂\x15+ἀιἈΙᾈ" + + "\x15+ἁιἉΙᾉ\x15+ἂιἊΙᾊ\x15+ἃιἋΙᾋ\x15+ἄιἌΙᾌ\x15+ἅιἍΙᾍ\x15+ἆιἎΙᾎ\x15+ἇιἏΙᾏ" + + "\x15\x1dἀιᾀἈΙ\x15\x1dἁιᾁἉΙ\x15\x1dἂιᾂἊΙ\x15\x1dἃιᾃἋΙ\x15\x1dἄιᾄἌΙ\x15" + + "\x1dἅιᾅἍΙ\x15\x1dἆιᾆἎΙ\x15\x1dἇιᾇἏΙ\x15+ἠιἨΙᾘ\x15+ἡιἩΙᾙ\x15+ἢιἪΙᾚ\x15+ἣι" + + "ἫΙᾛ\x15+ἤιἬΙᾜ\x15+ἥιἭΙᾝ\x15+ἦιἮΙᾞ\x15+ἧιἯΙᾟ\x15\x1dἠιᾐἨΙ\x15\x1dἡιᾑἩΙ" + + "\x15\x1dἢιᾒἪΙ\x15\x1dἣιᾓἫΙ\x15\x1dἤιᾔἬΙ\x15\x1dἥιᾕἭΙ\x15\x1dἦιᾖἮΙ\x15" + + "\x1dἧιᾗἯΙ\x15+ὠιὨΙᾨ\x15+ὡιὩΙᾩ\x15+ὢιὪΙᾪ\x15+ὣιὫΙᾫ\x15+ὤιὬΙᾬ\x15+ὥιὭΙᾭ" + + "\x15+ὦιὮΙᾮ\x15+ὧιὯΙᾯ\x15\x1dὠιᾠὨΙ\x15\x1dὡιᾡὩΙ\x15\x1dὢιᾢὪΙ\x15\x1dὣιᾣὫΙ" + + "\x15\x1dὤιᾤὬΙ\x15\x1dὥιᾥὭΙ\x15\x1dὦιᾦὮΙ\x15\x1dὧιᾧὯΙ\x15-ὰιᾺΙᾺͅ\x14#αιΑΙ" + + "ᾼ\x14$άιΆΙΆͅ\x14$ᾶΑ͂Α͂\x166ᾶιΑ͂Ιᾼ͂\x14\x1cαιᾳΑΙ\x12\x12ιΙΙ\x15-ὴιῊΙ" + + "Ὴͅ\x14#ηιΗΙῌ\x14$ήιΉΙΉͅ\x14$ῆΗ͂Η͂\x166ῆιΗ͂Ιῌ͂\x14\x1cηιῃΗΙ\x166ῒΙ" + + "̈̀Ϊ̀\x166ΐΪ́Ϊ́\x14$ῖΙ͂Ι͂\x166ῗΪ͂Ϊ͂\x166ῢΫ̀Ϋ̀\x166ΰΫ́Ϋ" + + "́\x14$ῤΡ̓Ρ̓\x14$ῦΥ͂Υ͂\x166ῧΫ͂Ϋ͂\x15-ὼιῺΙῺͅ\x14#ωιΩΙῼ\x14$ώιΏΙΏͅ" + + "\x14$ῶΩ͂Ω͂\x166ῶιΩ͂Ιῼ͂\x14\x1cωιῳΩΙ\x12\x10ωω\x11\x08kk\x12\x10åå\x12" + + "\x10ɫɫ\x12\x10ɽɽ\x10\x12ȺȺ\x10\x12ȾȾ\x12\x10ɑɑ\x12\x10ɱɱ\x12\x10ɐɐ\x12" + + "\x10ɒɒ\x12\x10ȿȿ\x12\x10ɀɀ\x12\x10ɥɥ\x12\x10ɦɦ\x12\x10ɜɜ\x12\x10ɡɡ\x12" + + "\x10ɬɬ\x12\x10ɪɪ\x12\x10ʞʞ\x12\x10ʇʇ\x12\x10ʝʝ\x12\x10ʂʂ\x12\x12ffFFFf" + + "\x12\x12fiFIFi\x12\x12flFLFl\x13\x1bffiFFIFfi\x13\x1bfflFFLFfl\x12\x12st" + + "STSt\x12\x12stSTSt\x14$մնՄՆՄն\x14$մեՄԵՄե\x14$միՄԻՄի\x14$վնՎՆՎն\x14$մխՄԽՄ" + + "խ" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. 
The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. 
+ } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// caseTrie. Total size: 12538 bytes (12.24 KiB). Checksum: af4dfa7d60c71d4c. +type caseTrie struct{} + +func newCaseTrie(i int) *caseTrie { + return &caseTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *caseTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 20: + return uint16(caseValues[n<<6+uint32(b)]) + default: + n -= 20 + return uint16(sparse.lookup(n, b)) + } +} + +// caseValues: 22 blocks, 1408 entries, 2816 bytes +// The third block is the zero block. +var caseValues = [1408]uint16{ + // Block 0x0, offset 0x0 + 0x27: 0x0054, + 0x2e: 0x0054, + 0x30: 0x0010, 0x31: 0x0010, 0x32: 0x0010, 0x33: 0x0010, 0x34: 0x0010, 0x35: 0x0010, + 0x36: 0x0010, 0x37: 0x0010, 0x38: 0x0010, 0x39: 0x0010, 0x3a: 0x0054, + // Block 0x1, offset 0x40 + 0x41: 0x2013, 0x42: 0x2013, 0x43: 0x2013, 0x44: 0x2013, 0x45: 0x2013, + 0x46: 0x2013, 0x47: 0x2013, 0x48: 0x2013, 0x49: 0x2013, 0x4a: 0x2013, 0x4b: 0x2013, + 0x4c: 0x2013, 0x4d: 0x2013, 0x4e: 0x2013, 0x4f: 0x2013, 0x50: 0x2013, 0x51: 0x2013, + 0x52: 0x2013, 0x53: 0x2013, 0x54: 0x2013, 0x55: 0x2013, 0x56: 0x2013, 0x57: 0x2013, + 0x58: 0x2013, 0x59: 0x2013, 0x5a: 0x2013, + 0x5e: 0x0004, 0x5f: 0x0010, 0x60: 0x0004, 0x61: 0x2012, 0x62: 0x2012, 0x63: 0x2012, + 0x64: 0x2012, 0x65: 0x2012, 0x66: 0x2012, 0x67: 0x2012, 0x68: 0x2012, 0x69: 0x2012, + 0x6a: 0x2012, 0x6b: 0x2012, 0x6c: 0x2012, 0x6d: 0x2012, 0x6e: 0x2012, 0x6f: 0x2012, + 0x70: 0x2012, 0x71: 0x2012, 0x72: 0x2012, 0x73: 0x2012, 0x74: 0x2012, 0x75: 0x2012, + 0x76: 0x2012, 0x77: 0x2012, 0x78: 0x2012, 0x79: 0x2012, 0x7a: 0x2012, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x0852, 0xc1: 0x0b53, 0xc2: 0x0113, 0xc3: 0x0112, 0xc4: 0x0113, 0xc5: 0x0112, + 0xc6: 0x0b53, 0xc7: 0x0f13, 0xc8: 0x0f12, 0xc9: 0x0e53, 0xca: 0x1153, 0xcb: 0x0713, + 0xcc: 0x0712, 0xcd: 0x0012, 0xce: 0x1453, 0xcf: 0x1753, 0xd0: 0x1a53, 0xd1: 0x0313, + 0xd2: 0x0312, 0xd3: 0x1d53, 0xd4: 0x2053, 0xd5: 0x2352, 0xd6: 0x2653, 0xd7: 0x2653, + 0xd8: 0x0113, 0xd9: 0x0112, 0xda: 0x2952, 0xdb: 0x0012, 0xdc: 0x1d53, 0xdd: 0x2c53, + 0xde: 0x2f52, 0xdf: 0x3253, 0xe0: 0x0113, 0xe1: 0x0112, 0xe2: 0x0113, 0xe3: 0x0112, + 0xe4: 0x0113, 0xe5: 0x0112, 0xe6: 0x3553, 0xe7: 0x0f13, 0xe8: 0x0f12, 0xe9: 0x3853, + 0xea: 0x0012, 0xeb: 0x0012, 0xec: 0x0113, 0xed: 0x0112, 0xee: 0x3553, 0xef: 0x1f13, + 0xf0: 0x1f12, 0xf1: 0x3b53, 0xf2: 0x3e53, 0xf3: 0x0713, 0xf4: 0x0712, 0xf5: 0x0313, + 0xf6: 0x0312, 0xf7: 0x4153, 0xf8: 0x0113, 0xf9: 0x0112, 0xfa: 0x0012, 0xfb: 0x0010, + 0xfc: 0x0113, 
0xfd: 0x0112, 0xfe: 0x0012, 0xff: 0x4452, + // Block 0x4, offset 0x100 + 0x100: 0x0010, 0x101: 0x0010, 0x102: 0x0010, 0x103: 0x0010, 0x104: 0x02db, 0x105: 0x0359, + 0x106: 0x03da, 0x107: 0x043b, 0x108: 0x04b9, 0x109: 0x053a, 0x10a: 0x059b, 0x10b: 0x0619, + 0x10c: 0x069a, 0x10d: 0x0313, 0x10e: 0x0312, 0x10f: 0x1f13, 0x110: 0x1f12, 0x111: 0x0313, + 0x112: 0x0312, 0x113: 0x0713, 0x114: 0x0712, 0x115: 0x0313, 0x116: 0x0312, 0x117: 0x0f13, + 0x118: 0x0f12, 0x119: 0x0313, 0x11a: 0x0312, 0x11b: 0x0713, 0x11c: 0x0712, 0x11d: 0x1452, + 0x11e: 0x0113, 0x11f: 0x0112, 0x120: 0x0113, 0x121: 0x0112, 0x122: 0x0113, 0x123: 0x0112, + 0x124: 0x0113, 0x125: 0x0112, 0x126: 0x0113, 0x127: 0x0112, 0x128: 0x0113, 0x129: 0x0112, + 0x12a: 0x0113, 0x12b: 0x0112, 0x12c: 0x0113, 0x12d: 0x0112, 0x12e: 0x0113, 0x12f: 0x0112, + 0x130: 0x06fa, 0x131: 0x07ab, 0x132: 0x0829, 0x133: 0x08aa, 0x134: 0x0113, 0x135: 0x0112, + 0x136: 0x2353, 0x137: 0x4453, 0x138: 0x0113, 0x139: 0x0112, 0x13a: 0x0113, 0x13b: 0x0112, + 0x13c: 0x0113, 0x13d: 0x0112, 0x13e: 0x0113, 0x13f: 0x0112, + // Block 0x5, offset 0x140 + 0x140: 0x0a8a, 0x141: 0x0313, 0x142: 0x0312, 0x143: 0x0853, 0x144: 0x4753, 0x145: 0x4a53, + 0x146: 0x0113, 0x147: 0x0112, 0x148: 0x0113, 0x149: 0x0112, 0x14a: 0x0113, 0x14b: 0x0112, + 0x14c: 0x0113, 0x14d: 0x0112, 0x14e: 0x0113, 0x14f: 0x0112, 0x150: 0x0b0a, 0x151: 0x0b8a, + 0x152: 0x0c0a, 0x153: 0x0b52, 0x154: 0x0b52, 0x155: 0x0012, 0x156: 0x0e52, 0x157: 0x1152, + 0x158: 0x0012, 0x159: 0x1752, 0x15a: 0x0012, 0x15b: 0x1a52, 0x15c: 0x0c8a, 0x15d: 0x0012, + 0x15e: 0x0012, 0x15f: 0x0012, 0x160: 0x1d52, 0x161: 0x0d0a, 0x162: 0x0012, 0x163: 0x2052, + 0x164: 0x0012, 0x165: 0x0d8a, 0x166: 0x0e0a, 0x167: 0x0012, 0x168: 0x2652, 0x169: 0x2652, + 0x16a: 0x0e8a, 0x16b: 0x0f0a, 0x16c: 0x0f8a, 0x16d: 0x0012, 0x16e: 0x0012, 0x16f: 0x1d52, + 0x170: 0x0012, 0x171: 0x100a, 0x172: 0x2c52, 0x173: 0x0012, 0x174: 0x0012, 0x175: 0x3252, + 0x176: 0x0012, 0x177: 0x0012, 0x178: 0x0012, 0x179: 0x0012, 0x17a: 0x0012, 0x17b: 0x0012, + 0x17c: 0x0012, 0x17d: 0x108a, 0x17e: 0x0012, 0x17f: 0x0012, + // Block 0x6, offset 0x180 + 0x180: 0x3552, 0x181: 0x0012, 0x182: 0x110a, 0x183: 0x3852, 0x184: 0x0012, 0x185: 0x0012, + 0x186: 0x0012, 0x187: 0x118a, 0x188: 0x3552, 0x189: 0x4752, 0x18a: 0x3b52, 0x18b: 0x3e52, + 0x18c: 0x4a52, 0x18d: 0x0012, 0x18e: 0x0012, 0x18f: 0x0012, 0x190: 0x0012, 0x191: 0x0012, + 0x192: 0x4152, 0x193: 0x0012, 0x194: 0x0010, 0x195: 0x0012, 0x196: 0x0012, 0x197: 0x0012, + 0x198: 0x0012, 0x199: 0x0012, 0x19a: 0x0012, 0x19b: 0x0012, 0x19c: 0x0012, 0x19d: 0x120a, + 0x19e: 0x128a, 0x19f: 0x0012, 0x1a0: 0x0012, 0x1a1: 0x0012, 0x1a2: 0x0012, 0x1a3: 0x0012, + 0x1a4: 0x0012, 0x1a5: 0x0012, 0x1a6: 0x0012, 0x1a7: 0x0012, 0x1a8: 0x0012, 0x1a9: 0x0012, + 0x1aa: 0x0012, 0x1ab: 0x0012, 0x1ac: 0x0012, 0x1ad: 0x0012, 0x1ae: 0x0012, 0x1af: 0x0012, + 0x1b0: 0x0015, 0x1b1: 0x0015, 0x1b2: 0x0015, 0x1b3: 0x0015, 0x1b4: 0x0015, 0x1b5: 0x0015, + 0x1b6: 0x0015, 0x1b7: 0x0015, 0x1b8: 0x0015, 0x1b9: 0x0014, 0x1ba: 0x0014, 0x1bb: 0x0014, + 0x1bc: 0x0014, 0x1bd: 0x0014, 0x1be: 0x0014, 0x1bf: 0x0014, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0024, 0x1c1: 0x0024, 0x1c2: 0x0024, 0x1c3: 0x0024, 0x1c4: 0x0024, 0x1c5: 0x130d, + 0x1c6: 0x0024, 0x1c7: 0x0034, 0x1c8: 0x0034, 0x1c9: 0x0034, 0x1ca: 0x0024, 0x1cb: 0x0024, + 0x1cc: 0x0024, 0x1cd: 0x0034, 0x1ce: 0x0034, 0x1cf: 0x0014, 0x1d0: 0x0024, 0x1d1: 0x0024, + 0x1d2: 0x0024, 0x1d3: 0x0034, 0x1d4: 0x0034, 0x1d5: 0x0034, 0x1d6: 0x0034, 0x1d7: 0x0024, + 0x1d8: 0x0034, 0x1d9: 0x0034, 0x1da: 0x0034, 0x1db: 0x0024, 0x1dc: 0x0034, 
0x1dd: 0x0034, + 0x1de: 0x0034, 0x1df: 0x0034, 0x1e0: 0x0034, 0x1e1: 0x0034, 0x1e2: 0x0034, 0x1e3: 0x0024, + 0x1e4: 0x0024, 0x1e5: 0x0024, 0x1e6: 0x0024, 0x1e7: 0x0024, 0x1e8: 0x0024, 0x1e9: 0x0024, + 0x1ea: 0x0024, 0x1eb: 0x0024, 0x1ec: 0x0024, 0x1ed: 0x0024, 0x1ee: 0x0024, 0x1ef: 0x0024, + 0x1f0: 0x0113, 0x1f1: 0x0112, 0x1f2: 0x0113, 0x1f3: 0x0112, 0x1f4: 0x0014, 0x1f5: 0x0004, + 0x1f6: 0x0113, 0x1f7: 0x0112, 0x1fa: 0x0015, 0x1fb: 0x4d52, + 0x1fc: 0x5052, 0x1fd: 0x5052, 0x1ff: 0x5353, + // Block 0x8, offset 0x200 + 0x204: 0x0004, 0x205: 0x0004, + 0x206: 0x2a13, 0x207: 0x0054, 0x208: 0x2513, 0x209: 0x2713, 0x20a: 0x2513, + 0x20c: 0x5653, 0x20e: 0x5953, 0x20f: 0x5c53, 0x210: 0x138a, 0x211: 0x2013, + 0x212: 0x2013, 0x213: 0x2013, 0x214: 0x2013, 0x215: 0x2013, 0x216: 0x2013, 0x217: 0x2013, + 0x218: 0x2013, 0x219: 0x2013, 0x21a: 0x2013, 0x21b: 0x2013, 0x21c: 0x2013, 0x21d: 0x2013, + 0x21e: 0x2013, 0x21f: 0x2013, 0x220: 0x5f53, 0x221: 0x5f53, 0x223: 0x5f53, + 0x224: 0x5f53, 0x225: 0x5f53, 0x226: 0x5f53, 0x227: 0x5f53, 0x228: 0x5f53, 0x229: 0x5f53, + 0x22a: 0x5f53, 0x22b: 0x5f53, 0x22c: 0x2a12, 0x22d: 0x2512, 0x22e: 0x2712, 0x22f: 0x2512, + 0x230: 0x14ca, 0x231: 0x2012, 0x232: 0x2012, 0x233: 0x2012, 0x234: 0x2012, 0x235: 0x2012, + 0x236: 0x2012, 0x237: 0x2012, 0x238: 0x2012, 0x239: 0x2012, 0x23a: 0x2012, 0x23b: 0x2012, + 0x23c: 0x2012, 0x23d: 0x2012, 0x23e: 0x2012, 0x23f: 0x2012, + // Block 0x9, offset 0x240 + 0x240: 0x5f52, 0x241: 0x5f52, 0x242: 0x160a, 0x243: 0x5f52, 0x244: 0x5f52, 0x245: 0x5f52, + 0x246: 0x5f52, 0x247: 0x5f52, 0x248: 0x5f52, 0x249: 0x5f52, 0x24a: 0x5f52, 0x24b: 0x5f52, + 0x24c: 0x5652, 0x24d: 0x5952, 0x24e: 0x5c52, 0x24f: 0x1813, 0x250: 0x168a, 0x251: 0x170a, + 0x252: 0x0013, 0x253: 0x0013, 0x254: 0x0013, 0x255: 0x178a, 0x256: 0x180a, 0x257: 0x1812, + 0x258: 0x0113, 0x259: 0x0112, 0x25a: 0x0113, 0x25b: 0x0112, 0x25c: 0x0113, 0x25d: 0x0112, + 0x25e: 0x0113, 0x25f: 0x0112, 0x260: 0x0113, 0x261: 0x0112, 0x262: 0x0113, 0x263: 0x0112, + 0x264: 0x0113, 0x265: 0x0112, 0x266: 0x0113, 0x267: 0x0112, 0x268: 0x0113, 0x269: 0x0112, + 0x26a: 0x0113, 0x26b: 0x0112, 0x26c: 0x0113, 0x26d: 0x0112, 0x26e: 0x0113, 0x26f: 0x0112, + 0x270: 0x188a, 0x271: 0x190a, 0x272: 0x0b12, 0x273: 0x5352, 0x274: 0x6253, 0x275: 0x198a, + 0x277: 0x0f13, 0x278: 0x0f12, 0x279: 0x0b13, 0x27a: 0x0113, 0x27b: 0x0112, + 0x27c: 0x0012, 0x27d: 0x4d53, 0x27e: 0x5053, 0x27f: 0x5053, + // Block 0xa, offset 0x280 + 0x280: 0x6852, 0x281: 0x6852, 0x282: 0x6852, 0x283: 0x6852, 0x284: 0x6852, 0x285: 0x6852, + 0x286: 0x6852, 0x287: 0x1a0a, 0x288: 0x0012, 0x28a: 0x0010, + 0x291: 0x0034, + 0x292: 0x0024, 0x293: 0x0024, 0x294: 0x0024, 0x295: 0x0024, 0x296: 0x0034, 0x297: 0x0024, + 0x298: 0x0024, 0x299: 0x0024, 0x29a: 0x0034, 0x29b: 0x0034, 0x29c: 0x0024, 0x29d: 0x0024, + 0x29e: 0x0024, 0x29f: 0x0024, 0x2a0: 0x0024, 0x2a1: 0x0024, 0x2a2: 0x0034, 0x2a3: 0x0034, + 0x2a4: 0x0034, 0x2a5: 0x0034, 0x2a6: 0x0034, 0x2a7: 0x0034, 0x2a8: 0x0024, 0x2a9: 0x0024, + 0x2aa: 0x0034, 0x2ab: 0x0024, 0x2ac: 0x0024, 0x2ad: 0x0034, 0x2ae: 0x0034, 0x2af: 0x0024, + 0x2b0: 0x0034, 0x2b1: 0x0034, 0x2b2: 0x0034, 0x2b3: 0x0034, 0x2b4: 0x0034, 0x2b5: 0x0034, + 0x2b6: 0x0034, 0x2b7: 0x0034, 0x2b8: 0x0034, 0x2b9: 0x0034, 0x2ba: 0x0034, 0x2bb: 0x0034, + 0x2bc: 0x0034, 0x2bd: 0x0034, 0x2bf: 0x0034, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x7053, 0x2c1: 0x7053, 0x2c2: 0x7053, 0x2c3: 0x7053, 0x2c4: 0x7053, 0x2c5: 0x7053, + 0x2c7: 0x7053, + 0x2cd: 0x7053, 0x2d0: 0x1aea, 0x2d1: 0x1b6a, + 0x2d2: 0x1bea, 0x2d3: 0x1c6a, 0x2d4: 0x1cea, 0x2d5: 0x1d6a, 0x2d6: 0x1dea, 0x2d7: 
0x1e6a, + 0x2d8: 0x1eea, 0x2d9: 0x1f6a, 0x2da: 0x1fea, 0x2db: 0x206a, 0x2dc: 0x20ea, 0x2dd: 0x216a, + 0x2de: 0x21ea, 0x2df: 0x226a, 0x2e0: 0x22ea, 0x2e1: 0x236a, 0x2e2: 0x23ea, 0x2e3: 0x246a, + 0x2e4: 0x24ea, 0x2e5: 0x256a, 0x2e6: 0x25ea, 0x2e7: 0x266a, 0x2e8: 0x26ea, 0x2e9: 0x276a, + 0x2ea: 0x27ea, 0x2eb: 0x286a, 0x2ec: 0x28ea, 0x2ed: 0x296a, 0x2ee: 0x29ea, 0x2ef: 0x2a6a, + 0x2f0: 0x2aea, 0x2f1: 0x2b6a, 0x2f2: 0x2bea, 0x2f3: 0x2c6a, 0x2f4: 0x2cea, 0x2f5: 0x2d6a, + 0x2f6: 0x2dea, 0x2f7: 0x2e6a, 0x2f8: 0x2eea, 0x2f9: 0x2f6a, 0x2fa: 0x2fea, + 0x2fc: 0x0014, 0x2fd: 0x306a, 0x2fe: 0x30ea, 0x2ff: 0x316a, + // Block 0xc, offset 0x300 + 0x300: 0x0812, 0x301: 0x0812, 0x302: 0x0812, 0x303: 0x0812, 0x304: 0x0812, 0x305: 0x0812, + 0x308: 0x0813, 0x309: 0x0813, 0x30a: 0x0813, 0x30b: 0x0813, + 0x30c: 0x0813, 0x30d: 0x0813, 0x310: 0x3b1a, 0x311: 0x0812, + 0x312: 0x3bfa, 0x313: 0x0812, 0x314: 0x3d3a, 0x315: 0x0812, 0x316: 0x3e7a, 0x317: 0x0812, + 0x319: 0x0813, 0x31b: 0x0813, 0x31d: 0x0813, + 0x31f: 0x0813, 0x320: 0x0812, 0x321: 0x0812, 0x322: 0x0812, 0x323: 0x0812, + 0x324: 0x0812, 0x325: 0x0812, 0x326: 0x0812, 0x327: 0x0812, 0x328: 0x0813, 0x329: 0x0813, + 0x32a: 0x0813, 0x32b: 0x0813, 0x32c: 0x0813, 0x32d: 0x0813, 0x32e: 0x0813, 0x32f: 0x0813, + 0x330: 0x9252, 0x331: 0x9252, 0x332: 0x9552, 0x333: 0x9552, 0x334: 0x9852, 0x335: 0x9852, + 0x336: 0x9b52, 0x337: 0x9b52, 0x338: 0x9e52, 0x339: 0x9e52, 0x33a: 0xa152, 0x33b: 0xa152, + 0x33c: 0x4d52, 0x33d: 0x4d52, + // Block 0xd, offset 0x340 + 0x340: 0x3fba, 0x341: 0x40aa, 0x342: 0x419a, 0x343: 0x428a, 0x344: 0x437a, 0x345: 0x446a, + 0x346: 0x455a, 0x347: 0x464a, 0x348: 0x4739, 0x349: 0x4829, 0x34a: 0x4919, 0x34b: 0x4a09, + 0x34c: 0x4af9, 0x34d: 0x4be9, 0x34e: 0x4cd9, 0x34f: 0x4dc9, 0x350: 0x4eba, 0x351: 0x4faa, + 0x352: 0x509a, 0x353: 0x518a, 0x354: 0x527a, 0x355: 0x536a, 0x356: 0x545a, 0x357: 0x554a, + 0x358: 0x5639, 0x359: 0x5729, 0x35a: 0x5819, 0x35b: 0x5909, 0x35c: 0x59f9, 0x35d: 0x5ae9, + 0x35e: 0x5bd9, 0x35f: 0x5cc9, 0x360: 0x5dba, 0x361: 0x5eaa, 0x362: 0x5f9a, 0x363: 0x608a, + 0x364: 0x617a, 0x365: 0x626a, 0x366: 0x635a, 0x367: 0x644a, 0x368: 0x6539, 0x369: 0x6629, + 0x36a: 0x6719, 0x36b: 0x6809, 0x36c: 0x68f9, 0x36d: 0x69e9, 0x36e: 0x6ad9, 0x36f: 0x6bc9, + 0x370: 0x0812, 0x371: 0x0812, 0x372: 0x6cba, 0x373: 0x6dca, 0x374: 0x6e9a, + 0x376: 0x6f7a, 0x377: 0x705a, 0x378: 0x0813, 0x379: 0x0813, 0x37a: 0x9253, 0x37b: 0x9253, + 0x37c: 0x7199, 0x37d: 0x0004, 0x37e: 0x726a, 0x37f: 0x0004, + // Block 0xe, offset 0x380 + 0x380: 0x0004, 0x381: 0x0004, 0x382: 0x72ea, 0x383: 0x73fa, 0x384: 0x74ca, + 0x386: 0x75aa, 0x387: 0x768a, 0x388: 0x9553, 0x389: 0x9553, 0x38a: 0x9853, 0x38b: 0x9853, + 0x38c: 0x77c9, 0x38d: 0x0004, 0x38e: 0x0004, 0x38f: 0x0004, 0x390: 0x0812, 0x391: 0x0812, + 0x392: 0x789a, 0x393: 0x79da, 0x396: 0x7b1a, 0x397: 0x7bfa, + 0x398: 0x0813, 0x399: 0x0813, 0x39a: 0x9b53, 0x39b: 0x9b53, 0x39d: 0x0004, + 0x39e: 0x0004, 0x39f: 0x0004, 0x3a0: 0x0812, 0x3a1: 0x0812, 0x3a2: 0x7d3a, 0x3a3: 0x7e7a, + 0x3a4: 0x7fba, 0x3a5: 0x0912, 0x3a6: 0x809a, 0x3a7: 0x817a, 0x3a8: 0x0813, 0x3a9: 0x0813, + 0x3aa: 0xa153, 0x3ab: 0xa153, 0x3ac: 0x0913, 0x3ad: 0x0004, 0x3ae: 0x0004, 0x3af: 0x0004, + 0x3b2: 0x82ba, 0x3b3: 0x83ca, 0x3b4: 0x849a, + 0x3b6: 0x857a, 0x3b7: 0x865a, 0x3b8: 0x9e53, 0x3b9: 0x9e53, 0x3ba: 0x4d53, 0x3bb: 0x4d53, + 0x3bc: 0x8799, 0x3bd: 0x0004, 0x3be: 0x0004, + // Block 0xf, offset 0x3c0 + 0x3c2: 0x0013, + 0x3c7: 0x0013, 0x3ca: 0x0012, 0x3cb: 0x0013, + 0x3cc: 0x0013, 0x3cd: 0x0013, 0x3ce: 0x0012, 0x3cf: 0x0012, 0x3d0: 0x0013, 0x3d1: 0x0013, + 0x3d2: 
0x0013, 0x3d3: 0x0012, 0x3d5: 0x0013, + 0x3d9: 0x0013, 0x3da: 0x0013, 0x3db: 0x0013, 0x3dc: 0x0013, 0x3dd: 0x0013, + 0x3e4: 0x0013, 0x3e6: 0x886b, 0x3e8: 0x0013, + 0x3ea: 0x88cb, 0x3eb: 0x890b, 0x3ec: 0x0013, 0x3ed: 0x0013, 0x3ef: 0x0012, + 0x3f0: 0x0013, 0x3f1: 0x0013, 0x3f2: 0xa453, 0x3f3: 0x0013, 0x3f4: 0x0012, 0x3f5: 0x0010, + 0x3f6: 0x0010, 0x3f7: 0x0010, 0x3f8: 0x0010, 0x3f9: 0x0012, + 0x3fc: 0x0012, 0x3fd: 0x0012, 0x3fe: 0x0013, 0x3ff: 0x0013, + // Block 0x10, offset 0x400 + 0x400: 0x1a13, 0x401: 0x1a13, 0x402: 0x1e13, 0x403: 0x1e13, 0x404: 0x1a13, 0x405: 0x1a13, + 0x406: 0x2613, 0x407: 0x2613, 0x408: 0x2a13, 0x409: 0x2a13, 0x40a: 0x2e13, 0x40b: 0x2e13, + 0x40c: 0x2a13, 0x40d: 0x2a13, 0x40e: 0x2613, 0x40f: 0x2613, 0x410: 0xa752, 0x411: 0xa752, + 0x412: 0xaa52, 0x413: 0xaa52, 0x414: 0xad52, 0x415: 0xad52, 0x416: 0xaa52, 0x417: 0xaa52, + 0x418: 0xa752, 0x419: 0xa752, 0x41a: 0x1a12, 0x41b: 0x1a12, 0x41c: 0x1e12, 0x41d: 0x1e12, + 0x41e: 0x1a12, 0x41f: 0x1a12, 0x420: 0x2612, 0x421: 0x2612, 0x422: 0x2a12, 0x423: 0x2a12, + 0x424: 0x2e12, 0x425: 0x2e12, 0x426: 0x2a12, 0x427: 0x2a12, 0x428: 0x2612, 0x429: 0x2612, + // Block 0x11, offset 0x440 + 0x440: 0x6552, 0x441: 0x6552, 0x442: 0x6552, 0x443: 0x6552, 0x444: 0x6552, 0x445: 0x6552, + 0x446: 0x6552, 0x447: 0x6552, 0x448: 0x6552, 0x449: 0x6552, 0x44a: 0x6552, 0x44b: 0x6552, + 0x44c: 0x6552, 0x44d: 0x6552, 0x44e: 0x6552, 0x44f: 0x6552, 0x450: 0xb052, 0x451: 0xb052, + 0x452: 0xb052, 0x453: 0xb052, 0x454: 0xb052, 0x455: 0xb052, 0x456: 0xb052, 0x457: 0xb052, + 0x458: 0xb052, 0x459: 0xb052, 0x45a: 0xb052, 0x45b: 0xb052, 0x45c: 0xb052, 0x45d: 0xb052, + 0x45e: 0xb052, 0x460: 0x0113, 0x461: 0x0112, 0x462: 0x896b, 0x463: 0x8b53, + 0x464: 0x89cb, 0x465: 0x8a2a, 0x466: 0x8a8a, 0x467: 0x0f13, 0x468: 0x0f12, 0x469: 0x0313, + 0x46a: 0x0312, 0x46b: 0x0713, 0x46c: 0x0712, 0x46d: 0x8aeb, 0x46e: 0x8b4b, 0x46f: 0x8bab, + 0x470: 0x8c0b, 0x471: 0x0012, 0x472: 0x0113, 0x473: 0x0112, 0x474: 0x0012, 0x475: 0x0313, + 0x476: 0x0312, 0x477: 0x0012, 0x478: 0x0012, 0x479: 0x0012, 0x47a: 0x0012, 0x47b: 0x0012, + 0x47c: 0x0015, 0x47d: 0x0015, 0x47e: 0x8c6b, 0x47f: 0x8ccb, + // Block 0x12, offset 0x480 + 0x480: 0x0113, 0x481: 0x0112, 0x482: 0x0113, 0x483: 0x0112, 0x484: 0x0113, 0x485: 0x0112, + 0x486: 0x0113, 0x487: 0x0112, 0x488: 0x0014, 0x489: 0x0014, 0x48a: 0x0014, 0x48b: 0x0713, + 0x48c: 0x0712, 0x48d: 0x8d2b, 0x48e: 0x0012, 0x48f: 0x0010, 0x490: 0x0113, 0x491: 0x0112, + 0x492: 0x0113, 0x493: 0x0112, 0x494: 0x6552, 0x495: 0x0012, 0x496: 0x0113, 0x497: 0x0112, + 0x498: 0x0113, 0x499: 0x0112, 0x49a: 0x0113, 0x49b: 0x0112, 0x49c: 0x0113, 0x49d: 0x0112, + 0x49e: 0x0113, 0x49f: 0x0112, 0x4a0: 0x0113, 0x4a1: 0x0112, 0x4a2: 0x0113, 0x4a3: 0x0112, + 0x4a4: 0x0113, 0x4a5: 0x0112, 0x4a6: 0x0113, 0x4a7: 0x0112, 0x4a8: 0x0113, 0x4a9: 0x0112, + 0x4aa: 0x8d8b, 0x4ab: 0x8deb, 0x4ac: 0x8e4b, 0x4ad: 0x8eab, 0x4ae: 0x8f0b, 0x4af: 0x0012, + 0x4b0: 0x8f6b, 0x4b1: 0x8fcb, 0x4b2: 0x902b, 0x4b3: 0xb353, 0x4b4: 0x0113, 0x4b5: 0x0112, + 0x4b6: 0x0113, 0x4b7: 0x0112, 0x4b8: 0x0113, 0x4b9: 0x0112, 0x4ba: 0x0113, 0x4bb: 0x0112, + 0x4bc: 0x0113, 0x4bd: 0x0112, 0x4be: 0x0113, 0x4bf: 0x0112, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x90ea, 0x4c1: 0x916a, 0x4c2: 0x91ea, 0x4c3: 0x926a, 0x4c4: 0x931a, 0x4c5: 0x93ca, + 0x4c6: 0x944a, + 0x4d3: 0x94ca, 0x4d4: 0x95aa, 0x4d5: 0x968a, 0x4d6: 0x976a, 0x4d7: 0x984a, + 0x4dd: 0x0010, + 0x4de: 0x0034, 0x4df: 0x0010, 0x4e0: 0x0010, 0x4e1: 0x0010, 0x4e2: 0x0010, 0x4e3: 0x0010, + 0x4e4: 0x0010, 0x4e5: 0x0010, 0x4e6: 0x0010, 0x4e7: 0x0010, 0x4e8: 0x0010, + 0x4ea: 
0x0010, 0x4eb: 0x0010, 0x4ec: 0x0010, 0x4ed: 0x0010, 0x4ee: 0x0010, 0x4ef: 0x0010, + 0x4f0: 0x0010, 0x4f1: 0x0010, 0x4f2: 0x0010, 0x4f3: 0x0010, 0x4f4: 0x0010, 0x4f5: 0x0010, + 0x4f6: 0x0010, 0x4f8: 0x0010, 0x4f9: 0x0010, 0x4fa: 0x0010, 0x4fb: 0x0010, + 0x4fc: 0x0010, 0x4fe: 0x0010, + // Block 0x14, offset 0x500 + 0x500: 0x2213, 0x501: 0x2213, 0x502: 0x2613, 0x503: 0x2613, 0x504: 0x2213, 0x505: 0x2213, + 0x506: 0x2e13, 0x507: 0x2e13, 0x508: 0x2213, 0x509: 0x2213, 0x50a: 0x2613, 0x50b: 0x2613, + 0x50c: 0x2213, 0x50d: 0x2213, 0x50e: 0x3e13, 0x50f: 0x3e13, 0x510: 0x2213, 0x511: 0x2213, + 0x512: 0x2613, 0x513: 0x2613, 0x514: 0x2213, 0x515: 0x2213, 0x516: 0x2e13, 0x517: 0x2e13, + 0x518: 0x2213, 0x519: 0x2213, 0x51a: 0x2613, 0x51b: 0x2613, 0x51c: 0x2213, 0x51d: 0x2213, + 0x51e: 0xbc53, 0x51f: 0xbc53, 0x520: 0xbf53, 0x521: 0xbf53, 0x522: 0x2212, 0x523: 0x2212, + 0x524: 0x2612, 0x525: 0x2612, 0x526: 0x2212, 0x527: 0x2212, 0x528: 0x2e12, 0x529: 0x2e12, + 0x52a: 0x2212, 0x52b: 0x2212, 0x52c: 0x2612, 0x52d: 0x2612, 0x52e: 0x2212, 0x52f: 0x2212, + 0x530: 0x3e12, 0x531: 0x3e12, 0x532: 0x2212, 0x533: 0x2212, 0x534: 0x2612, 0x535: 0x2612, + 0x536: 0x2212, 0x537: 0x2212, 0x538: 0x2e12, 0x539: 0x2e12, 0x53a: 0x2212, 0x53b: 0x2212, + 0x53c: 0x2612, 0x53d: 0x2612, 0x53e: 0x2212, 0x53f: 0x2212, + // Block 0x15, offset 0x540 + 0x542: 0x0010, + 0x547: 0x0010, 0x549: 0x0010, 0x54b: 0x0010, + 0x54d: 0x0010, 0x54e: 0x0010, 0x54f: 0x0010, 0x551: 0x0010, + 0x552: 0x0010, 0x554: 0x0010, 0x557: 0x0010, + 0x559: 0x0010, 0x55b: 0x0010, 0x55d: 0x0010, + 0x55f: 0x0010, 0x561: 0x0010, 0x562: 0x0010, + 0x564: 0x0010, 0x567: 0x0010, 0x568: 0x0010, 0x569: 0x0010, + 0x56a: 0x0010, 0x56c: 0x0010, 0x56d: 0x0010, 0x56e: 0x0010, 0x56f: 0x0010, + 0x570: 0x0010, 0x571: 0x0010, 0x572: 0x0010, 0x574: 0x0010, 0x575: 0x0010, + 0x576: 0x0010, 0x577: 0x0010, 0x579: 0x0010, 0x57a: 0x0010, 0x57b: 0x0010, + 0x57c: 0x0010, 0x57e: 0x0010, +} + +// caseIndex: 25 blocks, 1600 entries, 3200 bytes +// Block 0 is the zero block. 
+var caseIndex = [1600]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x14, 0xc3: 0x15, 0xc4: 0x16, 0xc5: 0x17, 0xc6: 0x01, 0xc7: 0x02, + 0xc8: 0x18, 0xc9: 0x03, 0xca: 0x04, 0xcb: 0x19, 0xcc: 0x1a, 0xcd: 0x05, 0xce: 0x06, 0xcf: 0x07, + 0xd0: 0x1b, 0xd1: 0x1c, 0xd2: 0x1d, 0xd3: 0x1e, 0xd4: 0x1f, 0xd5: 0x20, 0xd6: 0x08, 0xd7: 0x21, + 0xd8: 0x22, 0xd9: 0x23, 0xda: 0x24, 0xdb: 0x25, 0xdc: 0x26, 0xdd: 0x27, 0xde: 0x28, 0xdf: 0x29, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x07, 0xed: 0x08, 0xef: 0x09, + 0xf0: 0x14, 0xf3: 0x16, + // Block 0x4, offset 0x100 + 0x120: 0x2a, 0x121: 0x2b, 0x122: 0x2c, 0x123: 0x2d, 0x124: 0x2e, 0x125: 0x2f, 0x126: 0x30, 0x127: 0x31, + 0x128: 0x32, 0x129: 0x33, 0x12a: 0x34, 0x12b: 0x35, 0x12c: 0x36, 0x12d: 0x37, 0x12e: 0x38, 0x12f: 0x39, + 0x130: 0x3a, 0x131: 0x3b, 0x132: 0x3c, 0x133: 0x3d, 0x134: 0x3e, 0x135: 0x3f, 0x136: 0x40, 0x137: 0x41, + 0x138: 0x42, 0x139: 0x43, 0x13a: 0x44, 0x13b: 0x45, 0x13c: 0x46, 0x13d: 0x47, 0x13e: 0x48, 0x13f: 0x49, + // Block 0x5, offset 0x140 + 0x140: 0x4a, 0x141: 0x4b, 0x142: 0x4c, 0x143: 0x09, 0x144: 0x24, 0x145: 0x24, 0x146: 0x24, 0x147: 0x24, + 0x148: 0x24, 0x149: 0x4d, 0x14a: 0x4e, 0x14b: 0x4f, 0x14c: 0x50, 0x14d: 0x51, 0x14e: 0x52, 0x14f: 0x53, + 0x150: 0x54, 0x151: 0x24, 0x152: 0x24, 0x153: 0x24, 0x154: 0x24, 0x155: 0x24, 0x156: 0x24, 0x157: 0x24, + 0x158: 0x24, 0x159: 0x55, 0x15a: 0x56, 0x15b: 0x57, 0x15c: 0x58, 0x15d: 0x59, 0x15e: 0x5a, 0x15f: 0x5b, + 0x160: 0x5c, 0x161: 0x5d, 0x162: 0x5e, 0x163: 0x5f, 0x164: 0x60, 0x165: 0x61, 0x167: 0x62, + 0x168: 0x63, 0x169: 0x64, 0x16a: 0x65, 0x16b: 0x66, 0x16c: 0x67, 0x16d: 0x68, 0x16e: 0x69, 0x16f: 0x6a, + 0x170: 0x6b, 0x171: 0x6c, 0x172: 0x6d, 0x173: 0x6e, 0x174: 0x6f, 0x175: 0x70, 0x176: 0x71, 0x177: 0x72, + 0x178: 0x73, 0x179: 0x73, 0x17a: 0x74, 0x17b: 0x73, 0x17c: 0x75, 0x17d: 0x0a, 0x17e: 0x0b, 0x17f: 0x0c, + // Block 0x6, offset 0x180 + 0x180: 0x76, 0x181: 0x77, 0x182: 0x78, 0x183: 0x79, 0x184: 0x0d, 0x185: 0x7a, 0x186: 0x7b, + 0x192: 0x7c, 0x193: 0x0e, + 0x1b0: 0x7d, 0x1b1: 0x0f, 0x1b2: 0x73, 0x1b3: 0x7e, 0x1b4: 0x7f, 0x1b5: 0x80, 0x1b6: 0x81, 0x1b7: 0x82, + 0x1b8: 0x83, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x84, 0x1c2: 0x85, 0x1c3: 0x86, 0x1c4: 0x87, 0x1c5: 0x24, 0x1c6: 0x88, + // Block 0x8, offset 0x200 + 0x200: 0x89, 0x201: 0x24, 0x202: 0x24, 0x203: 0x24, 0x204: 0x24, 0x205: 0x24, 0x206: 0x24, 0x207: 0x24, + 0x208: 0x24, 0x209: 0x24, 0x20a: 0x24, 0x20b: 0x24, 0x20c: 0x24, 0x20d: 0x24, 0x20e: 0x24, 0x20f: 0x24, + 0x210: 0x24, 0x211: 0x24, 0x212: 0x8a, 0x213: 0x8b, 0x214: 0x24, 0x215: 0x24, 0x216: 0x24, 0x217: 0x24, + 0x218: 0x8c, 0x219: 0x8d, 0x21a: 0x8e, 0x21b: 0x8f, 0x21c: 0x90, 0x21d: 0x91, 0x21e: 0x10, 0x21f: 0x92, + 0x220: 0x93, 0x221: 0x94, 0x222: 0x24, 0x223: 0x95, 0x224: 0x96, 0x225: 0x97, 0x226: 0x98, 0x227: 0x99, + 0x228: 0x9a, 0x229: 0x9b, 0x22a: 0x9c, 0x22b: 0x9d, 0x22c: 0x9e, 0x22d: 0x9f, 0x22e: 0xa0, 0x22f: 0xa1, + 0x230: 0x24, 0x231: 0x24, 0x232: 0x24, 0x233: 0x24, 0x234: 0x24, 0x235: 0x24, 0x236: 0x24, 0x237: 0x24, + 0x238: 0x24, 0x239: 0x24, 0x23a: 0x24, 0x23b: 0x24, 0x23c: 0x24, 0x23d: 0x24, 0x23e: 0x24, 0x23f: 0x24, + // Block 0x9, offset 0x240 + 0x240: 0x24, 0x241: 0x24, 0x242: 0x24, 0x243: 0x24, 0x244: 0x24, 0x245: 0x24, 0x246: 0x24, 0x247: 0x24, + 0x248: 0x24, 0x249: 0x24, 0x24a: 0x24, 0x24b: 0x24, 0x24c: 0x24, 0x24d: 0x24, 0x24e: 0x24, 0x24f: 0x24, + 0x250: 0x24, 0x251: 0x24, 0x252: 0x24, 0x253: 0x24, 0x254: 0x24, 0x255: 0x24, 0x256: 
0x24, 0x257: 0x24, + 0x258: 0x24, 0x259: 0x24, 0x25a: 0x24, 0x25b: 0x24, 0x25c: 0x24, 0x25d: 0x24, 0x25e: 0x24, 0x25f: 0x24, + 0x260: 0x24, 0x261: 0x24, 0x262: 0x24, 0x263: 0x24, 0x264: 0x24, 0x265: 0x24, 0x266: 0x24, 0x267: 0x24, + 0x268: 0x24, 0x269: 0x24, 0x26a: 0x24, 0x26b: 0x24, 0x26c: 0x24, 0x26d: 0x24, 0x26e: 0x24, 0x26f: 0x24, + 0x270: 0x24, 0x271: 0x24, 0x272: 0x24, 0x273: 0x24, 0x274: 0x24, 0x275: 0x24, 0x276: 0x24, 0x277: 0x24, + 0x278: 0x24, 0x279: 0x24, 0x27a: 0x24, 0x27b: 0x24, 0x27c: 0x24, 0x27d: 0x24, 0x27e: 0x24, 0x27f: 0x24, + // Block 0xa, offset 0x280 + 0x280: 0x24, 0x281: 0x24, 0x282: 0x24, 0x283: 0x24, 0x284: 0x24, 0x285: 0x24, 0x286: 0x24, 0x287: 0x24, + 0x288: 0x24, 0x289: 0x24, 0x28a: 0x24, 0x28b: 0x24, 0x28c: 0x24, 0x28d: 0x24, 0x28e: 0x24, 0x28f: 0x24, + 0x290: 0x24, 0x291: 0x24, 0x292: 0x24, 0x293: 0x24, 0x294: 0x24, 0x295: 0x24, 0x296: 0x24, 0x297: 0x24, + 0x298: 0x24, 0x299: 0x24, 0x29a: 0x24, 0x29b: 0x24, 0x29c: 0x24, 0x29d: 0x24, 0x29e: 0xa2, 0x29f: 0xa3, + // Block 0xb, offset 0x2c0 + 0x2ec: 0x11, 0x2ed: 0xa4, 0x2ee: 0xa5, 0x2ef: 0xa6, + 0x2f0: 0x24, 0x2f1: 0x24, 0x2f2: 0x24, 0x2f3: 0x24, 0x2f4: 0xa7, 0x2f5: 0xa8, 0x2f6: 0xa9, 0x2f7: 0xaa, + 0x2f8: 0xab, 0x2f9: 0xac, 0x2fa: 0x24, 0x2fb: 0xad, 0x2fc: 0xae, 0x2fd: 0xaf, 0x2fe: 0xb0, 0x2ff: 0xb1, + // Block 0xc, offset 0x300 + 0x300: 0xb2, 0x301: 0xb3, 0x302: 0x24, 0x303: 0xb4, 0x305: 0xb5, 0x307: 0xb6, + 0x30a: 0xb7, 0x30b: 0xb8, 0x30c: 0xb9, 0x30d: 0xba, 0x30e: 0xbb, 0x30f: 0xbc, + 0x310: 0xbd, 0x311: 0xbe, 0x312: 0xbf, 0x313: 0xc0, 0x314: 0xc1, 0x315: 0xc2, + 0x318: 0x24, 0x319: 0x24, 0x31a: 0x24, 0x31b: 0x24, 0x31c: 0xc3, 0x31d: 0xc4, + 0x320: 0xc5, 0x321: 0xc6, 0x322: 0xc7, 0x323: 0xc8, 0x324: 0xc9, 0x326: 0xca, + 0x328: 0xcb, 0x329: 0xcc, 0x32a: 0xcd, 0x32b: 0xce, 0x32c: 0x5f, 0x32d: 0xcf, 0x32e: 0xd0, + 0x330: 0x24, 0x331: 0xd1, 0x332: 0xd2, 0x333: 0xd3, 0x334: 0xd4, + 0x33a: 0xd5, 0x33c: 0xd6, 0x33d: 0xd7, 0x33e: 0xd8, 0x33f: 0xd9, + // Block 0xd, offset 0x340 + 0x340: 0xda, 0x341: 0xdb, 0x342: 0xdc, 0x343: 0xdd, 0x344: 0xde, 0x345: 0xdf, 0x346: 0xe0, 0x347: 0xe1, + 0x348: 0xe2, 0x34a: 0xe3, 0x34b: 0xe4, 0x34c: 0xe5, 0x34d: 0xe6, + 0x350: 0xe7, 0x351: 0xe8, 0x352: 0xe9, 0x353: 0xea, 0x356: 0xeb, 0x357: 0xec, + 0x358: 0xed, 0x359: 0xee, 0x35a: 0xef, 0x35b: 0xf0, 0x35c: 0xf1, + 0x360: 0xf2, 0x362: 0xf3, 0x363: 0xf4, 0x364: 0xf5, 0x365: 0xf6, 0x366: 0xf7, 0x367: 0xf8, + 0x368: 0xf9, 0x369: 0xfa, 0x36a: 0xfb, 0x36b: 0xfc, + 0x370: 0xfd, 0x371: 0xfe, 0x372: 0xff, 0x374: 0x100, 0x375: 0x101, 0x376: 0x102, + 0x37b: 0x103, 0x37e: 0x104, + // Block 0xe, offset 0x380 + 0x380: 0x24, 0x381: 0x24, 0x382: 0x24, 0x383: 0x24, 0x384: 0x24, 0x385: 0x24, 0x386: 0x24, 0x387: 0x24, + 0x388: 0x24, 0x389: 0x24, 0x38a: 0x24, 0x38b: 0x24, 0x38c: 0x24, 0x38d: 0x24, 0x38e: 0x105, + 0x390: 0x24, 0x391: 0x106, 0x392: 0x24, 0x393: 0x24, 0x394: 0x24, 0x395: 0x107, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x24, 0x3c1: 0x24, 0x3c2: 0x24, 0x3c3: 0x24, 0x3c4: 0x24, 0x3c5: 0x24, 0x3c6: 0x24, 0x3c7: 0x24, + 0x3c8: 0x24, 0x3c9: 0x24, 0x3ca: 0x24, 0x3cb: 0x24, 0x3cc: 0x24, 0x3cd: 0x24, 0x3ce: 0x24, 0x3cf: 0x24, + 0x3d0: 0x108, + // Block 0x10, offset 0x400 + 0x410: 0x24, 0x411: 0x24, 0x412: 0x24, 0x413: 0x24, 0x414: 0x24, 0x415: 0x24, 0x416: 0x24, 0x417: 0x24, + 0x418: 0x24, 0x419: 0x109, + // Block 0x11, offset 0x440 + 0x460: 0x24, 0x461: 0x24, 0x462: 0x24, 0x463: 0x24, 0x464: 0x24, 0x465: 0x24, 0x466: 0x24, 0x467: 0x24, + 0x468: 0xfc, 0x469: 0x10a, 0x46b: 0x10b, 0x46c: 0x10c, 0x46d: 0x10d, 0x46e: 0x10e, + 0x479: 0x10f, 0x47c: 0x24, 0x47d: 
0x110, 0x47e: 0x111, 0x47f: 0x112, + // Block 0x12, offset 0x480 + 0x4b0: 0x24, 0x4b1: 0x113, 0x4b2: 0x114, + // Block 0x13, offset 0x4c0 + 0x4c5: 0x115, 0x4c6: 0x116, + 0x4c9: 0x117, + 0x4d0: 0x118, 0x4d1: 0x119, 0x4d2: 0x11a, 0x4d3: 0x11b, 0x4d4: 0x11c, 0x4d5: 0x11d, 0x4d6: 0x11e, 0x4d7: 0x11f, + 0x4d8: 0x120, 0x4d9: 0x121, 0x4da: 0x122, 0x4db: 0x123, 0x4dc: 0x124, 0x4dd: 0x125, 0x4de: 0x126, 0x4df: 0x127, + 0x4e8: 0x128, 0x4e9: 0x129, 0x4ea: 0x12a, + // Block 0x14, offset 0x500 + 0x500: 0x12b, 0x504: 0x12c, 0x505: 0x12d, + 0x50b: 0x12e, + 0x520: 0x24, 0x521: 0x24, 0x522: 0x24, 0x523: 0x12f, 0x524: 0x12, 0x525: 0x130, + 0x538: 0x131, 0x539: 0x13, 0x53a: 0x132, + // Block 0x15, offset 0x540 + 0x544: 0x133, 0x545: 0x134, 0x546: 0x135, + 0x54f: 0x136, + 0x56f: 0x137, + // Block 0x16, offset 0x580 + 0x590: 0x0a, 0x591: 0x0b, 0x592: 0x0c, 0x593: 0x0d, 0x594: 0x0e, 0x596: 0x0f, + 0x59b: 0x10, 0x59d: 0x11, 0x59e: 0x12, 0x59f: 0x13, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x138, 0x5c1: 0x139, 0x5c4: 0x139, 0x5c5: 0x139, 0x5c6: 0x139, 0x5c7: 0x13a, + // Block 0x18, offset 0x600 + 0x620: 0x15, +} + +// sparseOffsets: 296 entries, 592 bytes +var sparseOffsets = []uint16{0x0, 0x9, 0xf, 0x18, 0x24, 0x2e, 0x34, 0x37, 0x3b, 0x3e, 0x42, 0x4c, 0x4e, 0x57, 0x5e, 0x63, 0x71, 0x72, 0x80, 0x8f, 0x99, 0x9c, 0xa3, 0xab, 0xae, 0xb0, 0xc0, 0xc6, 0xd4, 0xdf, 0xec, 0xf7, 0x103, 0x10d, 0x119, 0x124, 0x130, 0x13c, 0x144, 0x14d, 0x157, 0x162, 0x16e, 0x174, 0x17f, 0x185, 0x18d, 0x190, 0x195, 0x199, 0x19d, 0x1a4, 0x1ad, 0x1b5, 0x1b6, 0x1bf, 0x1c6, 0x1ce, 0x1d4, 0x1d9, 0x1dd, 0x1e0, 0x1e2, 0x1e5, 0x1ea, 0x1eb, 0x1ed, 0x1ef, 0x1f1, 0x1f8, 0x1fd, 0x201, 0x20a, 0x20d, 0x210, 0x216, 0x217, 0x222, 0x223, 0x224, 0x229, 0x236, 0x23f, 0x240, 0x248, 0x251, 0x25a, 0x263, 0x268, 0x26b, 0x276, 0x284, 0x286, 0x28d, 0x291, 0x29d, 0x29e, 0x2a9, 0x2b1, 0x2b9, 0x2bf, 0x2c0, 0x2ce, 0x2d3, 0x2d6, 0x2db, 0x2df, 0x2e5, 0x2ea, 0x2ed, 0x2f2, 0x2f7, 0x2f8, 0x2fe, 0x300, 0x301, 0x303, 0x305, 0x308, 0x309, 0x30b, 0x30e, 0x314, 0x318, 0x31a, 0x31f, 0x326, 0x331, 0x33b, 0x33c, 0x345, 0x349, 0x34e, 0x356, 0x35c, 0x362, 0x36c, 0x371, 0x37a, 0x380, 0x389, 0x38d, 0x395, 0x397, 0x399, 0x39c, 0x39e, 0x3a0, 0x3a1, 0x3a2, 0x3a4, 0x3a6, 0x3ac, 0x3b1, 0x3b3, 0x3ba, 0x3bd, 0x3bf, 0x3c5, 0x3ca, 0x3cc, 0x3cd, 0x3ce, 0x3cf, 0x3d1, 0x3d3, 0x3d5, 0x3d8, 0x3da, 0x3dd, 0x3e5, 0x3e8, 0x3ec, 0x3f4, 0x3f6, 0x3f7, 0x3f8, 0x3fa, 0x400, 0x402, 0x403, 0x405, 0x407, 0x409, 0x416, 0x417, 0x418, 0x41c, 0x41e, 0x41f, 0x420, 0x421, 0x422, 0x425, 0x428, 0x42b, 0x431, 0x432, 0x434, 0x438, 0x43c, 0x442, 0x445, 0x44c, 0x450, 0x454, 0x45d, 0x466, 0x46c, 0x472, 0x47c, 0x486, 0x488, 0x491, 0x497, 0x49d, 0x4a3, 0x4a6, 0x4ac, 0x4af, 0x4b8, 0x4b9, 0x4c0, 0x4c4, 0x4c5, 0x4c8, 0x4d2, 0x4d5, 0x4d7, 0x4de, 0x4e6, 0x4ec, 0x4f2, 0x4f3, 0x4f9, 0x4fc, 0x504, 0x50b, 0x515, 0x51d, 0x520, 0x521, 0x522, 0x523, 0x524, 0x526, 0x527, 0x529, 0x52b, 0x52d, 0x531, 0x532, 0x534, 0x537, 0x539, 0x53c, 0x53e, 0x543, 0x548, 0x54c, 0x54d, 0x550, 0x554, 0x55f, 0x563, 0x56b, 0x570, 0x574, 0x577, 0x57b, 0x57e, 0x581, 0x586, 0x58a, 0x58e, 0x592, 0x596, 0x598, 0x59a, 0x59d, 0x5a2, 0x5a5, 0x5a7, 0x5aa, 0x5ac, 0x5b2, 0x5bb, 0x5c0, 0x5c1, 0x5c4, 0x5c5, 0x5c6, 0x5c7, 0x5c9, 0x5ca, 0x5cb} + +// sparseValues: 1483 entries, 5932 bytes +var sparseValues = [1483]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0004, lo: 0xa8, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xaa}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0004, lo: 0xaf, hi: 0xaf}, + {value: 0x0004, lo: 0xb4, hi: 0xb4}, + {value: 0x001a, lo: 0xb5, hi: 
0xb5}, + {value: 0x0054, lo: 0xb7, hi: 0xb7}, + {value: 0x0004, lo: 0xb8, hi: 0xb8}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + // Block 0x1, offset 0x9 + {value: 0x2013, lo: 0x80, hi: 0x96}, + {value: 0x2013, lo: 0x98, hi: 0x9e}, + {value: 0x009a, lo: 0x9f, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xb6}, + {value: 0x2012, lo: 0xb8, hi: 0xbe}, + {value: 0x0252, lo: 0xbf, hi: 0xbf}, + // Block 0x2, offset 0xf + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x011b, lo: 0xb0, hi: 0xb0}, + {value: 0x019a, lo: 0xb1, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb7}, + {value: 0x0012, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x0553, lo: 0xbf, hi: 0xbf}, + // Block 0x3, offset 0x18 + {value: 0x0552, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x01da, lo: 0x89, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xb7}, + {value: 0x0253, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x028a, lo: 0xbf, hi: 0xbf}, + // Block 0x4, offset 0x24 + {value: 0x0117, lo: 0x80, hi: 0x9f}, + {value: 0x2f53, lo: 0xa0, hi: 0xa0}, + {value: 0x0012, lo: 0xa1, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xb3}, + {value: 0x0012, lo: 0xb4, hi: 0xb9}, + {value: 0x090b, lo: 0xba, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x2953, lo: 0xbd, hi: 0xbd}, + {value: 0x098b, lo: 0xbe, hi: 0xbe}, + {value: 0x0a0a, lo: 0xbf, hi: 0xbf}, + // Block 0x5, offset 0x2e + {value: 0x0015, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x97}, + {value: 0x0004, lo: 0x98, hi: 0x9d}, + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0015, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xbf}, + // Block 0x6, offset 0x34 + {value: 0x0024, lo: 0x80, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbf}, + // Block 0x7, offset 0x37 + {value: 0x6553, lo: 0x80, hi: 0x8f}, + {value: 0x2013, lo: 0x90, hi: 0x9f}, + {value: 0x5f53, lo: 0xa0, hi: 0xaf}, + {value: 0x2012, lo: 0xb0, hi: 0xbf}, + // Block 0x8, offset 0x3b + {value: 0x5f52, lo: 0x80, hi: 0x8f}, + {value: 0x6552, lo: 0x90, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x9, offset 0x3e + {value: 0x0117, lo: 0x80, hi: 0x81}, + {value: 0x0024, lo: 0x83, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xbf}, + // Block 0xa, offset 0x42 + {value: 0x0f13, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0716, lo: 0x8b, hi: 0x8c}, + {value: 0x0316, lo: 0x8d, hi: 0x8e}, + {value: 0x0f12, lo: 0x8f, hi: 0x8f}, + {value: 0x0117, lo: 0x90, hi: 0xbf}, + // Block 0xb, offset 0x4c + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x6553, lo: 0xb1, hi: 0xbf}, + // Block 0xc, offset 0x4e + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6853, lo: 0x90, hi: 0x96}, + {value: 0x0014, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0054, lo: 0x9f, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa0}, + {value: 0x6552, lo: 0xa1, hi: 0xaf}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0xd, offset 0x57 + {value: 0x0034, lo: 
0x81, hi: 0x82}, + {value: 0x0024, lo: 0x84, hi: 0x84}, + {value: 0x0034, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xaa}, + {value: 0x0010, lo: 0xaf, hi: 0xb3}, + {value: 0x0054, lo: 0xb4, hi: 0xb4}, + // Block 0xe, offset 0x5e + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0024, lo: 0x90, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0014, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xf, offset 0x63 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9c}, + {value: 0x0024, lo: 0x9d, hi: 0x9e}, + {value: 0x0034, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x10, offset 0x71 + {value: 0x0010, lo: 0x80, hi: 0xbf}, + // Block 0x11, offset 0x72 + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0024, lo: 0x9f, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x12, offset 0x80 + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0034, lo: 0xb1, hi: 0xb1}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0024, lo: 0xbf, hi: 0xbf}, + // Block 0x13, offset 0x8f + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0024, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x88}, + {value: 0x0024, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8d, hi: 0xbf}, + // Block 0x14, offset 0x99 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x15, offset 0x9c + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xb1}, + {value: 0x0034, lo: 0xb2, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0x16, offset 0xa3 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x99}, + {value: 0x0014, lo: 0x9a, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0xa3}, + {value: 0x0014, lo: 0xa4, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xad}, + // Block 0x17, offset 0xab + {value: 
0x0010, lo: 0x80, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0xa0, hi: 0xaa}, + // Block 0x18, offset 0xae + {value: 0x0010, lo: 0xa0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x19, offset 0xb0 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0034, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0024, lo: 0xaa, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbf}, + // Block 0x1a, offset 0xc0 + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1b, offset 0xc6 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0x1c, offset 0xd4 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb6, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1d, offset 0xdf + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xb1}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + // Block 0x1e, offset 0xec + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x1f, offset 0xf7 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x99, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, 
lo: 0xb0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x20, offset 0x103 + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x21, offset 0x10d + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x85}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xbf}, + // Block 0x22, offset 0x119 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x23, offset 0x124 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x24, offset 0x130 + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0010, lo: 0xa8, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x25, offset 0x13c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x26, offset 0x144 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb9}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbf}, + // Block 0x27, offset 0x14d + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x88}, + {value: 0x0014, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, 
lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x28, offset 0x157 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x29, offset 0x162 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb2}, + // Block 0x2a, offset 0x16e + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x2b, offset 0x174 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x94, hi: 0x97}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xba, hi: 0xbf}, + // Block 0x2c, offset 0x17f + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x96}, + {value: 0x0010, lo: 0x9a, hi: 0xb1}, + {value: 0x0010, lo: 0xb3, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x2d, offset 0x185 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x94}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9f}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + // Block 0x2e, offset 0x18d + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + // Block 0x2f, offset 0x190 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x30, offset 0x195 + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + // Block 0x31, offset 0x199 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x32, offset 0x19d + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0034, lo: 0x98, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0034, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x33, offset 0x1a4 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xac}, + {value: 0x0034, lo: 0xb1, hi: 0xb2}, + {value: 
0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xba, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x34, offset 0x1ad + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0024, lo: 0x82, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x86, hi: 0x87}, + {value: 0x0010, lo: 0x88, hi: 0x8c}, + {value: 0x0014, lo: 0x8d, hi: 0x97}, + {value: 0x0014, lo: 0x99, hi: 0xbc}, + // Block 0x35, offset 0x1b5 + {value: 0x0034, lo: 0x86, hi: 0x86}, + // Block 0x36, offset 0x1b6 + {value: 0x0010, lo: 0xab, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbe}, + // Block 0x37, offset 0x1bf + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x96, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x99}, + {value: 0x0014, lo: 0x9e, hi: 0xa0}, + {value: 0x0010, lo: 0xa2, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xad}, + {value: 0x0014, lo: 0xb1, hi: 0xb4}, + // Block 0x38, offset 0x1c6 + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x6c53, lo: 0xa0, hi: 0xbf}, + // Block 0x39, offset 0x1ce + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x9a, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x3a, offset 0x1d4 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x3b, offset 0x1d9 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x82, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3c, offset 0x1dd + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3d, offset 0x1e0 + {value: 0x0010, lo: 0x80, hi: 0x9a}, + {value: 0x0024, lo: 0x9d, hi: 0x9f}, + // Block 0x3e, offset 0x1e2 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x7453, lo: 0xa0, hi: 0xaf}, + {value: 0x7853, lo: 0xb0, hi: 0xbf}, + // Block 0x3f, offset 0x1e5 + {value: 0x7c53, lo: 0x80, hi: 0x8f}, + {value: 0x8053, lo: 0x90, hi: 0x9f}, + {value: 0x7c53, lo: 0xa0, hi: 0xaf}, + {value: 0x0813, lo: 0xb0, hi: 0xb5}, + {value: 0x0892, lo: 0xb8, hi: 0xbd}, + // Block 0x40, offset 0x1ea + {value: 0x0010, lo: 0x81, hi: 0xbf}, + // Block 0x41, offset 0x1eb + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0010, lo: 0xaf, hi: 0xbf}, + // Block 0x42, offset 0x1ed + {value: 0x0010, lo: 0x81, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x43, offset 0x1ef + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb8}, + // Block 0x44, offset 0x1f1 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0034, lo: 0x94, hi: 0x94}, + {value: 
0x0010, lo: 0xa0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + // Block 0x45, offset 0x1f8 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xac}, + {value: 0x0010, lo: 0xae, hi: 0xb0}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + // Block 0x46, offset 0x1fd + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x47, offset 0x201 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0014, lo: 0x93, hi: 0x93}, + {value: 0x0004, lo: 0x97, hi: 0x97}, + {value: 0x0024, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0x48, offset 0x20a + {value: 0x0014, lo: 0x8b, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x49, offset 0x20d + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb8}, + // Block 0x4a, offset 0x210 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x4b, offset 0x216 + {value: 0x0010, lo: 0x80, hi: 0xb5}, + // Block 0x4c, offset 0x217 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbb}, + // Block 0x4d, offset 0x222 + {value: 0x0010, lo: 0x86, hi: 0x8f}, + // Block 0x4e, offset 0x223 + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x4f, offset 0x224 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + // Block 0x50, offset 0x229 + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x9e}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xac}, + {value: 0x0010, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xbc}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x51, offset 0x236 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xa7, hi: 0xa7}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + {value: 0x0034, lo: 0xb5, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbc}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x52, offset 0x23f + {value: 0x0034, lo: 0x80, hi: 0x80}, + // Block 0x53, offset 0x240 + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 
0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x54, offset 0x248 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0030, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xab, hi: 0xab}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + {value: 0x0024, lo: 0xad, hi: 0xb3}, + // Block 0x55, offset 0x251 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0030, lo: 0xaa, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbf}, + // Block 0x56, offset 0x25a + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0030, lo: 0xb2, hi: 0xb3}, + // Block 0x57, offset 0x263 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0x58, offset 0x268 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8d, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x59, offset 0x26b + {value: 0x31ea, lo: 0x80, hi: 0x80}, + {value: 0x326a, lo: 0x81, hi: 0x81}, + {value: 0x32ea, lo: 0x82, hi: 0x82}, + {value: 0x336a, lo: 0x83, hi: 0x83}, + {value: 0x33ea, lo: 0x84, hi: 0x84}, + {value: 0x346a, lo: 0x85, hi: 0x85}, + {value: 0x34ea, lo: 0x86, hi: 0x86}, + {value: 0x356a, lo: 0x87, hi: 0x87}, + {value: 0x35ea, lo: 0x88, hi: 0x88}, + {value: 0x8353, lo: 0x90, hi: 0xba}, + {value: 0x8353, lo: 0xbd, hi: 0xbf}, + // Block 0x5a, offset 0x276 + {value: 0x0024, lo: 0x90, hi: 0x92}, + {value: 0x0034, lo: 0x94, hi: 0x99}, + {value: 0x0024, lo: 0x9a, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9f}, + {value: 0x0024, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xb3}, + {value: 0x0024, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb7}, + {value: 0x0024, lo: 0xb8, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xba}, + // Block 0x5b, offset 0x284 + {value: 0x0012, lo: 0x80, hi: 0xab}, + {value: 0x0015, lo: 0xac, hi: 0xbf}, + // Block 0x5c, offset 0x286 + {value: 0x0015, lo: 0x80, hi: 0xaa}, + {value: 0x0012, lo: 0xab, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb8}, + {value: 0x8752, lo: 0xb9, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbc}, + {value: 0x8b52, lo: 0xbd, hi: 0xbd}, + {value: 0x0012, lo: 0xbe, hi: 0xbf}, + // Block 0x5d, offset 0x28d + {value: 0x0012, lo: 0x80, hi: 0x8d}, + {value: 0x8f52, lo: 0x8e, hi: 0x8e}, + {value: 0x0012, lo: 0x8f, hi: 0x9a}, + {value: 0x0015, lo: 0x9b, hi: 0xbf}, + // Block 0x5e, offset 0x291 + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0024, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + {value: 0x0024, 
lo: 0x91, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb9}, + {value: 0x0024, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbd}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x5f, offset 0x29d + {value: 0x0117, lo: 0x80, hi: 0xbf}, + // Block 0x60, offset 0x29e + {value: 0x0117, lo: 0x80, hi: 0x95}, + {value: 0x369a, lo: 0x96, hi: 0x96}, + {value: 0x374a, lo: 0x97, hi: 0x97}, + {value: 0x37fa, lo: 0x98, hi: 0x98}, + {value: 0x38aa, lo: 0x99, hi: 0x99}, + {value: 0x395a, lo: 0x9a, hi: 0x9a}, + {value: 0x3a0a, lo: 0x9b, hi: 0x9b}, + {value: 0x0012, lo: 0x9c, hi: 0x9d}, + {value: 0x3abb, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0x9f, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x61, offset 0x2a9 + {value: 0x0812, lo: 0x80, hi: 0x87}, + {value: 0x0813, lo: 0x88, hi: 0x8f}, + {value: 0x0812, lo: 0x90, hi: 0x95}, + {value: 0x0813, lo: 0x98, hi: 0x9d}, + {value: 0x0812, lo: 0xa0, hi: 0xa7}, + {value: 0x0813, lo: 0xa8, hi: 0xaf}, + {value: 0x0812, lo: 0xb0, hi: 0xb7}, + {value: 0x0813, lo: 0xb8, hi: 0xbf}, + // Block 0x62, offset 0x2b1 + {value: 0x0004, lo: 0x8b, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8f}, + {value: 0x0054, lo: 0x98, hi: 0x99}, + {value: 0x0054, lo: 0xa4, hi: 0xa4}, + {value: 0x0054, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xaa, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xaf}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x63, offset 0x2b9 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x94, hi: 0x94}, + {value: 0x0014, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa6, hi: 0xaf}, + {value: 0x0015, lo: 0xb1, hi: 0xb1}, + {value: 0x0015, lo: 0xbf, hi: 0xbf}, + // Block 0x64, offset 0x2bf + {value: 0x0015, lo: 0x90, hi: 0x9c}, + // Block 0x65, offset 0x2c0 + {value: 0x0024, lo: 0x90, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0xa0}, + {value: 0x0024, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa4}, + {value: 0x0034, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + // Block 0x66, offset 0x2ce + {value: 0x0016, lo: 0x85, hi: 0x86}, + {value: 0x0012, lo: 0x87, hi: 0x89}, + {value: 0xa452, lo: 0x8e, hi: 0x8e}, + {value: 0x1013, lo: 0xa0, hi: 0xaf}, + {value: 0x1012, lo: 0xb0, hi: 0xbf}, + // Block 0x67, offset 0x2d3 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x88}, + // Block 0x68, offset 0x2d6 + {value: 0xa753, lo: 0xb6, hi: 0xb7}, + {value: 0xaa53, lo: 0xb8, hi: 0xb9}, + {value: 0xad53, lo: 0xba, hi: 0xbb}, + {value: 0xaa53, lo: 0xbc, hi: 0xbd}, + {value: 0xa753, lo: 0xbe, hi: 0xbf}, + // Block 0x69, offset 0x2db + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6553, lo: 0x90, hi: 0x9f}, + {value: 0xb053, lo: 0xa0, hi: 0xae}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0x6a, offset 0x2df + {value: 0x0117, lo: 0x80, hi: 0xa3}, + {value: 0x0012, lo: 0xa4, hi: 0xa4}, + {value: 0x0716, lo: 0xab, hi: 0xac}, + {value: 0x0316, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb3}, + // Block 0x6b, offset 0x2e5 + {value: 0x6c52, lo: 0x80, hi: 0x9f}, + {value: 0x7052, lo: 0xa0, hi: 0xa5}, + {value: 0x7052, lo: 0xa7, hi: 0xa7}, + {value: 0x7052, lo: 
0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x6c, offset 0x2ea + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x6d, offset 0x2ed + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0010, lo: 0xb0, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x6e, offset 0x2f2 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9e}, + {value: 0x0024, lo: 0xa0, hi: 0xbf}, + // Block 0x6f, offset 0x2f7 + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + // Block 0x70, offset 0x2f8 + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0xaa, hi: 0xad}, + {value: 0x0030, lo: 0xae, hi: 0xaf}, + {value: 0x0004, lo: 0xb1, hi: 0xb5}, + {value: 0x0014, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x71, offset 0x2fe + {value: 0x0034, lo: 0x99, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9e}, + // Block 0x72, offset 0x300 + {value: 0x0004, lo: 0xbc, hi: 0xbe}, + // Block 0x73, offset 0x301 + {value: 0x0010, lo: 0x85, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x74, offset 0x303 + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x75, offset 0x305 + {value: 0x0010, lo: 0x80, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0xbf}, + // Block 0x76, offset 0x308 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + // Block 0x77, offset 0x309 + {value: 0x0010, lo: 0x90, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x78, offset 0x30b + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0xab}, + // Block 0x79, offset 0x30e + {value: 0x0117, lo: 0x80, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb2}, + {value: 0x0024, lo: 0xb4, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x7a, offset 0x314 + {value: 0x0117, lo: 0x80, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9d}, + {value: 0x0024, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x7b, offset 0x318 + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb1}, + // Block 0x7c, offset 0x31a + {value: 0x0004, lo: 0x80, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xaf}, + {value: 0x0012, lo: 0xb0, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xbf}, + // Block 0x7d, offset 0x31f + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x0015, lo: 0xb0, hi: 0xb0}, + {value: 0x0012, lo: 0xb1, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x8753, lo: 0xbd, hi: 0xbd}, + {value: 0x0117, lo: 0xbe, hi: 0xbf}, + // Block 0x7e, offset 0x326 + {value: 0x0117, lo: 0x82, hi: 0x83}, + {value: 0x6553, lo: 0x84, hi: 0x84}, + {value: 0x908b, lo: 0x85, hi: 0x85}, + {value: 0x8f53, lo: 0x86, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0316, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbf}, + // Block 0x7f, offset 0x331 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x85}, + {value: 
0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x8c, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + // Block 0x80, offset 0x33b + {value: 0x0010, lo: 0x80, hi: 0xb3}, + // Block 0x81, offset 0x33c + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xa0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb7}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x82, offset 0x345 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x83, offset 0x349 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0x92}, + {value: 0x0030, lo: 0x93, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0x84, offset 0x34e + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x85, offset 0x356 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0004, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x86, offset 0x35c + {value: 0x0010, lo: 0x80, hi: 0xa8}, + {value: 0x0014, lo: 0xa9, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0x87, offset 0x362 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x88, offset 0x36c + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0024, lo: 0xbe, hi: 0xbf}, + // Block 0x89, offset 0x371 + {value: 0x0024, lo: 0x81, hi: 0x81}, + {value: 0x0004, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + // Block 0x8a, offset 0x37a + {value: 0x0010, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x8e}, + {value: 0x0010, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x8b, offset 0x380 + {value: 0x0012, lo: 0x80, hi: 0x92}, + {value: 0xb352, lo: 0x93, hi: 0x93}, + {value: 0x0012, lo: 0x94, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa8}, + 
{value: 0x0014, lo: 0xa9, hi: 0xa9}, + {value: 0x0004, lo: 0xaa, hi: 0xab}, + {value: 0x74d2, lo: 0xb0, hi: 0xbf}, + // Block 0x8c, offset 0x389 + {value: 0x78d2, lo: 0x80, hi: 0x8f}, + {value: 0x7cd2, lo: 0x90, hi: 0x9f}, + {value: 0x80d2, lo: 0xa0, hi: 0xaf}, + {value: 0x7cd2, lo: 0xb0, hi: 0xbf}, + // Block 0x8d, offset 0x38d + {value: 0x0010, lo: 0x80, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x8e, offset 0x395 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x8f, offset 0x397 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x8b, hi: 0xbb}, + // Block 0x90, offset 0x399 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0xbf}, + // Block 0x91, offset 0x39c + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0004, lo: 0xb2, hi: 0xbf}, + // Block 0x92, offset 0x39e + {value: 0x0004, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x93, hi: 0xbf}, + // Block 0x93, offset 0x3a0 + {value: 0x0010, lo: 0x80, hi: 0xbd}, + // Block 0x94, offset 0x3a1 + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0x95, offset 0x3a2 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0xbf}, + // Block 0x96, offset 0x3a4 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0xb0, hi: 0xbb}, + // Block 0x97, offset 0x3a6 + {value: 0x0014, lo: 0x80, hi: 0x8f}, + {value: 0x0054, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0xa0, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xad}, + {value: 0x0024, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + // Block 0x98, offset 0x3ac + {value: 0x0010, lo: 0x8d, hi: 0x8f}, + {value: 0x0054, lo: 0x92, hi: 0x92}, + {value: 0x0054, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0xb0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x99, offset 0x3b1 + {value: 0x0010, lo: 0x80, hi: 0xbc}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x9a, offset 0x3b3 + {value: 0x0054, lo: 0x87, hi: 0x87}, + {value: 0x0054, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0054, lo: 0x9a, hi: 0x9a}, + {value: 0x5f53, lo: 0xa1, hi: 0xba}, + {value: 0x0004, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9b, offset 0x3ba + {value: 0x0004, lo: 0x80, hi: 0x80}, + {value: 0x5f52, lo: 0x81, hi: 0x9a}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + // Block 0x9c, offset 0x3bd + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbe}, + // Block 0x9d, offset 0x3bf + {value: 0x0010, lo: 0x82, hi: 0x87}, + {value: 0x0010, lo: 0x8a, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0x97}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0004, lo: 0xa3, hi: 0xa3}, + {value: 0x0014, lo: 0xb9, hi: 0xbb}, + // Block 0x9e, offset 0x3c5 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0010, lo: 0x8d, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xba}, + {value: 0x0010, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9f, offset 0x3ca + {value: 0x0010, lo: 0x80, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x9d}, + // Block 0xa0, offset 0x3cc + {value: 0x0010, lo: 0x80, hi: 0xba}, + // Block 0xa1, offset 0x3cd + {value: 0x0010, lo: 0x80, hi: 0xb4}, + // Block 0xa2, offset 0x3ce + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0xa3, 
offset 0x3cf + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa4, offset 0x3d1 + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + // Block 0xa5, offset 0x3d3 + {value: 0x0010, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xad, hi: 0xbf}, + // Block 0xa6, offset 0x3d5 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0xb5}, + {value: 0x0024, lo: 0xb6, hi: 0xba}, + // Block 0xa7, offset 0x3d8 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa8, offset 0x3da + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x91, hi: 0x95}, + // Block 0xa9, offset 0x3dd + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x97}, + {value: 0xb653, lo: 0x98, hi: 0x9f}, + {value: 0xb953, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbf}, + // Block 0xaa, offset 0x3e5 + {value: 0xb652, lo: 0x80, hi: 0x87}, + {value: 0xb952, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xab, offset 0x3e8 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0xb953, lo: 0xb0, hi: 0xb7}, + {value: 0xb653, lo: 0xb8, hi: 0xbf}, + // Block 0xac, offset 0x3ec + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x93}, + {value: 0xb952, lo: 0x98, hi: 0x9f}, + {value: 0xb652, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbb}, + // Block 0xad, offset 0x3f4 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xae, offset 0x3f6 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + // Block 0xaf, offset 0x3f7 + {value: 0x0010, lo: 0x80, hi: 0xb6}, + // Block 0xb0, offset 0x3f8 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + // Block 0xb1, offset 0x3fa + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xb2, offset 0x400 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xb3, offset 0x402 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + // Block 0xb4, offset 0x403 + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + // Block 0xb5, offset 0x405 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb9}, + // Block 0xb6, offset 0x407 + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0xb7, offset 0x409 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x8e, hi: 0x8e}, + {value: 0x0024, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x99, hi: 0xb5}, + {value: 0x0024, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xb8, offset 0x416 + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0xb9, offset 0x417 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + // Block 0xba, offset 0x418 + 
{value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + // Block 0xbb, offset 0x41c + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + // Block 0xbc, offset 0x41e + {value: 0x0010, lo: 0x80, hi: 0x91}, + // Block 0xbd, offset 0x41f + {value: 0x0010, lo: 0x80, hi: 0x88}, + // Block 0xbe, offset 0x420 + {value: 0x5653, lo: 0x80, hi: 0xb2}, + // Block 0xbf, offset 0x421 + {value: 0x5652, lo: 0x80, hi: 0xb2}, + // Block 0xc0, offset 0x422 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc1, offset 0x425 + {value: 0x0010, lo: 0x80, hi: 0xa9}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + // Block 0xc2, offset 0x428 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xc3, offset 0x42b + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x87}, + {value: 0x0024, lo: 0x88, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x8b}, + {value: 0x0024, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + // Block 0xc4, offset 0x431 + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xc5, offset 0x432 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xc6, offset 0x434 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xc7, offset 0x438 + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xc8, offset 0x43c + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + // Block 0xc9, offset 0x442 + {value: 0x0014, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xca, offset 0x445 + {value: 0x0024, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0xcb, offset 0x44c + {value: 0x0010, lo: 0x84, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + // Block 0xcc, offset 0x450 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xcd, offset 0x454 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x89, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + // Block 0xce, offset 0x45d + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb4}, + {value: 0x0030, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 
0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xb7}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0xcf, offset 0x466 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xd0, offset 0x46c + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa2}, + {value: 0x0014, lo: 0xa3, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xd1, offset 0x472 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xd2, offset 0x47c + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0030, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9d, hi: 0xa3}, + {value: 0x0024, lo: 0xa6, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + // Block 0xd3, offset 0x486 + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xd4, offset 0x488 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + // Block 0xd5, offset 0x491 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xd6, offset 0x497 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd7, offset 0x49d + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd8, offset 0x4a3 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x98, hi: 0x9b}, + {value: 0x0014, lo: 0x9c, hi: 0x9d}, + // Block 0xd9, offset 0x4a6 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xda, offset 0x4ac + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xdb, offset 0x4af + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0014, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 
0xb5}, + {value: 0x0030, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + // Block 0xdc, offset 0x4b8 + {value: 0x0010, lo: 0x80, hi: 0x89}, + // Block 0xdd, offset 0x4b9 + {value: 0x0014, lo: 0x9d, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xde, offset 0x4c0 + {value: 0x0010, lo: 0x80, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + // Block 0xdf, offset 0x4c4 + {value: 0x5f53, lo: 0xa0, hi: 0xbf}, + // Block 0xe0, offset 0x4c5 + {value: 0x5f52, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xe1, offset 0x4c8 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8c, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + {value: 0x0030, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xe2, offset 0x4d2 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0034, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xe3, offset 0x4d5 + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + {value: 0x0010, lo: 0xaa, hi: 0xbf}, + // Block 0xe4, offset 0x4d7 + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0014, lo: 0x94, hi: 0x97}, + {value: 0x0014, lo: 0x9a, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0x9f}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + // Block 0xe5, offset 0x4de + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x8a}, + {value: 0x0010, lo: 0x8b, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbb, hi: 0xbe}, + // Block 0xe6, offset 0x4e6 + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0014, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x98}, + {value: 0x0014, lo: 0x99, hi: 0x9b}, + {value: 0x0010, lo: 0x9c, hi: 0xbf}, + // Block 0xe7, offset 0x4ec + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0014, lo: 0x8a, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x99}, + {value: 0x0010, lo: 0x9d, hi: 0x9d}, + // Block 0xe8, offset 0x4f2 + {value: 0x0010, lo: 0x80, hi: 0xb8}, + // Block 0xe9, offset 0x4f3 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xea, offset 0x4f9 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0xeb, offset 0x4fc + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0014, lo: 0x92, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xa9}, + {value: 0x0014, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 
0xb5, hi: 0xb6}, + // Block 0xec, offset 0x504 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb6}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xed, offset 0x50b + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa5}, + {value: 0x0010, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xbf}, + // Block 0xee, offset 0x515 + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0014, lo: 0x90, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0x96}, + {value: 0x0034, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xef, offset 0x51d + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + // Block 0xf0, offset 0x520 + {value: 0x0010, lo: 0xb0, hi: 0xb0}, + // Block 0xf1, offset 0x521 + {value: 0x0010, lo: 0x80, hi: 0x99}, + // Block 0xf2, offset 0x522 + {value: 0x0010, lo: 0x80, hi: 0xae}, + // Block 0xf3, offset 0x523 + {value: 0x0010, lo: 0x80, hi: 0x83}, + // Block 0xf4, offset 0x524 + {value: 0x0010, lo: 0x80, hi: 0xae}, + {value: 0x0014, lo: 0xb0, hi: 0xb8}, + // Block 0xf5, offset 0x526 + {value: 0x0010, lo: 0x80, hi: 0x86}, + // Block 0xf6, offset 0x527 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xf7, offset 0x529 + {value: 0x0010, lo: 0x90, hi: 0xad}, + {value: 0x0034, lo: 0xb0, hi: 0xb4}, + // Block 0xf8, offset 0x52b + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + // Block 0xf9, offset 0x52d + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa3, hi: 0xb7}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xfa, offset 0x531 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + // Block 0xfb, offset 0x532 + {value: 0x2013, lo: 0x80, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xbf}, + // Block 0xfc, offset 0x534 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xfd, offset 0x537 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0014, lo: 0x8f, hi: 0x9f}, + // Block 0xfe, offset 0x539 + {value: 0x0014, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa3, hi: 0xa4}, + {value: 0x0030, lo: 0xb0, hi: 0xb1}, + // Block 0xff, offset 0x53c + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbc}, + // Block 0x100, offset 0x53e + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0034, lo: 0x9e, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa3}, + // Block 0x101, offset 0x543 + {value: 0x0030, lo: 0xa5, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xa9}, + {value: 0x0030, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbf}, + // Block 0x102, offset 0x548 + {value: 0x0034, lo: 0x80, hi: 0x82}, + {value: 0x0024, lo: 0x85, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8b}, + {value: 0x0024, lo: 0xaa, hi: 0xad}, + // Block 0x103, offset 0x54c + 
{value: 0x0024, lo: 0x82, hi: 0x84}, + // Block 0x104, offset 0x54d + {value: 0x0013, lo: 0x80, hi: 0x99}, + {value: 0x0012, lo: 0x9a, hi: 0xb3}, + {value: 0x0013, lo: 0xb4, hi: 0xbf}, + // Block 0x105, offset 0x550 + {value: 0x0013, lo: 0x80, hi: 0x8d}, + {value: 0x0012, lo: 0x8e, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0xa7}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x106, offset 0x554 + {value: 0x0013, lo: 0x80, hi: 0x81}, + {value: 0x0012, lo: 0x82, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0x9c}, + {value: 0x0013, lo: 0x9e, hi: 0x9f}, + {value: 0x0013, lo: 0xa2, hi: 0xa2}, + {value: 0x0013, lo: 0xa5, hi: 0xa6}, + {value: 0x0013, lo: 0xa9, hi: 0xac}, + {value: 0x0013, lo: 0xae, hi: 0xb5}, + {value: 0x0012, lo: 0xb6, hi: 0xb9}, + {value: 0x0012, lo: 0xbb, hi: 0xbb}, + {value: 0x0012, lo: 0xbd, hi: 0xbf}, + // Block 0x107, offset 0x55f + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0012, lo: 0x85, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x108, offset 0x563 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0013, lo: 0x84, hi: 0x85}, + {value: 0x0013, lo: 0x87, hi: 0x8a}, + {value: 0x0013, lo: 0x8d, hi: 0x94}, + {value: 0x0013, lo: 0x96, hi: 0x9c}, + {value: 0x0012, lo: 0x9e, hi: 0xb7}, + {value: 0x0013, lo: 0xb8, hi: 0xb9}, + {value: 0x0013, lo: 0xbb, hi: 0xbe}, + // Block 0x109, offset 0x56b + {value: 0x0013, lo: 0x80, hi: 0x84}, + {value: 0x0013, lo: 0x86, hi: 0x86}, + {value: 0x0013, lo: 0x8a, hi: 0x90}, + {value: 0x0012, lo: 0x92, hi: 0xab}, + {value: 0x0013, lo: 0xac, hi: 0xbf}, + // Block 0x10a, offset 0x570 + {value: 0x0013, lo: 0x80, hi: 0x85}, + {value: 0x0012, lo: 0x86, hi: 0x9f}, + {value: 0x0013, lo: 0xa0, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbf}, + // Block 0x10b, offset 0x574 + {value: 0x0012, lo: 0x80, hi: 0x93}, + {value: 0x0013, lo: 0x94, hi: 0xad}, + {value: 0x0012, lo: 0xae, hi: 0xbf}, + // Block 0x10c, offset 0x577 + {value: 0x0012, lo: 0x80, hi: 0x87}, + {value: 0x0013, lo: 0x88, hi: 0xa1}, + {value: 0x0012, lo: 0xa2, hi: 0xbb}, + {value: 0x0013, lo: 0xbc, hi: 0xbf}, + // Block 0x10d, offset 0x57b + {value: 0x0013, lo: 0x80, hi: 0x95}, + {value: 0x0012, lo: 0x96, hi: 0xaf}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x10e, offset 0x57e + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0012, lo: 0x8a, hi: 0xa5}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0x10f, offset 0x581 + {value: 0x0013, lo: 0x80, hi: 0x80}, + {value: 0x0012, lo: 0x82, hi: 0x9a}, + {value: 0x0012, lo: 0x9c, hi: 0xa1}, + {value: 0x0013, lo: 0xa2, hi: 0xba}, + {value: 0x0012, lo: 0xbc, hi: 0xbf}, + // Block 0x110, offset 0x586 + {value: 0x0012, lo: 0x80, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0xb4}, + {value: 0x0012, lo: 0xb6, hi: 0xbf}, + // Block 0x111, offset 0x58a + {value: 0x0012, lo: 0x80, hi: 0x8e}, + {value: 0x0012, lo: 0x90, hi: 0x95}, + {value: 0x0013, lo: 0x96, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x112, offset 0x58e + {value: 0x0012, lo: 0x80, hi: 0x88}, + {value: 0x0012, lo: 0x8a, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0x113, offset 0x592 + {value: 0x0012, lo: 0x80, hi: 0x82}, + {value: 0x0012, lo: 0x84, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8b}, + {value: 0x0010, lo: 0x8e, hi: 0xbf}, + // Block 0x114, offset 0x596 + {value: 0x0014, lo: 0x80, hi: 0xb6}, + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x115, offset 0x598 + {value: 0x0014, lo: 0x80, hi: 
0xac}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x116, offset 0x59a + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x9b, hi: 0x9f}, + {value: 0x0014, lo: 0xa1, hi: 0xaf}, + // Block 0x117, offset 0x59d + {value: 0x0024, lo: 0x80, hi: 0x86}, + {value: 0x0024, lo: 0x88, hi: 0x98}, + {value: 0x0024, lo: 0x9b, hi: 0xa1}, + {value: 0x0024, lo: 0xa3, hi: 0xa4}, + {value: 0x0024, lo: 0xa6, hi: 0xaa}, + // Block 0x118, offset 0x5a2 + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + // Block 0x119, offset 0x5a5 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + // Block 0x11a, offset 0x5a7 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0024, lo: 0xac, hi: 0xaf}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x11b, offset 0x5aa + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0034, lo: 0x90, hi: 0x96}, + // Block 0x11c, offset 0x5ac + {value: 0xbc52, lo: 0x80, hi: 0x81}, + {value: 0xbf52, lo: 0x82, hi: 0x83}, + {value: 0x0024, lo: 0x84, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x11d, offset 0x5b2 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x9f}, + {value: 0x0010, lo: 0xa1, hi: 0xa2}, + {value: 0x0010, lo: 0xa4, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb7}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + // Block 0x11e, offset 0x5bb + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x9b}, + {value: 0x0010, lo: 0xa1, hi: 0xa3}, + {value: 0x0010, lo: 0xa5, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xbb}, + // Block 0x11f, offset 0x5c0 + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x120, offset 0x5c1 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x121, offset 0x5c4 + {value: 0x0013, lo: 0x80, hi: 0x89}, + // Block 0x122, offset 0x5c5 + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x123, offset 0x5c6 + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x124, offset 0x5c7 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0014, lo: 0xa0, hi: 0xbf}, + // Block 0x125, offset 0x5c9 + {value: 0x0014, lo: 0x80, hi: 0xbf}, + // Block 0x126, offset 0x5ca + {value: 0x0014, lo: 0x80, hi: 0xaf}, +} + +// Total table size 15212 bytes (14KiB); checksum: 1EB13752 diff --git a/vendor/golang.org/x/text/cases/tables9.0.0.go b/vendor/golang.org/x/text/cases/tables9.0.0.go new file mode 100644 index 000000000..636d5d14d --- /dev/null +++ b/vendor/golang.org/x/text/cases/tables9.0.0.go @@ -0,0 +1,2216 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build !go1.10 +// +build !go1.10 + +package cases + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "9.0.0" + +var xorData string = "" + // Size: 185 bytes + "\x00\x06\x07\x00\x01?\x00\x0f\x03\x00\x0f\x12\x00\x0f\x1f\x00\x0f\x1d" + + "\x00\x01\x13\x00\x0f\x16\x00\x0f\x0b\x00\x0f3\x00\x0f7\x00\x01#\x00\x0f?" 
+ + "\x00\x0e'\x00\x0f/\x00\x0e>\x00\x0f*\x00\x0c&\x00\x0c*\x00\x0c;\x00\x0c9" + + "\x00\x0c%\x00\x01\x08\x00\x03\x0d\x00\x03\x09\x00\x02\x06\x00\x02\x02" + + "\x00\x02\x0c\x00\x01\x00\x00\x01\x03\x00\x01\x01\x00\x01 \x00\x01\x0c" + + "\x00\x01\x10\x00\x03\x10\x00\x036 \x00\x037 \x00\x0b#\x10\x00\x0b 0\x00" + + "\x0b!\x10\x00\x0b!0\x00\x0b(\x04\x00\x03\x04\x1e\x00\x03\x0a\x00\x02:" + + "\x00\x02>\x00\x02,\x00\x02\x00\x00\x02\x10\x00\x01<\x00\x01&\x00\x01*" + + "\x00\x01.\x00\x010\x003 \x00\x01\x18\x00\x01(\x00\x01\x1e\x00\x01\x22" + +var exceptions string = "" + // Size: 2068 bytes + "\x00\x12\x12μΜΜ\x12\x12ssSSSs\x13\x18i̇i̇\x10\x09II\x13\x1bʼnʼNʼN\x11" + + "\x09sSS\x12\x12dždžDž\x12\x12dždžDŽ\x10\x12DŽDž\x12\x12ljljLj\x12\x12ljljLJ\x10\x12LJLj" + + "\x12\x12njnjNj\x12\x12njnjNJ\x10\x12NJNj\x13\x1bǰJ̌J̌\x12\x12dzdzDz\x12\x12dzdzDZ\x10" + + "\x12DZDz\x13\x18ⱥⱥ\x13\x18ⱦⱦ\x10\x1bⱾⱾ\x10\x1bⱿⱿ\x10\x1bⱯⱯ\x10\x1bⱭⱭ\x10" + + "\x1bⱰⱰ\x10\x1bꞫꞫ\x10\x1bꞬꞬ\x10\x1bꞍꞍ\x10\x1bꞪꞪ\x10\x1bꞮꞮ\x10\x1bⱢⱢ\x10" + + "\x1bꞭꞭ\x10\x1bⱮⱮ\x10\x1bⱤⱤ\x10\x1bꞱꞱ\x10\x1bꞲꞲ\x10\x1bꞰꞰ2\x12ιΙΙ\x166ΐ" + + "Ϊ́Ϊ́\x166ΰΫ́Ϋ́\x12\x12σΣΣ\x12\x12βΒΒ\x12\x12θΘΘ\x12\x12φΦΦ\x12" + + "\x12πΠΠ\x12\x12κΚΚ\x12\x12ρΡΡ\x12\x12εΕΕ\x14$եւԵՒԵւ\x12\x12вВВ\x12\x12дД" + + "Д\x12\x12оОО\x12\x12сСС\x12\x12тТТ\x12\x12тТТ\x12\x12ъЪЪ\x12\x12ѣѢѢ\x13" + + "\x1bꙋꙊꙊ\x13\x1bẖH̱H̱\x13\x1bẗT̈T̈\x13\x1bẘW̊W̊\x13\x1bẙY̊Y̊\x13\x1ba" + + "ʾAʾAʾ\x13\x1bṡṠṠ\x12\x10ssß\x14$ὐΥ̓Υ̓\x166ὒΥ̓̀Υ̓̀\x166ὔΥ̓́Υ̓́\x166" + + "ὖΥ̓͂Υ̓͂\x15+ἀιἈΙᾈ\x15+ἁιἉΙᾉ\x15+ἂιἊΙᾊ\x15+ἃιἋΙᾋ\x15+ἄιἌΙᾌ\x15+ἅιἍΙᾍ" + + "\x15+ἆιἎΙᾎ\x15+ἇιἏΙᾏ\x15\x1dἀιᾀἈΙ\x15\x1dἁιᾁἉΙ\x15\x1dἂιᾂἊΙ\x15\x1dἃιᾃἋΙ" + + "\x15\x1dἄιᾄἌΙ\x15\x1dἅιᾅἍΙ\x15\x1dἆιᾆἎΙ\x15\x1dἇιᾇἏΙ\x15+ἠιἨΙᾘ\x15+ἡιἩΙᾙ" + + "\x15+ἢιἪΙᾚ\x15+ἣιἫΙᾛ\x15+ἤιἬΙᾜ\x15+ἥιἭΙᾝ\x15+ἦιἮΙᾞ\x15+ἧιἯΙᾟ\x15\x1dἠιᾐἨ" + + "Ι\x15\x1dἡιᾑἩΙ\x15\x1dἢιᾒἪΙ\x15\x1dἣιᾓἫΙ\x15\x1dἤιᾔἬΙ\x15\x1dἥιᾕἭΙ\x15" + + "\x1dἦιᾖἮΙ\x15\x1dἧιᾗἯΙ\x15+ὠιὨΙᾨ\x15+ὡιὩΙᾩ\x15+ὢιὪΙᾪ\x15+ὣιὫΙᾫ\x15+ὤιὬΙᾬ" + + "\x15+ὥιὭΙᾭ\x15+ὦιὮΙᾮ\x15+ὧιὯΙᾯ\x15\x1dὠιᾠὨΙ\x15\x1dὡιᾡὩΙ\x15\x1dὢιᾢὪΙ" + + "\x15\x1dὣιᾣὫΙ\x15\x1dὤιᾤὬΙ\x15\x1dὥιᾥὭΙ\x15\x1dὦιᾦὮΙ\x15\x1dὧιᾧὯΙ\x15-ὰι" + + "ᾺΙᾺͅ\x14#αιΑΙᾼ\x14$άιΆΙΆͅ\x14$ᾶΑ͂Α͂\x166ᾶιΑ͂Ιᾼ͂\x14\x1cαιᾳΑΙ\x12" + + "\x12ιΙΙ\x15-ὴιῊΙῊͅ\x14#ηιΗΙῌ\x14$ήιΉΙΉͅ\x14$ῆΗ͂Η͂\x166ῆιΗ͂Ιῌ͂\x14\x1c" + + "ηιῃΗΙ\x166ῒΪ̀Ϊ̀\x166ΐΪ́Ϊ́\x14$ῖΙ͂Ι͂\x166ῗΪ͂Ϊ͂\x166ῢΫ̀Ϋ" + + "̀\x166ΰΫ́Ϋ́\x14$ῤΡ̓Ρ̓\x14$ῦΥ͂Υ͂\x166ῧΫ͂Ϋ͂\x15-ὼιῺΙῺͅ\x14#ωιΩΙ" + + "ῼ\x14$ώιΏΙΏͅ\x14$ῶΩ͂Ω͂\x166ῶιΩ͂Ιῼ͂\x14\x1cωιῳΩΙ\x12\x10ωω\x11\x08kk" + + "\x12\x10åå\x12\x10ɫɫ\x12\x10ɽɽ\x10\x12ȺȺ\x10\x12ȾȾ\x12\x10ɑɑ\x12\x10ɱɱ" + + "\x12\x10ɐɐ\x12\x10ɒɒ\x12\x10ȿȿ\x12\x10ɀɀ\x12\x10ɥɥ\x12\x10ɦɦ\x12\x10ɜɜ" + + "\x12\x10ɡɡ\x12\x10ɬɬ\x12\x10ɪɪ\x12\x10ʞʞ\x12\x10ʇʇ\x12\x10ʝʝ\x12\x12ffFF" + + "Ff\x12\x12fiFIFi\x12\x12flFLFl\x13\x1bffiFFIFfi\x13\x1bfflFFLFfl\x12\x12" + + "stSTSt\x12\x12stSTSt\x14$մնՄՆՄն\x14$մեՄԵՄե\x14$միՄԻՄի\x14$վնՎՆՎն\x14$մխՄ" + + "ԽՄխ" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. 
+ } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *caseTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *caseTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return caseValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := caseIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = caseIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = caseIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *caseTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return caseValues[c0] + } + i := caseIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = caseIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = caseIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// caseTrie. Total size: 11742 bytes (11.47 KiB). Checksum: 795fe57ee5135873. +type caseTrie struct{} + +func newCaseTrie(i int) *caseTrie { + return &caseTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *caseTrie) lookupValue(n uint32, b byte) uint16 { + switch { + case n < 18: + return uint16(caseValues[n<<6+uint32(b)]) + default: + n -= 18 + return uint16(sparse.lookup(n, b)) + } +} + +// caseValues: 20 blocks, 1280 entries, 2560 bytes +// The third block is the zero block. +var caseValues = [1280]uint16{ + // Block 0x0, offset 0x0 + 0x27: 0x0054, + 0x2e: 0x0054, + 0x30: 0x0010, 0x31: 0x0010, 0x32: 0x0010, 0x33: 0x0010, 0x34: 0x0010, 0x35: 0x0010, + 0x36: 0x0010, 0x37: 0x0010, 0x38: 0x0010, 0x39: 0x0010, 0x3a: 0x0054, + // Block 0x1, offset 0x40 + 0x41: 0x2013, 0x42: 0x2013, 0x43: 0x2013, 0x44: 0x2013, 0x45: 0x2013, + 0x46: 0x2013, 0x47: 0x2013, 0x48: 0x2013, 0x49: 0x2013, 0x4a: 0x2013, 0x4b: 0x2013, + 0x4c: 0x2013, 0x4d: 0x2013, 0x4e: 0x2013, 0x4f: 0x2013, 0x50: 0x2013, 0x51: 0x2013, + 0x52: 0x2013, 0x53: 0x2013, 0x54: 0x2013, 0x55: 0x2013, 0x56: 0x2013, 0x57: 0x2013, + 0x58: 0x2013, 0x59: 0x2013, 0x5a: 0x2013, + 0x5e: 0x0004, 0x5f: 0x0010, 0x60: 0x0004, 0x61: 0x2012, 0x62: 0x2012, 0x63: 0x2012, + 0x64: 0x2012, 0x65: 0x2012, 0x66: 0x2012, 0x67: 0x2012, 0x68: 0x2012, 0x69: 0x2012, + 0x6a: 0x2012, 0x6b: 0x2012, 0x6c: 0x2012, 0x6d: 0x2012, 0x6e: 0x2012, 0x6f: 0x2012, + 0x70: 0x2012, 0x71: 0x2012, 0x72: 0x2012, 0x73: 0x2012, 0x74: 0x2012, 0x75: 0x2012, + 0x76: 0x2012, 0x77: 0x2012, 0x78: 0x2012, 0x79: 0x2012, 0x7a: 0x2012, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc0: 0x0852, 0xc1: 0x0b53, 0xc2: 0x0113, 0xc3: 0x0112, 0xc4: 0x0113, 0xc5: 0x0112, + 0xc6: 0x0b53, 0xc7: 0x0f13, 0xc8: 0x0f12, 0xc9: 0x0e53, 0xca: 0x1153, 0xcb: 0x0713, + 0xcc: 0x0712, 0xcd: 0x0012, 0xce: 0x1453, 0xcf: 0x1753, 0xd0: 0x1a53, 0xd1: 0x0313, + 0xd2: 0x0312, 0xd3: 0x1d53, 0xd4: 0x2053, 0xd5: 0x2352, 0xd6: 0x2653, 0xd7: 0x2653, + 0xd8: 0x0113, 0xd9: 0x0112, 0xda: 0x2952, 0xdb: 0x0012, 0xdc: 0x1d53, 0xdd: 0x2c53, + 0xde: 0x2f52, 0xdf: 0x3253, 0xe0: 0x0113, 0xe1: 0x0112, 0xe2: 0x0113, 0xe3: 0x0112, + 0xe4: 0x0113, 0xe5: 0x0112, 0xe6: 0x3553, 0xe7: 0x0f13, 0xe8: 0x0f12, 0xe9: 0x3853, + 0xea: 0x0012, 0xeb: 0x0012, 0xec: 0x0113, 0xed: 0x0112, 0xee: 0x3553, 0xef: 0x1f13, + 0xf0: 0x1f12, 0xf1: 0x3b53, 0xf2: 0x3e53, 0xf3: 0x0713, 0xf4: 0x0712, 0xf5: 0x0313, + 0xf6: 0x0312, 0xf7: 0x4153, 0xf8: 0x0113, 0xf9: 0x0112, 0xfa: 0x0012, 0xfb: 0x0010, + 0xfc: 0x0113, 0xfd: 0x0112, 0xfe: 0x0012, 0xff: 0x4452, + // Block 0x4, offset 0x100 + 0x100: 0x0010, 0x101: 0x0010, 0x102: 0x0010, 0x103: 0x0010, 0x104: 0x02db, 0x105: 0x0359, + 0x106: 0x03da, 0x107: 0x043b, 0x108: 0x04b9, 0x109: 0x053a, 0x10a: 0x059b, 0x10b: 0x0619, + 0x10c: 0x069a, 0x10d: 0x0313, 0x10e: 0x0312, 0x10f: 0x1f13, 0x110: 0x1f12, 0x111: 0x0313, + 0x112: 0x0312, 0x113: 0x0713, 0x114: 0x0712, 0x115: 0x0313, 0x116: 0x0312, 0x117: 0x0f13, + 0x118: 0x0f12, 0x119: 0x0313, 0x11a: 0x0312, 0x11b: 0x0713, 0x11c: 0x0712, 0x11d: 0x1452, + 0x11e: 
0x0113, 0x11f: 0x0112, 0x120: 0x0113, 0x121: 0x0112, 0x122: 0x0113, 0x123: 0x0112, + 0x124: 0x0113, 0x125: 0x0112, 0x126: 0x0113, 0x127: 0x0112, 0x128: 0x0113, 0x129: 0x0112, + 0x12a: 0x0113, 0x12b: 0x0112, 0x12c: 0x0113, 0x12d: 0x0112, 0x12e: 0x0113, 0x12f: 0x0112, + 0x130: 0x06fa, 0x131: 0x07ab, 0x132: 0x0829, 0x133: 0x08aa, 0x134: 0x0113, 0x135: 0x0112, + 0x136: 0x2353, 0x137: 0x4453, 0x138: 0x0113, 0x139: 0x0112, 0x13a: 0x0113, 0x13b: 0x0112, + 0x13c: 0x0113, 0x13d: 0x0112, 0x13e: 0x0113, 0x13f: 0x0112, + // Block 0x5, offset 0x140 + 0x140: 0x0a8a, 0x141: 0x0313, 0x142: 0x0312, 0x143: 0x0853, 0x144: 0x4753, 0x145: 0x4a53, + 0x146: 0x0113, 0x147: 0x0112, 0x148: 0x0113, 0x149: 0x0112, 0x14a: 0x0113, 0x14b: 0x0112, + 0x14c: 0x0113, 0x14d: 0x0112, 0x14e: 0x0113, 0x14f: 0x0112, 0x150: 0x0b0a, 0x151: 0x0b8a, + 0x152: 0x0c0a, 0x153: 0x0b52, 0x154: 0x0b52, 0x155: 0x0012, 0x156: 0x0e52, 0x157: 0x1152, + 0x158: 0x0012, 0x159: 0x1752, 0x15a: 0x0012, 0x15b: 0x1a52, 0x15c: 0x0c8a, 0x15d: 0x0012, + 0x15e: 0x0012, 0x15f: 0x0012, 0x160: 0x1d52, 0x161: 0x0d0a, 0x162: 0x0012, 0x163: 0x2052, + 0x164: 0x0012, 0x165: 0x0d8a, 0x166: 0x0e0a, 0x167: 0x0012, 0x168: 0x2652, 0x169: 0x2652, + 0x16a: 0x0e8a, 0x16b: 0x0f0a, 0x16c: 0x0f8a, 0x16d: 0x0012, 0x16e: 0x0012, 0x16f: 0x1d52, + 0x170: 0x0012, 0x171: 0x100a, 0x172: 0x2c52, 0x173: 0x0012, 0x174: 0x0012, 0x175: 0x3252, + 0x176: 0x0012, 0x177: 0x0012, 0x178: 0x0012, 0x179: 0x0012, 0x17a: 0x0012, 0x17b: 0x0012, + 0x17c: 0x0012, 0x17d: 0x108a, 0x17e: 0x0012, 0x17f: 0x0012, + // Block 0x6, offset 0x180 + 0x180: 0x3552, 0x181: 0x0012, 0x182: 0x0012, 0x183: 0x3852, 0x184: 0x0012, 0x185: 0x0012, + 0x186: 0x0012, 0x187: 0x110a, 0x188: 0x3552, 0x189: 0x4752, 0x18a: 0x3b52, 0x18b: 0x3e52, + 0x18c: 0x4a52, 0x18d: 0x0012, 0x18e: 0x0012, 0x18f: 0x0012, 0x190: 0x0012, 0x191: 0x0012, + 0x192: 0x4152, 0x193: 0x0012, 0x194: 0x0010, 0x195: 0x0012, 0x196: 0x0012, 0x197: 0x0012, + 0x198: 0x0012, 0x199: 0x0012, 0x19a: 0x0012, 0x19b: 0x0012, 0x19c: 0x0012, 0x19d: 0x118a, + 0x19e: 0x120a, 0x19f: 0x0012, 0x1a0: 0x0012, 0x1a1: 0x0012, 0x1a2: 0x0012, 0x1a3: 0x0012, + 0x1a4: 0x0012, 0x1a5: 0x0012, 0x1a6: 0x0012, 0x1a7: 0x0012, 0x1a8: 0x0012, 0x1a9: 0x0012, + 0x1aa: 0x0012, 0x1ab: 0x0012, 0x1ac: 0x0012, 0x1ad: 0x0012, 0x1ae: 0x0012, 0x1af: 0x0012, + 0x1b0: 0x0015, 0x1b1: 0x0015, 0x1b2: 0x0015, 0x1b3: 0x0015, 0x1b4: 0x0015, 0x1b5: 0x0015, + 0x1b6: 0x0015, 0x1b7: 0x0015, 0x1b8: 0x0015, 0x1b9: 0x0014, 0x1ba: 0x0014, 0x1bb: 0x0014, + 0x1bc: 0x0014, 0x1bd: 0x0014, 0x1be: 0x0014, 0x1bf: 0x0014, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0024, 0x1c1: 0x0024, 0x1c2: 0x0024, 0x1c3: 0x0024, 0x1c4: 0x0024, 0x1c5: 0x128d, + 0x1c6: 0x0024, 0x1c7: 0x0034, 0x1c8: 0x0034, 0x1c9: 0x0034, 0x1ca: 0x0024, 0x1cb: 0x0024, + 0x1cc: 0x0024, 0x1cd: 0x0034, 0x1ce: 0x0034, 0x1cf: 0x0014, 0x1d0: 0x0024, 0x1d1: 0x0024, + 0x1d2: 0x0024, 0x1d3: 0x0034, 0x1d4: 0x0034, 0x1d5: 0x0034, 0x1d6: 0x0034, 0x1d7: 0x0024, + 0x1d8: 0x0034, 0x1d9: 0x0034, 0x1da: 0x0034, 0x1db: 0x0024, 0x1dc: 0x0034, 0x1dd: 0x0034, + 0x1de: 0x0034, 0x1df: 0x0034, 0x1e0: 0x0034, 0x1e1: 0x0034, 0x1e2: 0x0034, 0x1e3: 0x0024, + 0x1e4: 0x0024, 0x1e5: 0x0024, 0x1e6: 0x0024, 0x1e7: 0x0024, 0x1e8: 0x0024, 0x1e9: 0x0024, + 0x1ea: 0x0024, 0x1eb: 0x0024, 0x1ec: 0x0024, 0x1ed: 0x0024, 0x1ee: 0x0024, 0x1ef: 0x0024, + 0x1f0: 0x0113, 0x1f1: 0x0112, 0x1f2: 0x0113, 0x1f3: 0x0112, 0x1f4: 0x0014, 0x1f5: 0x0004, + 0x1f6: 0x0113, 0x1f7: 0x0112, 0x1fa: 0x0015, 0x1fb: 0x4d52, + 0x1fc: 0x5052, 0x1fd: 0x5052, 0x1ff: 0x5353, + // Block 0x8, offset 0x200 + 0x204: 0x0004, 0x205: 
0x0004, + 0x206: 0x2a13, 0x207: 0x0054, 0x208: 0x2513, 0x209: 0x2713, 0x20a: 0x2513, + 0x20c: 0x5653, 0x20e: 0x5953, 0x20f: 0x5c53, 0x210: 0x130a, 0x211: 0x2013, + 0x212: 0x2013, 0x213: 0x2013, 0x214: 0x2013, 0x215: 0x2013, 0x216: 0x2013, 0x217: 0x2013, + 0x218: 0x2013, 0x219: 0x2013, 0x21a: 0x2013, 0x21b: 0x2013, 0x21c: 0x2013, 0x21d: 0x2013, + 0x21e: 0x2013, 0x21f: 0x2013, 0x220: 0x5f53, 0x221: 0x5f53, 0x223: 0x5f53, + 0x224: 0x5f53, 0x225: 0x5f53, 0x226: 0x5f53, 0x227: 0x5f53, 0x228: 0x5f53, 0x229: 0x5f53, + 0x22a: 0x5f53, 0x22b: 0x5f53, 0x22c: 0x2a12, 0x22d: 0x2512, 0x22e: 0x2712, 0x22f: 0x2512, + 0x230: 0x144a, 0x231: 0x2012, 0x232: 0x2012, 0x233: 0x2012, 0x234: 0x2012, 0x235: 0x2012, + 0x236: 0x2012, 0x237: 0x2012, 0x238: 0x2012, 0x239: 0x2012, 0x23a: 0x2012, 0x23b: 0x2012, + 0x23c: 0x2012, 0x23d: 0x2012, 0x23e: 0x2012, 0x23f: 0x2012, + // Block 0x9, offset 0x240 + 0x240: 0x5f52, 0x241: 0x5f52, 0x242: 0x158a, 0x243: 0x5f52, 0x244: 0x5f52, 0x245: 0x5f52, + 0x246: 0x5f52, 0x247: 0x5f52, 0x248: 0x5f52, 0x249: 0x5f52, 0x24a: 0x5f52, 0x24b: 0x5f52, + 0x24c: 0x5652, 0x24d: 0x5952, 0x24e: 0x5c52, 0x24f: 0x1813, 0x250: 0x160a, 0x251: 0x168a, + 0x252: 0x0013, 0x253: 0x0013, 0x254: 0x0013, 0x255: 0x170a, 0x256: 0x178a, 0x257: 0x1812, + 0x258: 0x0113, 0x259: 0x0112, 0x25a: 0x0113, 0x25b: 0x0112, 0x25c: 0x0113, 0x25d: 0x0112, + 0x25e: 0x0113, 0x25f: 0x0112, 0x260: 0x0113, 0x261: 0x0112, 0x262: 0x0113, 0x263: 0x0112, + 0x264: 0x0113, 0x265: 0x0112, 0x266: 0x0113, 0x267: 0x0112, 0x268: 0x0113, 0x269: 0x0112, + 0x26a: 0x0113, 0x26b: 0x0112, 0x26c: 0x0113, 0x26d: 0x0112, 0x26e: 0x0113, 0x26f: 0x0112, + 0x270: 0x180a, 0x271: 0x188a, 0x272: 0x0b12, 0x273: 0x5352, 0x274: 0x6253, 0x275: 0x190a, + 0x277: 0x0f13, 0x278: 0x0f12, 0x279: 0x0b13, 0x27a: 0x0113, 0x27b: 0x0112, + 0x27c: 0x0012, 0x27d: 0x4d53, 0x27e: 0x5053, 0x27f: 0x5053, + // Block 0xa, offset 0x280 + 0x280: 0x0812, 0x281: 0x0812, 0x282: 0x0812, 0x283: 0x0812, 0x284: 0x0812, 0x285: 0x0812, + 0x288: 0x0813, 0x289: 0x0813, 0x28a: 0x0813, 0x28b: 0x0813, + 0x28c: 0x0813, 0x28d: 0x0813, 0x290: 0x239a, 0x291: 0x0812, + 0x292: 0x247a, 0x293: 0x0812, 0x294: 0x25ba, 0x295: 0x0812, 0x296: 0x26fa, 0x297: 0x0812, + 0x299: 0x0813, 0x29b: 0x0813, 0x29d: 0x0813, + 0x29f: 0x0813, 0x2a0: 0x0812, 0x2a1: 0x0812, 0x2a2: 0x0812, 0x2a3: 0x0812, + 0x2a4: 0x0812, 0x2a5: 0x0812, 0x2a6: 0x0812, 0x2a7: 0x0812, 0x2a8: 0x0813, 0x2a9: 0x0813, + 0x2aa: 0x0813, 0x2ab: 0x0813, 0x2ac: 0x0813, 0x2ad: 0x0813, 0x2ae: 0x0813, 0x2af: 0x0813, + 0x2b0: 0x8b52, 0x2b1: 0x8b52, 0x2b2: 0x8e52, 0x2b3: 0x8e52, 0x2b4: 0x9152, 0x2b5: 0x9152, + 0x2b6: 0x9452, 0x2b7: 0x9452, 0x2b8: 0x9752, 0x2b9: 0x9752, 0x2ba: 0x9a52, 0x2bb: 0x9a52, + 0x2bc: 0x4d52, 0x2bd: 0x4d52, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x283a, 0x2c1: 0x292a, 0x2c2: 0x2a1a, 0x2c3: 0x2b0a, 0x2c4: 0x2bfa, 0x2c5: 0x2cea, + 0x2c6: 0x2dda, 0x2c7: 0x2eca, 0x2c8: 0x2fb9, 0x2c9: 0x30a9, 0x2ca: 0x3199, 0x2cb: 0x3289, + 0x2cc: 0x3379, 0x2cd: 0x3469, 0x2ce: 0x3559, 0x2cf: 0x3649, 0x2d0: 0x373a, 0x2d1: 0x382a, + 0x2d2: 0x391a, 0x2d3: 0x3a0a, 0x2d4: 0x3afa, 0x2d5: 0x3bea, 0x2d6: 0x3cda, 0x2d7: 0x3dca, + 0x2d8: 0x3eb9, 0x2d9: 0x3fa9, 0x2da: 0x4099, 0x2db: 0x4189, 0x2dc: 0x4279, 0x2dd: 0x4369, + 0x2de: 0x4459, 0x2df: 0x4549, 0x2e0: 0x463a, 0x2e1: 0x472a, 0x2e2: 0x481a, 0x2e3: 0x490a, + 0x2e4: 0x49fa, 0x2e5: 0x4aea, 0x2e6: 0x4bda, 0x2e7: 0x4cca, 0x2e8: 0x4db9, 0x2e9: 0x4ea9, + 0x2ea: 0x4f99, 0x2eb: 0x5089, 0x2ec: 0x5179, 0x2ed: 0x5269, 0x2ee: 0x5359, 0x2ef: 0x5449, + 0x2f0: 0x0812, 0x2f1: 0x0812, 0x2f2: 0x553a, 0x2f3: 0x564a, 0x2f4: 0x571a, + 
0x2f6: 0x57fa, 0x2f7: 0x58da, 0x2f8: 0x0813, 0x2f9: 0x0813, 0x2fa: 0x8b53, 0x2fb: 0x8b53, + 0x2fc: 0x5a19, 0x2fd: 0x0004, 0x2fe: 0x5aea, 0x2ff: 0x0004, + // Block 0xc, offset 0x300 + 0x300: 0x0004, 0x301: 0x0004, 0x302: 0x5b6a, 0x303: 0x5c7a, 0x304: 0x5d4a, + 0x306: 0x5e2a, 0x307: 0x5f0a, 0x308: 0x8e53, 0x309: 0x8e53, 0x30a: 0x9153, 0x30b: 0x9153, + 0x30c: 0x6049, 0x30d: 0x0004, 0x30e: 0x0004, 0x30f: 0x0004, 0x310: 0x0812, 0x311: 0x0812, + 0x312: 0x611a, 0x313: 0x625a, 0x316: 0x639a, 0x317: 0x647a, + 0x318: 0x0813, 0x319: 0x0813, 0x31a: 0x9453, 0x31b: 0x9453, 0x31d: 0x0004, + 0x31e: 0x0004, 0x31f: 0x0004, 0x320: 0x0812, 0x321: 0x0812, 0x322: 0x65ba, 0x323: 0x66fa, + 0x324: 0x683a, 0x325: 0x0912, 0x326: 0x691a, 0x327: 0x69fa, 0x328: 0x0813, 0x329: 0x0813, + 0x32a: 0x9a53, 0x32b: 0x9a53, 0x32c: 0x0913, 0x32d: 0x0004, 0x32e: 0x0004, 0x32f: 0x0004, + 0x332: 0x6b3a, 0x333: 0x6c4a, 0x334: 0x6d1a, + 0x336: 0x6dfa, 0x337: 0x6eda, 0x338: 0x9753, 0x339: 0x9753, 0x33a: 0x4d53, 0x33b: 0x4d53, + 0x33c: 0x7019, 0x33d: 0x0004, 0x33e: 0x0004, + // Block 0xd, offset 0x340 + 0x342: 0x0013, + 0x347: 0x0013, 0x34a: 0x0012, 0x34b: 0x0013, + 0x34c: 0x0013, 0x34d: 0x0013, 0x34e: 0x0012, 0x34f: 0x0012, 0x350: 0x0013, 0x351: 0x0013, + 0x352: 0x0013, 0x353: 0x0012, 0x355: 0x0013, + 0x359: 0x0013, 0x35a: 0x0013, 0x35b: 0x0013, 0x35c: 0x0013, 0x35d: 0x0013, + 0x364: 0x0013, 0x366: 0x70eb, 0x368: 0x0013, + 0x36a: 0x714b, 0x36b: 0x718b, 0x36c: 0x0013, 0x36d: 0x0013, 0x36f: 0x0012, + 0x370: 0x0013, 0x371: 0x0013, 0x372: 0x9d53, 0x373: 0x0013, 0x374: 0x0012, 0x375: 0x0010, + 0x376: 0x0010, 0x377: 0x0010, 0x378: 0x0010, 0x379: 0x0012, + 0x37c: 0x0012, 0x37d: 0x0012, 0x37e: 0x0013, 0x37f: 0x0013, + // Block 0xe, offset 0x380 + 0x380: 0x1a13, 0x381: 0x1a13, 0x382: 0x1e13, 0x383: 0x1e13, 0x384: 0x1a13, 0x385: 0x1a13, + 0x386: 0x2613, 0x387: 0x2613, 0x388: 0x2a13, 0x389: 0x2a13, 0x38a: 0x2e13, 0x38b: 0x2e13, + 0x38c: 0x2a13, 0x38d: 0x2a13, 0x38e: 0x2613, 0x38f: 0x2613, 0x390: 0xa052, 0x391: 0xa052, + 0x392: 0xa352, 0x393: 0xa352, 0x394: 0xa652, 0x395: 0xa652, 0x396: 0xa352, 0x397: 0xa352, + 0x398: 0xa052, 0x399: 0xa052, 0x39a: 0x1a12, 0x39b: 0x1a12, 0x39c: 0x1e12, 0x39d: 0x1e12, + 0x39e: 0x1a12, 0x39f: 0x1a12, 0x3a0: 0x2612, 0x3a1: 0x2612, 0x3a2: 0x2a12, 0x3a3: 0x2a12, + 0x3a4: 0x2e12, 0x3a5: 0x2e12, 0x3a6: 0x2a12, 0x3a7: 0x2a12, 0x3a8: 0x2612, 0x3a9: 0x2612, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x6552, 0x3c1: 0x6552, 0x3c2: 0x6552, 0x3c3: 0x6552, 0x3c4: 0x6552, 0x3c5: 0x6552, + 0x3c6: 0x6552, 0x3c7: 0x6552, 0x3c8: 0x6552, 0x3c9: 0x6552, 0x3ca: 0x6552, 0x3cb: 0x6552, + 0x3cc: 0x6552, 0x3cd: 0x6552, 0x3ce: 0x6552, 0x3cf: 0x6552, 0x3d0: 0xa952, 0x3d1: 0xa952, + 0x3d2: 0xa952, 0x3d3: 0xa952, 0x3d4: 0xa952, 0x3d5: 0xa952, 0x3d6: 0xa952, 0x3d7: 0xa952, + 0x3d8: 0xa952, 0x3d9: 0xa952, 0x3da: 0xa952, 0x3db: 0xa952, 0x3dc: 0xa952, 0x3dd: 0xa952, + 0x3de: 0xa952, 0x3e0: 0x0113, 0x3e1: 0x0112, 0x3e2: 0x71eb, 0x3e3: 0x8853, + 0x3e4: 0x724b, 0x3e5: 0x72aa, 0x3e6: 0x730a, 0x3e7: 0x0f13, 0x3e8: 0x0f12, 0x3e9: 0x0313, + 0x3ea: 0x0312, 0x3eb: 0x0713, 0x3ec: 0x0712, 0x3ed: 0x736b, 0x3ee: 0x73cb, 0x3ef: 0x742b, + 0x3f0: 0x748b, 0x3f1: 0x0012, 0x3f2: 0x0113, 0x3f3: 0x0112, 0x3f4: 0x0012, 0x3f5: 0x0313, + 0x3f6: 0x0312, 0x3f7: 0x0012, 0x3f8: 0x0012, 0x3f9: 0x0012, 0x3fa: 0x0012, 0x3fb: 0x0012, + 0x3fc: 0x0015, 0x3fd: 0x0015, 0x3fe: 0x74eb, 0x3ff: 0x754b, + // Block 0x10, offset 0x400 + 0x400: 0x0113, 0x401: 0x0112, 0x402: 0x0113, 0x403: 0x0112, 0x404: 0x0113, 0x405: 0x0112, + 0x406: 0x0113, 0x407: 0x0112, 0x408: 0x0014, 0x409: 0x0004, 0x40a: 
0x0004, 0x40b: 0x0713, + 0x40c: 0x0712, 0x40d: 0x75ab, 0x40e: 0x0012, 0x40f: 0x0010, 0x410: 0x0113, 0x411: 0x0112, + 0x412: 0x0113, 0x413: 0x0112, 0x414: 0x0012, 0x415: 0x0012, 0x416: 0x0113, 0x417: 0x0112, + 0x418: 0x0113, 0x419: 0x0112, 0x41a: 0x0113, 0x41b: 0x0112, 0x41c: 0x0113, 0x41d: 0x0112, + 0x41e: 0x0113, 0x41f: 0x0112, 0x420: 0x0113, 0x421: 0x0112, 0x422: 0x0113, 0x423: 0x0112, + 0x424: 0x0113, 0x425: 0x0112, 0x426: 0x0113, 0x427: 0x0112, 0x428: 0x0113, 0x429: 0x0112, + 0x42a: 0x760b, 0x42b: 0x766b, 0x42c: 0x76cb, 0x42d: 0x772b, 0x42e: 0x778b, + 0x430: 0x77eb, 0x431: 0x784b, 0x432: 0x78ab, 0x433: 0xac53, 0x434: 0x0113, 0x435: 0x0112, + 0x436: 0x0113, 0x437: 0x0112, + // Block 0x11, offset 0x440 + 0x440: 0x790a, 0x441: 0x798a, 0x442: 0x7a0a, 0x443: 0x7a8a, 0x444: 0x7b3a, 0x445: 0x7bea, + 0x446: 0x7c6a, + 0x453: 0x7cea, 0x454: 0x7dca, 0x455: 0x7eaa, 0x456: 0x7f8a, 0x457: 0x806a, + 0x45d: 0x0010, + 0x45e: 0x0034, 0x45f: 0x0010, 0x460: 0x0010, 0x461: 0x0010, 0x462: 0x0010, 0x463: 0x0010, + 0x464: 0x0010, 0x465: 0x0010, 0x466: 0x0010, 0x467: 0x0010, 0x468: 0x0010, + 0x46a: 0x0010, 0x46b: 0x0010, 0x46c: 0x0010, 0x46d: 0x0010, 0x46e: 0x0010, 0x46f: 0x0010, + 0x470: 0x0010, 0x471: 0x0010, 0x472: 0x0010, 0x473: 0x0010, 0x474: 0x0010, 0x475: 0x0010, + 0x476: 0x0010, 0x478: 0x0010, 0x479: 0x0010, 0x47a: 0x0010, 0x47b: 0x0010, + 0x47c: 0x0010, 0x47e: 0x0010, + // Block 0x12, offset 0x480 + 0x480: 0x2213, 0x481: 0x2213, 0x482: 0x2613, 0x483: 0x2613, 0x484: 0x2213, 0x485: 0x2213, + 0x486: 0x2e13, 0x487: 0x2e13, 0x488: 0x2213, 0x489: 0x2213, 0x48a: 0x2613, 0x48b: 0x2613, + 0x48c: 0x2213, 0x48d: 0x2213, 0x48e: 0x3e13, 0x48f: 0x3e13, 0x490: 0x2213, 0x491: 0x2213, + 0x492: 0x2613, 0x493: 0x2613, 0x494: 0x2213, 0x495: 0x2213, 0x496: 0x2e13, 0x497: 0x2e13, + 0x498: 0x2213, 0x499: 0x2213, 0x49a: 0x2613, 0x49b: 0x2613, 0x49c: 0x2213, 0x49d: 0x2213, + 0x49e: 0xb553, 0x49f: 0xb553, 0x4a0: 0xb853, 0x4a1: 0xb853, 0x4a2: 0x2212, 0x4a3: 0x2212, + 0x4a4: 0x2612, 0x4a5: 0x2612, 0x4a6: 0x2212, 0x4a7: 0x2212, 0x4a8: 0x2e12, 0x4a9: 0x2e12, + 0x4aa: 0x2212, 0x4ab: 0x2212, 0x4ac: 0x2612, 0x4ad: 0x2612, 0x4ae: 0x2212, 0x4af: 0x2212, + 0x4b0: 0x3e12, 0x4b1: 0x3e12, 0x4b2: 0x2212, 0x4b3: 0x2212, 0x4b4: 0x2612, 0x4b5: 0x2612, + 0x4b6: 0x2212, 0x4b7: 0x2212, 0x4b8: 0x2e12, 0x4b9: 0x2e12, 0x4ba: 0x2212, 0x4bb: 0x2212, + 0x4bc: 0x2612, 0x4bd: 0x2612, 0x4be: 0x2212, 0x4bf: 0x2212, + // Block 0x13, offset 0x4c0 + 0x4c2: 0x0010, + 0x4c7: 0x0010, 0x4c9: 0x0010, 0x4cb: 0x0010, + 0x4cd: 0x0010, 0x4ce: 0x0010, 0x4cf: 0x0010, 0x4d1: 0x0010, + 0x4d2: 0x0010, 0x4d4: 0x0010, 0x4d7: 0x0010, + 0x4d9: 0x0010, 0x4db: 0x0010, 0x4dd: 0x0010, + 0x4df: 0x0010, 0x4e1: 0x0010, 0x4e2: 0x0010, + 0x4e4: 0x0010, 0x4e7: 0x0010, 0x4e8: 0x0010, 0x4e9: 0x0010, + 0x4ea: 0x0010, 0x4ec: 0x0010, 0x4ed: 0x0010, 0x4ee: 0x0010, 0x4ef: 0x0010, + 0x4f0: 0x0010, 0x4f1: 0x0010, 0x4f2: 0x0010, 0x4f4: 0x0010, 0x4f5: 0x0010, + 0x4f6: 0x0010, 0x4f7: 0x0010, 0x4f9: 0x0010, 0x4fa: 0x0010, 0x4fb: 0x0010, + 0x4fc: 0x0010, 0x4fe: 0x0010, +} + +// caseIndex: 25 blocks, 1600 entries, 3200 bytes +// Block 0 is the zero block. 
+var caseIndex = [1600]uint16{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x12, 0xc3: 0x13, 0xc4: 0x14, 0xc5: 0x15, 0xc6: 0x01, 0xc7: 0x02, + 0xc8: 0x16, 0xc9: 0x03, 0xca: 0x04, 0xcb: 0x17, 0xcc: 0x18, 0xcd: 0x05, 0xce: 0x06, 0xcf: 0x07, + 0xd0: 0x19, 0xd1: 0x1a, 0xd2: 0x1b, 0xd3: 0x1c, 0xd4: 0x1d, 0xd5: 0x1e, 0xd6: 0x1f, 0xd7: 0x20, + 0xd8: 0x21, 0xd9: 0x22, 0xda: 0x23, 0xdb: 0x24, 0xdc: 0x25, 0xdd: 0x26, 0xde: 0x27, 0xdf: 0x28, + 0xe0: 0x02, 0xe1: 0x03, 0xe2: 0x04, 0xe3: 0x05, + 0xea: 0x06, 0xeb: 0x07, 0xec: 0x07, 0xed: 0x08, 0xef: 0x09, + 0xf0: 0x14, 0xf3: 0x16, + // Block 0x4, offset 0x100 + 0x120: 0x29, 0x121: 0x2a, 0x122: 0x2b, 0x123: 0x2c, 0x124: 0x2d, 0x125: 0x2e, 0x126: 0x2f, 0x127: 0x30, + 0x128: 0x31, 0x129: 0x32, 0x12a: 0x33, 0x12b: 0x34, 0x12c: 0x35, 0x12d: 0x36, 0x12e: 0x37, 0x12f: 0x38, + 0x130: 0x39, 0x131: 0x3a, 0x132: 0x3b, 0x133: 0x3c, 0x134: 0x3d, 0x135: 0x3e, 0x136: 0x3f, 0x137: 0x40, + 0x138: 0x41, 0x139: 0x42, 0x13a: 0x43, 0x13b: 0x44, 0x13c: 0x45, 0x13d: 0x46, 0x13e: 0x47, 0x13f: 0x48, + // Block 0x5, offset 0x140 + 0x140: 0x49, 0x141: 0x4a, 0x142: 0x4b, 0x143: 0x4c, 0x144: 0x23, 0x145: 0x23, 0x146: 0x23, 0x147: 0x23, + 0x148: 0x23, 0x149: 0x4d, 0x14a: 0x4e, 0x14b: 0x4f, 0x14c: 0x50, 0x14d: 0x51, 0x14e: 0x52, 0x14f: 0x53, + 0x150: 0x54, 0x151: 0x23, 0x152: 0x23, 0x153: 0x23, 0x154: 0x23, 0x155: 0x23, 0x156: 0x23, 0x157: 0x23, + 0x158: 0x23, 0x159: 0x55, 0x15a: 0x56, 0x15b: 0x57, 0x15c: 0x58, 0x15d: 0x59, 0x15e: 0x5a, 0x15f: 0x5b, + 0x160: 0x5c, 0x161: 0x5d, 0x162: 0x5e, 0x163: 0x5f, 0x164: 0x60, 0x165: 0x61, 0x167: 0x62, + 0x168: 0x63, 0x169: 0x64, 0x16a: 0x65, 0x16c: 0x66, 0x16d: 0x67, 0x16e: 0x68, 0x16f: 0x69, + 0x170: 0x6a, 0x171: 0x6b, 0x172: 0x6c, 0x173: 0x6d, 0x174: 0x6e, 0x175: 0x6f, 0x176: 0x70, 0x177: 0x71, + 0x178: 0x72, 0x179: 0x72, 0x17a: 0x73, 0x17b: 0x72, 0x17c: 0x74, 0x17d: 0x08, 0x17e: 0x09, 0x17f: 0x0a, + // Block 0x6, offset 0x180 + 0x180: 0x75, 0x181: 0x76, 0x182: 0x77, 0x183: 0x78, 0x184: 0x0b, 0x185: 0x79, 0x186: 0x7a, + 0x192: 0x7b, 0x193: 0x0c, + 0x1b0: 0x7c, 0x1b1: 0x0d, 0x1b2: 0x72, 0x1b3: 0x7d, 0x1b4: 0x7e, 0x1b5: 0x7f, 0x1b6: 0x80, 0x1b7: 0x81, + 0x1b8: 0x82, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x83, 0x1c2: 0x84, 0x1c3: 0x85, 0x1c4: 0x86, 0x1c5: 0x23, 0x1c6: 0x87, + // Block 0x8, offset 0x200 + 0x200: 0x88, 0x201: 0x23, 0x202: 0x23, 0x203: 0x23, 0x204: 0x23, 0x205: 0x23, 0x206: 0x23, 0x207: 0x23, + 0x208: 0x23, 0x209: 0x23, 0x20a: 0x23, 0x20b: 0x23, 0x20c: 0x23, 0x20d: 0x23, 0x20e: 0x23, 0x20f: 0x23, + 0x210: 0x23, 0x211: 0x23, 0x212: 0x89, 0x213: 0x8a, 0x214: 0x23, 0x215: 0x23, 0x216: 0x23, 0x217: 0x23, + 0x218: 0x8b, 0x219: 0x8c, 0x21a: 0x8d, 0x21b: 0x8e, 0x21c: 0x8f, 0x21d: 0x90, 0x21e: 0x0e, 0x21f: 0x91, + 0x220: 0x92, 0x221: 0x93, 0x222: 0x23, 0x223: 0x94, 0x224: 0x95, 0x225: 0x96, 0x226: 0x97, 0x227: 0x98, + 0x228: 0x99, 0x229: 0x9a, 0x22a: 0x9b, 0x22b: 0x9c, 0x22c: 0x9d, 0x22d: 0x9e, 0x22e: 0x9f, 0x22f: 0xa0, + 0x230: 0x23, 0x231: 0x23, 0x232: 0x23, 0x233: 0x23, 0x234: 0x23, 0x235: 0x23, 0x236: 0x23, 0x237: 0x23, + 0x238: 0x23, 0x239: 0x23, 0x23a: 0x23, 0x23b: 0x23, 0x23c: 0x23, 0x23d: 0x23, 0x23e: 0x23, 0x23f: 0x23, + // Block 0x9, offset 0x240 + 0x240: 0x23, 0x241: 0x23, 0x242: 0x23, 0x243: 0x23, 0x244: 0x23, 0x245: 0x23, 0x246: 0x23, 0x247: 0x23, + 0x248: 0x23, 0x249: 0x23, 0x24a: 0x23, 0x24b: 0x23, 0x24c: 0x23, 0x24d: 0x23, 0x24e: 0x23, 0x24f: 0x23, + 0x250: 0x23, 0x251: 0x23, 0x252: 0x23, 0x253: 0x23, 0x254: 0x23, 0x255: 0x23, 0x256: 0x23, 0x257: 
0x23, + 0x258: 0x23, 0x259: 0x23, 0x25a: 0x23, 0x25b: 0x23, 0x25c: 0x23, 0x25d: 0x23, 0x25e: 0x23, 0x25f: 0x23, + 0x260: 0x23, 0x261: 0x23, 0x262: 0x23, 0x263: 0x23, 0x264: 0x23, 0x265: 0x23, 0x266: 0x23, 0x267: 0x23, + 0x268: 0x23, 0x269: 0x23, 0x26a: 0x23, 0x26b: 0x23, 0x26c: 0x23, 0x26d: 0x23, 0x26e: 0x23, 0x26f: 0x23, + 0x270: 0x23, 0x271: 0x23, 0x272: 0x23, 0x273: 0x23, 0x274: 0x23, 0x275: 0x23, 0x276: 0x23, 0x277: 0x23, + 0x278: 0x23, 0x279: 0x23, 0x27a: 0x23, 0x27b: 0x23, 0x27c: 0x23, 0x27d: 0x23, 0x27e: 0x23, 0x27f: 0x23, + // Block 0xa, offset 0x280 + 0x280: 0x23, 0x281: 0x23, 0x282: 0x23, 0x283: 0x23, 0x284: 0x23, 0x285: 0x23, 0x286: 0x23, 0x287: 0x23, + 0x288: 0x23, 0x289: 0x23, 0x28a: 0x23, 0x28b: 0x23, 0x28c: 0x23, 0x28d: 0x23, 0x28e: 0x23, 0x28f: 0x23, + 0x290: 0x23, 0x291: 0x23, 0x292: 0x23, 0x293: 0x23, 0x294: 0x23, 0x295: 0x23, 0x296: 0x23, 0x297: 0x23, + 0x298: 0x23, 0x299: 0x23, 0x29a: 0x23, 0x29b: 0x23, 0x29c: 0x23, 0x29d: 0x23, 0x29e: 0xa1, 0x29f: 0xa2, + // Block 0xb, offset 0x2c0 + 0x2ec: 0x0f, 0x2ed: 0xa3, 0x2ee: 0xa4, 0x2ef: 0xa5, + 0x2f0: 0x23, 0x2f1: 0x23, 0x2f2: 0x23, 0x2f3: 0x23, 0x2f4: 0xa6, 0x2f5: 0xa7, 0x2f6: 0xa8, 0x2f7: 0xa9, + 0x2f8: 0xaa, 0x2f9: 0xab, 0x2fa: 0x23, 0x2fb: 0xac, 0x2fc: 0xad, 0x2fd: 0xae, 0x2fe: 0xaf, 0x2ff: 0xb0, + // Block 0xc, offset 0x300 + 0x300: 0xb1, 0x301: 0xb2, 0x302: 0x23, 0x303: 0xb3, 0x305: 0xb4, 0x307: 0xb5, + 0x30a: 0xb6, 0x30b: 0xb7, 0x30c: 0xb8, 0x30d: 0xb9, 0x30e: 0xba, 0x30f: 0xbb, + 0x310: 0xbc, 0x311: 0xbd, 0x312: 0xbe, 0x313: 0xbf, 0x314: 0xc0, 0x315: 0xc1, + 0x318: 0x23, 0x319: 0x23, 0x31a: 0x23, 0x31b: 0x23, 0x31c: 0xc2, 0x31d: 0xc3, + 0x320: 0xc4, 0x321: 0xc5, 0x322: 0xc6, 0x323: 0xc7, 0x324: 0xc8, 0x326: 0xc9, + 0x328: 0xca, 0x329: 0xcb, 0x32a: 0xcc, 0x32b: 0xcd, 0x32c: 0x5f, 0x32d: 0xce, 0x32e: 0xcf, + 0x330: 0x23, 0x331: 0xd0, 0x332: 0xd1, 0x333: 0xd2, + // Block 0xd, offset 0x340 + 0x340: 0xd3, 0x341: 0xd4, 0x342: 0xd5, 0x343: 0xd6, 0x344: 0xd7, 0x345: 0xd8, 0x346: 0xd9, 0x347: 0xda, + 0x348: 0xdb, 0x34a: 0xdc, 0x34b: 0xdd, 0x34c: 0xde, 0x34d: 0xdf, + 0x350: 0xe0, 0x351: 0xe1, 0x352: 0xe2, 0x353: 0xe3, 0x356: 0xe4, 0x357: 0xe5, + 0x358: 0xe6, 0x359: 0xe7, 0x35a: 0xe8, 0x35b: 0xe9, 0x35c: 0xea, + 0x362: 0xeb, 0x363: 0xec, + 0x36b: 0xed, + 0x370: 0xee, 0x371: 0xef, 0x372: 0xf0, + // Block 0xe, offset 0x380 + 0x380: 0x23, 0x381: 0x23, 0x382: 0x23, 0x383: 0x23, 0x384: 0x23, 0x385: 0x23, 0x386: 0x23, 0x387: 0x23, + 0x388: 0x23, 0x389: 0x23, 0x38a: 0x23, 0x38b: 0x23, 0x38c: 0x23, 0x38d: 0x23, 0x38e: 0xf1, + 0x390: 0x23, 0x391: 0xf2, 0x392: 0x23, 0x393: 0x23, 0x394: 0x23, 0x395: 0xf3, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x23, 0x3c1: 0x23, 0x3c2: 0x23, 0x3c3: 0x23, 0x3c4: 0x23, 0x3c5: 0x23, 0x3c6: 0x23, 0x3c7: 0x23, + 0x3c8: 0x23, 0x3c9: 0x23, 0x3ca: 0x23, 0x3cb: 0x23, 0x3cc: 0x23, 0x3cd: 0x23, 0x3ce: 0x23, 0x3cf: 0x23, + 0x3d0: 0xf2, + // Block 0x10, offset 0x400 + 0x410: 0x23, 0x411: 0x23, 0x412: 0x23, 0x413: 0x23, 0x414: 0x23, 0x415: 0x23, 0x416: 0x23, 0x417: 0x23, + 0x418: 0x23, 0x419: 0xf4, + // Block 0x11, offset 0x440 + 0x460: 0x23, 0x461: 0x23, 0x462: 0x23, 0x463: 0x23, 0x464: 0x23, 0x465: 0x23, 0x466: 0x23, 0x467: 0x23, + 0x468: 0xed, 0x469: 0xf5, 0x46b: 0xf6, 0x46c: 0xf7, 0x46d: 0xf8, 0x46e: 0xf9, + 0x47c: 0x23, 0x47d: 0xfa, 0x47e: 0xfb, 0x47f: 0xfc, + // Block 0x12, offset 0x480 + 0x4b0: 0x23, 0x4b1: 0xfd, 0x4b2: 0xfe, + // Block 0x13, offset 0x4c0 + 0x4c5: 0xff, 0x4c6: 0x100, + 0x4c9: 0x101, + 0x4d0: 0x102, 0x4d1: 0x103, 0x4d2: 0x104, 0x4d3: 0x105, 0x4d4: 0x106, 0x4d5: 0x107, 0x4d6: 0x108, 0x4d7: 0x109, + 
0x4d8: 0x10a, 0x4d9: 0x10b, 0x4da: 0x10c, 0x4db: 0x10d, 0x4dc: 0x10e, 0x4dd: 0x10f, 0x4de: 0x110, 0x4df: 0x111, + 0x4e8: 0x112, 0x4e9: 0x113, 0x4ea: 0x114, + // Block 0x14, offset 0x500 + 0x500: 0x115, + 0x520: 0x23, 0x521: 0x23, 0x522: 0x23, 0x523: 0x116, 0x524: 0x10, 0x525: 0x117, + 0x538: 0x118, 0x539: 0x11, 0x53a: 0x119, + // Block 0x15, offset 0x540 + 0x544: 0x11a, 0x545: 0x11b, 0x546: 0x11c, + 0x54f: 0x11d, + // Block 0x16, offset 0x580 + 0x590: 0x0a, 0x591: 0x0b, 0x592: 0x0c, 0x593: 0x0d, 0x594: 0x0e, 0x596: 0x0f, + 0x59b: 0x10, 0x59d: 0x11, 0x59e: 0x12, 0x59f: 0x13, + // Block 0x17, offset 0x5c0 + 0x5c0: 0x11e, 0x5c1: 0x11f, 0x5c4: 0x11f, 0x5c5: 0x11f, 0x5c6: 0x11f, 0x5c7: 0x120, + // Block 0x18, offset 0x600 + 0x620: 0x15, +} + +// sparseOffsets: 272 entries, 544 bytes +var sparseOffsets = []uint16{0x0, 0x9, 0xf, 0x18, 0x24, 0x2e, 0x3a, 0x3d, 0x41, 0x44, 0x48, 0x52, 0x54, 0x59, 0x69, 0x70, 0x75, 0x83, 0x84, 0x92, 0xa1, 0xab, 0xae, 0xb4, 0xbc, 0xbe, 0xc0, 0xce, 0xd4, 0xe2, 0xed, 0xf8, 0x103, 0x10f, 0x119, 0x124, 0x12f, 0x13b, 0x147, 0x14f, 0x157, 0x161, 0x16c, 0x178, 0x17e, 0x189, 0x18e, 0x196, 0x199, 0x19e, 0x1a2, 0x1a6, 0x1ad, 0x1b6, 0x1be, 0x1bf, 0x1c8, 0x1cf, 0x1d7, 0x1dd, 0x1e3, 0x1e8, 0x1ec, 0x1ef, 0x1f1, 0x1f4, 0x1f9, 0x1fa, 0x1fc, 0x1fe, 0x200, 0x207, 0x20c, 0x210, 0x219, 0x21c, 0x21f, 0x225, 0x226, 0x231, 0x232, 0x233, 0x238, 0x245, 0x24d, 0x255, 0x25e, 0x267, 0x270, 0x275, 0x278, 0x281, 0x28e, 0x290, 0x297, 0x299, 0x2a4, 0x2a5, 0x2b0, 0x2b8, 0x2c0, 0x2c6, 0x2c7, 0x2d5, 0x2da, 0x2dd, 0x2e2, 0x2e6, 0x2ec, 0x2f1, 0x2f4, 0x2f9, 0x2fe, 0x2ff, 0x305, 0x307, 0x308, 0x30a, 0x30c, 0x30f, 0x310, 0x312, 0x315, 0x31b, 0x31f, 0x321, 0x327, 0x32e, 0x332, 0x33b, 0x33c, 0x344, 0x348, 0x34d, 0x355, 0x35b, 0x361, 0x36b, 0x370, 0x379, 0x37f, 0x386, 0x38a, 0x392, 0x394, 0x396, 0x399, 0x39b, 0x39d, 0x39e, 0x39f, 0x3a1, 0x3a3, 0x3a9, 0x3ae, 0x3b0, 0x3b6, 0x3b9, 0x3bb, 0x3c1, 0x3c6, 0x3c8, 0x3c9, 0x3ca, 0x3cb, 0x3cd, 0x3cf, 0x3d1, 0x3d4, 0x3d6, 0x3d9, 0x3e1, 0x3e4, 0x3e8, 0x3f0, 0x3f2, 0x3f3, 0x3f4, 0x3f6, 0x3fc, 0x3fe, 0x3ff, 0x401, 0x403, 0x405, 0x412, 0x413, 0x414, 0x418, 0x41a, 0x41b, 0x41c, 0x41d, 0x41e, 0x422, 0x426, 0x42c, 0x42e, 0x435, 0x438, 0x43c, 0x442, 0x44b, 0x451, 0x457, 0x461, 0x46b, 0x46d, 0x474, 0x47a, 0x480, 0x486, 0x489, 0x48f, 0x492, 0x49a, 0x49b, 0x4a2, 0x4a3, 0x4a6, 0x4a7, 0x4ad, 0x4b0, 0x4b8, 0x4b9, 0x4ba, 0x4bb, 0x4bc, 0x4be, 0x4c0, 0x4c2, 0x4c6, 0x4c7, 0x4c9, 0x4ca, 0x4cb, 0x4cd, 0x4d2, 0x4d7, 0x4db, 0x4dc, 0x4df, 0x4e3, 0x4ee, 0x4f2, 0x4fa, 0x4ff, 0x503, 0x506, 0x50a, 0x50d, 0x510, 0x515, 0x519, 0x51d, 0x521, 0x525, 0x527, 0x529, 0x52c, 0x531, 0x533, 0x538, 0x541, 0x546, 0x547, 0x54a, 0x54b, 0x54c, 0x54e, 0x54f, 0x550} + +// sparseValues: 1360 entries, 5440 bytes +var sparseValues = [1360]valueRange{ + // Block 0x0, offset 0x0 + {value: 0x0004, lo: 0xa8, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xaa}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0004, lo: 0xaf, hi: 0xaf}, + {value: 0x0004, lo: 0xb4, hi: 0xb4}, + {value: 0x001a, lo: 0xb5, hi: 0xb5}, + {value: 0x0054, lo: 0xb7, hi: 0xb7}, + {value: 0x0004, lo: 0xb8, hi: 0xb8}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + // Block 0x1, offset 0x9 + {value: 0x2013, lo: 0x80, hi: 0x96}, + {value: 0x2013, lo: 0x98, hi: 0x9e}, + {value: 0x009a, lo: 0x9f, hi: 0x9f}, + {value: 0x2012, lo: 0xa0, hi: 0xb6}, + {value: 0x2012, lo: 0xb8, hi: 0xbe}, + {value: 0x0252, lo: 0xbf, hi: 0xbf}, + // Block 0x2, offset 0xf + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x011b, lo: 0xb0, hi: 0xb0}, + {value: 0x019a, lo: 0xb1, hi: 0xb1}, 
+ {value: 0x0117, lo: 0xb2, hi: 0xb7}, + {value: 0x0012, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x0553, lo: 0xbf, hi: 0xbf}, + // Block 0x3, offset 0x18 + {value: 0x0552, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x01da, lo: 0x89, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xb7}, + {value: 0x0253, lo: 0xb8, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x0316, lo: 0xbd, hi: 0xbe}, + {value: 0x028a, lo: 0xbf, hi: 0xbf}, + // Block 0x4, offset 0x24 + {value: 0x0117, lo: 0x80, hi: 0x9f}, + {value: 0x2f53, lo: 0xa0, hi: 0xa0}, + {value: 0x0012, lo: 0xa1, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xb3}, + {value: 0x0012, lo: 0xb4, hi: 0xb9}, + {value: 0x090b, lo: 0xba, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x2953, lo: 0xbd, hi: 0xbd}, + {value: 0x098b, lo: 0xbe, hi: 0xbe}, + {value: 0x0a0a, lo: 0xbf, hi: 0xbf}, + // Block 0x5, offset 0x2e + {value: 0x0015, lo: 0x80, hi: 0x81}, + {value: 0x0004, lo: 0x82, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x91}, + {value: 0x0004, lo: 0x92, hi: 0x96}, + {value: 0x0054, lo: 0x97, hi: 0x97}, + {value: 0x0004, lo: 0x98, hi: 0x9f}, + {value: 0x0015, lo: 0xa0, hi: 0xa4}, + {value: 0x0004, lo: 0xa5, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xac}, + {value: 0x0004, lo: 0xad, hi: 0xad}, + {value: 0x0014, lo: 0xae, hi: 0xae}, + {value: 0x0004, lo: 0xaf, hi: 0xbf}, + // Block 0x6, offset 0x3a + {value: 0x0024, lo: 0x80, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbf}, + // Block 0x7, offset 0x3d + {value: 0x6553, lo: 0x80, hi: 0x8f}, + {value: 0x2013, lo: 0x90, hi: 0x9f}, + {value: 0x5f53, lo: 0xa0, hi: 0xaf}, + {value: 0x2012, lo: 0xb0, hi: 0xbf}, + // Block 0x8, offset 0x41 + {value: 0x5f52, lo: 0x80, hi: 0x8f}, + {value: 0x6552, lo: 0x90, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x9, offset 0x44 + {value: 0x0117, lo: 0x80, hi: 0x81}, + {value: 0x0024, lo: 0x83, hi: 0x87}, + {value: 0x0014, lo: 0x88, hi: 0x89}, + {value: 0x0117, lo: 0x8a, hi: 0xbf}, + // Block 0xa, offset 0x48 + {value: 0x0f13, lo: 0x80, hi: 0x80}, + {value: 0x0316, lo: 0x81, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0316, lo: 0x85, hi: 0x86}, + {value: 0x0f16, lo: 0x87, hi: 0x88}, + {value: 0x0316, lo: 0x89, hi: 0x8a}, + {value: 0x0716, lo: 0x8b, hi: 0x8c}, + {value: 0x0316, lo: 0x8d, hi: 0x8e}, + {value: 0x0f12, lo: 0x8f, hi: 0x8f}, + {value: 0x0117, lo: 0x90, hi: 0xbf}, + // Block 0xb, offset 0x52 + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x6553, lo: 0xb1, hi: 0xbf}, + // Block 0xc, offset 0x54 + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6853, lo: 0x90, hi: 0x96}, + {value: 0x0014, lo: 0x99, hi: 0x99}, + {value: 0x6552, lo: 0xa1, hi: 0xaf}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0xd, offset 0x59 + {value: 0x6852, lo: 0x80, hi: 0x86}, + {value: 0x198a, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0024, lo: 0x92, hi: 0x95}, + {value: 0x0034, lo: 0x96, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x99}, + {value: 0x0034, lo: 0x9a, hi: 0x9b}, + {value: 0x0024, lo: 0x9c, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa7}, + {value: 0x0024, lo: 0xa8, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 
0x0034, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xbd}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xe, offset 0x69 + {value: 0x0034, lo: 0x81, hi: 0x82}, + {value: 0x0024, lo: 0x84, hi: 0x84}, + {value: 0x0034, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb3}, + {value: 0x0054, lo: 0xb4, hi: 0xb4}, + // Block 0xf, offset 0x70 + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0024, lo: 0x90, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0014, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x10, offset 0x75 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x8a}, + {value: 0x0034, lo: 0x8b, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9c}, + {value: 0x0024, lo: 0x9d, hi: 0x9e}, + {value: 0x0034, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0034, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x11, offset 0x83 + {value: 0x0010, lo: 0x80, hi: 0xbf}, + // Block 0x12, offset 0x84 + {value: 0x0010, lo: 0x80, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0024, lo: 0x9f, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xaa, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x13, offset 0x92 + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0034, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0034, lo: 0xb1, hi: 0xb1}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbc}, + {value: 0x0024, lo: 0xbd, hi: 0xbd}, + {value: 0x0034, lo: 0xbe, hi: 0xbe}, + {value: 0x0024, lo: 0xbf, hi: 0xbf}, + // Block 0x14, offset 0xa1 + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0024, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x88}, + {value: 0x0024, lo: 0x89, hi: 0x8a}, + {value: 0x0010, lo: 0x8d, hi: 0xbf}, + // Block 0x15, offset 0xab + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x16, offset 0xae + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0024, lo: 0xab, hi: 0xb1}, + {value: 0x0034, lo: 0xb2, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + // Block 0x17, offset 0xb4 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0024, lo: 0x96, hi: 0x99}, + {value: 0x0014, lo: 0x9a, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0xa3}, + {value: 0x0014, lo: 0xa4, hi: 0xa4}, 
+ {value: 0x0024, lo: 0xa5, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xad}, + // Block 0x18, offset 0xbc + {value: 0x0010, lo: 0x80, hi: 0x98}, + {value: 0x0034, lo: 0x99, hi: 0x9b}, + // Block 0x19, offset 0xbe + {value: 0x0010, lo: 0xa0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbd}, + // Block 0x1a, offset 0xc0 + {value: 0x0024, lo: 0x94, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0034, lo: 0xa3, hi: 0xa3}, + {value: 0x0024, lo: 0xa4, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0024, lo: 0xaa, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xb2}, + {value: 0x0024, lo: 0xb3, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbf}, + // Block 0x1b, offset 0xce + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1c, offset 0xd4 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0024, lo: 0x93, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x98, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0x1d, offset 0xe2 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb6, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x1e, offset 0xed + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xb1}, + // Block 0x1f, offset 0xf8 + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x20, offset 0x103 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x91, hi: 0x91}, + {value: 0x0010, lo: 0x99, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb1}, + {value: 
0x0010, lo: 0xb2, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x21, offset 0x10f + {value: 0x0014, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x22, offset 0x119 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x85}, + {value: 0x0014, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x89, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + // Block 0x23, offset 0x124 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x24, offset 0x12f + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9c, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + // Block 0x25, offset 0x13b + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8a}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + {value: 0x0010, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0010, lo: 0xa8, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x26, offset 0x147 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x82}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x27, offset 0x14f + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb9}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbf}, + // Block 0x28, offset 0x157 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x88}, + {value: 0x0014, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0034, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + // Block 0x29, offset 0x161 + {value: 0x0010, 
lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x2a, offset 0x16c + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x95, hi: 0x96}, + {value: 0x0010, lo: 0x9e, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb1, hi: 0xb2}, + // Block 0x2b, offset 0x178 + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8e, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0xba}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x2c, offset 0x17e + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8e, hi: 0x8e}, + {value: 0x0010, lo: 0x94, hi: 0x97}, + {value: 0x0010, lo: 0x9f, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa3}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xba, hi: 0xbf}, + // Block 0x2d, offset 0x189 + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x96}, + {value: 0x0010, lo: 0x9a, hi: 0xb1}, + {value: 0x0010, lo: 0xb3, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x2e, offset 0x18e + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x8f, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x94}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9f}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + // Block 0x2f, offset 0x196 + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xba}, + // Block 0x30, offset 0x199 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x87}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x31, offset 0x19e + {value: 0x0014, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb4, hi: 0xb7}, + {value: 0x0034, lo: 0xb8, hi: 0xb9}, + {value: 0x0014, lo: 0xbb, hi: 0xbc}, + // Block 0x32, offset 0x1a2 + {value: 0x0004, lo: 0x86, hi: 0x86}, + {value: 0x0034, lo: 0x88, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x33, offset 0x1a6 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0034, lo: 0x98, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0034, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x34, offset 0x1ad + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xac}, + {value: 0x0034, lo: 0xb1, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xba, hi: 0xbd}, + 
{value: 0x0014, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x35, offset 0x1b6 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0024, lo: 0x82, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0024, lo: 0x86, hi: 0x87}, + {value: 0x0010, lo: 0x88, hi: 0x8c}, + {value: 0x0014, lo: 0x8d, hi: 0x97}, + {value: 0x0014, lo: 0x99, hi: 0xbc}, + // Block 0x36, offset 0x1be + {value: 0x0034, lo: 0x86, hi: 0x86}, + // Block 0x37, offset 0x1bf + {value: 0x0010, lo: 0xab, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + {value: 0x0010, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbe}, + // Block 0x38, offset 0x1c8 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x96, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x99}, + {value: 0x0014, lo: 0x9e, hi: 0xa0}, + {value: 0x0010, lo: 0xa2, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xad}, + {value: 0x0014, lo: 0xb1, hi: 0xb4}, + // Block 0x39, offset 0x1cf + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x6c53, lo: 0xa0, hi: 0xbf}, + // Block 0x3a, offset 0x1d7 + {value: 0x7053, lo: 0x80, hi: 0x85}, + {value: 0x7053, lo: 0x87, hi: 0x87}, + {value: 0x7053, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xba}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x3b, offset 0x1dd + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x9a, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x3c, offset 0x1e3 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x3d, offset 0x1e8 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x82, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3e, offset 0x1ec + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0010, lo: 0x92, hi: 0x95}, + {value: 0x0010, lo: 0x98, hi: 0xbf}, + // Block 0x3f, offset 0x1ef + {value: 0x0010, lo: 0x80, hi: 0x9a}, + {value: 0x0024, lo: 0x9d, hi: 0x9f}, + // Block 0x40, offset 0x1f1 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x7453, lo: 0xa0, hi: 0xaf}, + {value: 0x7853, lo: 0xb0, hi: 0xbf}, + // Block 0x41, offset 0x1f4 + {value: 0x7c53, lo: 0x80, hi: 0x8f}, + {value: 0x8053, lo: 0x90, hi: 0x9f}, + {value: 0x7c53, lo: 0xa0, hi: 0xaf}, + {value: 0x0813, lo: 0xb0, hi: 0xb5}, + {value: 0x0892, lo: 0xb8, hi: 0xbd}, + // Block 0x42, offset 0x1f9 + {value: 0x0010, lo: 0x81, hi: 0xbf}, + // Block 0x43, offset 0x1fa + {value: 0x0010, lo: 0x80, hi: 0xac}, + {value: 0x0010, lo: 0xaf, hi: 0xbf}, + // Block 0x44, offset 0x1fc + {value: 0x0010, lo: 0x81, hi: 0x9a}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x45, offset 0x1fe + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xae, hi: 0xb8}, + // Block 0x46, offset 0x200 + {value: 0x0010, lo: 0x80, hi: 0x8c}, + {value: 
0x0010, lo: 0x8e, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0034, lo: 0x94, hi: 0x94}, + {value: 0x0010, lo: 0xa0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + // Block 0x47, offset 0x207 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0014, lo: 0x92, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xac}, + {value: 0x0010, lo: 0xae, hi: 0xb0}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + // Block 0x48, offset 0x20c + {value: 0x0014, lo: 0xb4, hi: 0xb5}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0x49, offset 0x210 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0014, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0014, lo: 0x89, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x92}, + {value: 0x0014, lo: 0x93, hi: 0x93}, + {value: 0x0004, lo: 0x97, hi: 0x97}, + {value: 0x0024, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0x4a, offset 0x219 + {value: 0x0014, lo: 0x8b, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x4b, offset 0x21c + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb7}, + // Block 0x4c, offset 0x21f + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x4d, offset 0x225 + {value: 0x0010, lo: 0x80, hi: 0xb5}, + // Block 0x4e, offset 0x226 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xb9}, + {value: 0x0024, lo: 0xba, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbb}, + // Block 0x4f, offset 0x231 + {value: 0x0010, lo: 0x86, hi: 0x8f}, + // Block 0x50, offset 0x232 + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x51, offset 0x233 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0024, lo: 0x97, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x98}, + {value: 0x0010, lo: 0x99, hi: 0x9a}, + {value: 0x0014, lo: 0x9b, hi: 0x9b}, + // Block 0x52, offset 0x238 + {value: 0x0010, lo: 0x95, hi: 0x95}, + {value: 0x0014, lo: 0x96, hi: 0x96}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0014, lo: 0x98, hi: 0x9e}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa2}, + {value: 0x0010, lo: 0xa3, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xac}, + {value: 0x0010, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0024, lo: 0xb5, hi: 0xbc}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x53, offset 0x245 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xa7, hi: 0xa7}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + {value: 0x0034, lo: 0xb5, hi: 0xba}, + {value: 0x0024, lo: 0xbb, hi: 0xbc}, + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0x54, offset 0x24d + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0014, lo: 
0xb6, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x55, offset 0x255 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x83}, + {value: 0x0030, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x8b}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xab, hi: 0xab}, + {value: 0x0034, lo: 0xac, hi: 0xac}, + {value: 0x0024, lo: 0xad, hi: 0xb3}, + // Block 0x56, offset 0x25e + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0030, lo: 0xaa, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xbf}, + // Block 0x57, offset 0x267 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa9}, + {value: 0x0010, lo: 0xaa, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0030, lo: 0xb2, hi: 0xb3}, + // Block 0x58, offset 0x270 + {value: 0x0010, lo: 0x80, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0x59, offset 0x275 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8d, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x5a, offset 0x278 + {value: 0x1a6a, lo: 0x80, hi: 0x80}, + {value: 0x1aea, lo: 0x81, hi: 0x81}, + {value: 0x1b6a, lo: 0x82, hi: 0x82}, + {value: 0x1bea, lo: 0x83, hi: 0x83}, + {value: 0x1c6a, lo: 0x84, hi: 0x84}, + {value: 0x1cea, lo: 0x85, hi: 0x85}, + {value: 0x1d6a, lo: 0x86, hi: 0x86}, + {value: 0x1dea, lo: 0x87, hi: 0x87}, + {value: 0x1e6a, lo: 0x88, hi: 0x88}, + // Block 0x5b, offset 0x281 + {value: 0x0024, lo: 0x90, hi: 0x92}, + {value: 0x0034, lo: 0x94, hi: 0x99}, + {value: 0x0024, lo: 0x9a, hi: 0x9b}, + {value: 0x0034, lo: 0x9c, hi: 0x9f}, + {value: 0x0024, lo: 0xa0, hi: 0xa0}, + {value: 0x0010, lo: 0xa1, hi: 0xa1}, + {value: 0x0034, lo: 0xa2, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xb3}, + {value: 0x0024, lo: 0xb4, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb6}, + {value: 0x0024, lo: 0xb8, hi: 0xb9}, + // Block 0x5c, offset 0x28e + {value: 0x0012, lo: 0x80, hi: 0xab}, + {value: 0x0015, lo: 0xac, hi: 0xbf}, + // Block 0x5d, offset 0x290 + {value: 0x0015, lo: 0x80, hi: 0xaa}, + {value: 0x0012, lo: 0xab, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb8}, + {value: 0x8452, lo: 0xb9, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbc}, + {value: 0x8852, lo: 0xbd, hi: 0xbd}, + {value: 0x0012, lo: 0xbe, hi: 0xbf}, + // Block 0x5e, offset 0x297 + {value: 0x0012, lo: 0x80, hi: 0x9a}, + {value: 0x0015, lo: 0x9b, hi: 0xbf}, + // Block 0x5f, offset 0x299 + {value: 0x0024, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0024, lo: 0x83, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0024, lo: 0x8b, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x90}, + {value: 0x0024, lo: 0x91, hi: 0xb5}, + {value: 0x0024, lo: 0xbb, hi: 0xbb}, + {value: 0x0034, lo: 0xbc, hi: 0xbd}, + {value: 0x0024, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // 
Block 0x60, offset 0x2a4 + {value: 0x0117, lo: 0x80, hi: 0xbf}, + // Block 0x61, offset 0x2a5 + {value: 0x0117, lo: 0x80, hi: 0x95}, + {value: 0x1f1a, lo: 0x96, hi: 0x96}, + {value: 0x1fca, lo: 0x97, hi: 0x97}, + {value: 0x207a, lo: 0x98, hi: 0x98}, + {value: 0x212a, lo: 0x99, hi: 0x99}, + {value: 0x21da, lo: 0x9a, hi: 0x9a}, + {value: 0x228a, lo: 0x9b, hi: 0x9b}, + {value: 0x0012, lo: 0x9c, hi: 0x9d}, + {value: 0x233b, lo: 0x9e, hi: 0x9e}, + {value: 0x0012, lo: 0x9f, hi: 0x9f}, + {value: 0x0117, lo: 0xa0, hi: 0xbf}, + // Block 0x62, offset 0x2b0 + {value: 0x0812, lo: 0x80, hi: 0x87}, + {value: 0x0813, lo: 0x88, hi: 0x8f}, + {value: 0x0812, lo: 0x90, hi: 0x95}, + {value: 0x0813, lo: 0x98, hi: 0x9d}, + {value: 0x0812, lo: 0xa0, hi: 0xa7}, + {value: 0x0813, lo: 0xa8, hi: 0xaf}, + {value: 0x0812, lo: 0xb0, hi: 0xb7}, + {value: 0x0813, lo: 0xb8, hi: 0xbf}, + // Block 0x63, offset 0x2b8 + {value: 0x0004, lo: 0x8b, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8f}, + {value: 0x0054, lo: 0x98, hi: 0x99}, + {value: 0x0054, lo: 0xa4, hi: 0xa4}, + {value: 0x0054, lo: 0xa7, hi: 0xa7}, + {value: 0x0014, lo: 0xaa, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xaf}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x64, offset 0x2c0 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x94, hi: 0x94}, + {value: 0x0014, lo: 0xa0, hi: 0xa4}, + {value: 0x0014, lo: 0xa6, hi: 0xaf}, + {value: 0x0015, lo: 0xb1, hi: 0xb1}, + {value: 0x0015, lo: 0xbf, hi: 0xbf}, + // Block 0x65, offset 0x2c6 + {value: 0x0015, lo: 0x90, hi: 0x9c}, + // Block 0x66, offset 0x2c7 + {value: 0x0024, lo: 0x90, hi: 0x91}, + {value: 0x0034, lo: 0x92, hi: 0x93}, + {value: 0x0024, lo: 0x94, hi: 0x97}, + {value: 0x0034, lo: 0x98, hi: 0x9a}, + {value: 0x0024, lo: 0x9b, hi: 0x9c}, + {value: 0x0014, lo: 0x9d, hi: 0xa0}, + {value: 0x0024, lo: 0xa1, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa4}, + {value: 0x0034, lo: 0xa5, hi: 0xa6}, + {value: 0x0024, lo: 0xa7, hi: 0xa7}, + {value: 0x0034, lo: 0xa8, hi: 0xa8}, + {value: 0x0024, lo: 0xa9, hi: 0xa9}, + {value: 0x0034, lo: 0xaa, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + // Block 0x67, offset 0x2d5 + {value: 0x0016, lo: 0x85, hi: 0x86}, + {value: 0x0012, lo: 0x87, hi: 0x89}, + {value: 0x9d52, lo: 0x8e, hi: 0x8e}, + {value: 0x1013, lo: 0xa0, hi: 0xaf}, + {value: 0x1012, lo: 0xb0, hi: 0xbf}, + // Block 0x68, offset 0x2da + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0716, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x85, hi: 0x88}, + // Block 0x69, offset 0x2dd + {value: 0xa053, lo: 0xb6, hi: 0xb7}, + {value: 0xa353, lo: 0xb8, hi: 0xb9}, + {value: 0xa653, lo: 0xba, hi: 0xbb}, + {value: 0xa353, lo: 0xbc, hi: 0xbd}, + {value: 0xa053, lo: 0xbe, hi: 0xbf}, + // Block 0x6a, offset 0x2e2 + {value: 0x3013, lo: 0x80, hi: 0x8f}, + {value: 0x6553, lo: 0x90, hi: 0x9f}, + {value: 0xa953, lo: 0xa0, hi: 0xae}, + {value: 0x3012, lo: 0xb0, hi: 0xbf}, + // Block 0x6b, offset 0x2e6 + {value: 0x0117, lo: 0x80, hi: 0xa3}, + {value: 0x0012, lo: 0xa4, hi: 0xa4}, + {value: 0x0716, lo: 0xab, hi: 0xac}, + {value: 0x0316, lo: 0xad, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xb3}, + // Block 0x6c, offset 0x2ec + {value: 0x6c52, lo: 0x80, hi: 0x9f}, + {value: 0x7052, lo: 0xa0, hi: 0xa5}, + {value: 0x7052, lo: 0xa7, hi: 0xa7}, + {value: 0x7052, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x6d, offset 0x2f1 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0x6e, 
offset 0x2f4 + {value: 0x0010, lo: 0x80, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0010, lo: 0xb0, hi: 0xb6}, + {value: 0x0010, lo: 0xb8, hi: 0xbe}, + // Block 0x6f, offset 0x2f9 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x8e}, + {value: 0x0010, lo: 0x90, hi: 0x96}, + {value: 0x0010, lo: 0x98, hi: 0x9e}, + {value: 0x0024, lo: 0xa0, hi: 0xbf}, + // Block 0x70, offset 0x2fe + {value: 0x0014, lo: 0xaf, hi: 0xaf}, + // Block 0x71, offset 0x2ff + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0xaa, hi: 0xad}, + {value: 0x0030, lo: 0xae, hi: 0xaf}, + {value: 0x0004, lo: 0xb1, hi: 0xb5}, + {value: 0x0014, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + // Block 0x72, offset 0x305 + {value: 0x0034, lo: 0x99, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9e}, + // Block 0x73, offset 0x307 + {value: 0x0004, lo: 0xbc, hi: 0xbe}, + // Block 0x74, offset 0x308 + {value: 0x0010, lo: 0x85, hi: 0xad}, + {value: 0x0010, lo: 0xb1, hi: 0xbf}, + // Block 0x75, offset 0x30a + {value: 0x0010, lo: 0x80, hi: 0x8e}, + {value: 0x0010, lo: 0xa0, hi: 0xba}, + // Block 0x76, offset 0x30c + {value: 0x0010, lo: 0x80, hi: 0x94}, + {value: 0x0014, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0x96, hi: 0xbf}, + // Block 0x77, offset 0x30f + {value: 0x0010, lo: 0x80, hi: 0x8c}, + // Block 0x78, offset 0x310 + {value: 0x0010, lo: 0x90, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + // Block 0x79, offset 0x312 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0xab}, + // Block 0x7a, offset 0x315 + {value: 0x0117, lo: 0x80, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xae}, + {value: 0x0024, lo: 0xaf, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb2}, + {value: 0x0024, lo: 0xb4, hi: 0xbd}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x7b, offset 0x31b + {value: 0x0117, lo: 0x80, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9d}, + {value: 0x0024, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0x7c, offset 0x31f + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb1}, + // Block 0x7d, offset 0x321 + {value: 0x0004, lo: 0x80, hi: 0x96}, + {value: 0x0014, lo: 0x97, hi: 0x9f}, + {value: 0x0004, lo: 0xa0, hi: 0xa1}, + {value: 0x0117, lo: 0xa2, hi: 0xaf}, + {value: 0x0012, lo: 0xb0, hi: 0xb1}, + {value: 0x0117, lo: 0xb2, hi: 0xbf}, + // Block 0x7e, offset 0x327 + {value: 0x0117, lo: 0x80, hi: 0xaf}, + {value: 0x0015, lo: 0xb0, hi: 0xb0}, + {value: 0x0012, lo: 0xb1, hi: 0xb8}, + {value: 0x0316, lo: 0xb9, hi: 0xba}, + {value: 0x0716, lo: 0xbb, hi: 0xbc}, + {value: 0x8453, lo: 0xbd, hi: 0xbd}, + {value: 0x0117, lo: 0xbe, hi: 0xbf}, + // Block 0x7f, offset 0x32e + {value: 0x0010, lo: 0xb7, hi: 0xb7}, + {value: 0x0015, lo: 0xb8, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbf}, + // Block 0x80, offset 0x332 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0014, lo: 0x82, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8b}, + {value: 0x0010, lo: 0x8c, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa6}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + // Block 0x81, offset 0x33b + {value: 0x0010, lo: 0x80, hi: 0xb3}, + // Block 0x82, offset 0x33c + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0034, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x85, hi: 0x85}, + {value: 
0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0024, lo: 0xa0, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb7}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x83, offset 0x344 + {value: 0x0010, lo: 0x80, hi: 0xa5}, + {value: 0x0014, lo: 0xa6, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xad}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x84, offset 0x348 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0014, lo: 0x87, hi: 0x91}, + {value: 0x0010, lo: 0x92, hi: 0x92}, + {value: 0x0030, lo: 0x93, hi: 0x93}, + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0x85, offset 0x34d + {value: 0x0014, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xb9}, + {value: 0x0010, lo: 0xba, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0x86, offset 0x355 + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0004, lo: 0xa6, hi: 0xa6}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x87, offset 0x35b + {value: 0x0010, lo: 0x80, hi: 0xa8}, + {value: 0x0014, lo: 0xa9, hi: 0xae}, + {value: 0x0010, lo: 0xaf, hi: 0xb0}, + {value: 0x0014, lo: 0xb1, hi: 0xb2}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0x88, offset 0x361 + {value: 0x0010, lo: 0x80, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x8b}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0010, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbd}, + // Block 0x89, offset 0x36b + {value: 0x0024, lo: 0xb0, hi: 0xb0}, + {value: 0x0024, lo: 0xb2, hi: 0xb3}, + {value: 0x0034, lo: 0xb4, hi: 0xb4}, + {value: 0x0024, lo: 0xb7, hi: 0xb8}, + {value: 0x0024, lo: 0xbe, hi: 0xbf}, + // Block 0x8a, offset 0x370 + {value: 0x0024, lo: 0x81, hi: 0x81}, + {value: 0x0004, lo: 0x9d, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xab}, + {value: 0x0014, lo: 0xac, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb2, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + // Block 0x8b, offset 0x379 + {value: 0x0010, lo: 0x81, hi: 0x86}, + {value: 0x0010, lo: 0x89, hi: 0x8e}, + {value: 0x0010, lo: 0x91, hi: 0x96}, + {value: 0x0010, lo: 0xa0, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0x8c, offset 0x37f + {value: 0x0012, lo: 0x80, hi: 0x92}, + {value: 0xac52, lo: 0x93, hi: 0x93}, + {value: 0x0012, lo: 0x94, hi: 0x9a}, + {value: 0x0004, lo: 0x9b, hi: 0x9b}, + {value: 0x0015, lo: 0x9c, hi: 0x9f}, + {value: 0x0012, lo: 0xa0, hi: 0xa5}, + {value: 0x74d2, lo: 0xb0, hi: 0xbf}, + // Block 0x8d, offset 0x386 + {value: 0x78d2, lo: 0x80, hi: 0x8f}, + {value: 0x7cd2, lo: 0x90, hi: 0x9f}, + {value: 0x80d2, lo: 0xa0, hi: 0xaf}, + {value: 0x7cd2, lo: 0xb0, hi: 0xbf}, + // Block 0x8e, offset 0x38a + {value: 0x0010, lo: 0x80, hi: 0xa4}, + {value: 0x0014, lo: 0xa5, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa7}, + {value: 0x0014, lo: 0xa8, hi: 0xa8}, + {value: 0x0010, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0034, lo: 0xad, hi: 0xad}, + 
{value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0x8f, offset 0x392 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0x90, offset 0x394 + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x8b, hi: 0xbb}, + // Block 0x91, offset 0x396 + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x83, hi: 0x84}, + {value: 0x0010, lo: 0x86, hi: 0xbf}, + // Block 0x92, offset 0x399 + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0004, lo: 0xb2, hi: 0xbf}, + // Block 0x93, offset 0x39b + {value: 0x0004, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x93, hi: 0xbf}, + // Block 0x94, offset 0x39d + {value: 0x0010, lo: 0x80, hi: 0xbd}, + // Block 0x95, offset 0x39e + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0x96, offset 0x39f + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0xbf}, + // Block 0x97, offset 0x3a1 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0xb0, hi: 0xbb}, + // Block 0x98, offset 0x3a3 + {value: 0x0014, lo: 0x80, hi: 0x8f}, + {value: 0x0054, lo: 0x93, hi: 0x93}, + {value: 0x0024, lo: 0xa0, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xad}, + {value: 0x0024, lo: 0xae, hi: 0xaf}, + {value: 0x0010, lo: 0xb3, hi: 0xb4}, + // Block 0x99, offset 0x3a9 + {value: 0x0010, lo: 0x8d, hi: 0x8f}, + {value: 0x0054, lo: 0x92, hi: 0x92}, + {value: 0x0054, lo: 0x95, hi: 0x95}, + {value: 0x0010, lo: 0xb0, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0x9a, offset 0x3ae + {value: 0x0010, lo: 0x80, hi: 0xbc}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0x9b, offset 0x3b0 + {value: 0x0054, lo: 0x87, hi: 0x87}, + {value: 0x0054, lo: 0x8e, hi: 0x8e}, + {value: 0x0054, lo: 0x9a, hi: 0x9a}, + {value: 0x5f53, lo: 0xa1, hi: 0xba}, + {value: 0x0004, lo: 0xbe, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0x9c, offset 0x3b6 + {value: 0x0004, lo: 0x80, hi: 0x80}, + {value: 0x5f52, lo: 0x81, hi: 0x9a}, + {value: 0x0004, lo: 0xb0, hi: 0xb0}, + // Block 0x9d, offset 0x3b9 + {value: 0x0014, lo: 0x9e, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xbe}, + // Block 0x9e, offset 0x3bb + {value: 0x0010, lo: 0x82, hi: 0x87}, + {value: 0x0010, lo: 0x8a, hi: 0x8f}, + {value: 0x0010, lo: 0x92, hi: 0x97}, + {value: 0x0010, lo: 0x9a, hi: 0x9c}, + {value: 0x0004, lo: 0xa3, hi: 0xa3}, + {value: 0x0014, lo: 0xb9, hi: 0xbb}, + // Block 0x9f, offset 0x3c1 + {value: 0x0010, lo: 0x80, hi: 0x8b}, + {value: 0x0010, lo: 0x8d, hi: 0xa6}, + {value: 0x0010, lo: 0xa8, hi: 0xba}, + {value: 0x0010, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xa0, offset 0x3c6 + {value: 0x0010, lo: 0x80, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x9d}, + // Block 0xa1, offset 0x3c8 + {value: 0x0010, lo: 0x80, hi: 0xba}, + // Block 0xa2, offset 0x3c9 + {value: 0x0010, lo: 0x80, hi: 0xb4}, + // Block 0xa3, offset 0x3ca + {value: 0x0034, lo: 0xbd, hi: 0xbd}, + // Block 0xa4, offset 0x3cb + {value: 0x0010, lo: 0x80, hi: 0x9c}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa5, offset 0x3cd + {value: 0x0010, lo: 0x80, hi: 0x90}, + {value: 0x0034, lo: 0xa0, hi: 0xa0}, + // Block 0xa6, offset 0x3cf + {value: 0x0010, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xa7, offset 0x3d1 + {value: 0x0010, lo: 0x80, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0xb5}, + {value: 0x0024, lo: 0xb6, hi: 0xba}, + // Block 0xa8, offset 0x3d4 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xbf}, + // Block 0xa9, offset 0x3d6 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 
0x0010, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x91, hi: 0x95}, + // Block 0xaa, offset 0x3d9 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x97}, + {value: 0xaf53, lo: 0x98, hi: 0x9f}, + {value: 0xb253, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbf}, + // Block 0xab, offset 0x3e1 + {value: 0xaf52, lo: 0x80, hi: 0x87}, + {value: 0xb252, lo: 0x88, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0xbf}, + // Block 0xac, offset 0x3e4 + {value: 0x0010, lo: 0x80, hi: 0x9d}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0xb253, lo: 0xb0, hi: 0xb7}, + {value: 0xaf53, lo: 0xb8, hi: 0xbf}, + // Block 0xad, offset 0x3e8 + {value: 0x2813, lo: 0x80, hi: 0x87}, + {value: 0x3813, lo: 0x88, hi: 0x8f}, + {value: 0x2813, lo: 0x90, hi: 0x93}, + {value: 0xb252, lo: 0x98, hi: 0x9f}, + {value: 0xaf52, lo: 0xa0, hi: 0xa7}, + {value: 0x2812, lo: 0xa8, hi: 0xaf}, + {value: 0x3812, lo: 0xb0, hi: 0xb7}, + {value: 0x2812, lo: 0xb8, hi: 0xbb}, + // Block 0xae, offset 0x3f0 + {value: 0x0010, lo: 0x80, hi: 0xa7}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xaf, offset 0x3f2 + {value: 0x0010, lo: 0x80, hi: 0xa3}, + // Block 0xb0, offset 0x3f3 + {value: 0x0010, lo: 0x80, hi: 0xb6}, + // Block 0xb1, offset 0x3f4 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xa7}, + // Block 0xb2, offset 0x3f6 + {value: 0x0010, lo: 0x80, hi: 0x85}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xb5}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0010, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xb3, offset 0x3fc + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb6}, + // Block 0xb4, offset 0x3fe + {value: 0x0010, lo: 0x80, hi: 0x9e}, + // Block 0xb5, offset 0x3ff + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb5}, + // Block 0xb6, offset 0x401 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb9}, + // Block 0xb7, offset 0x403 + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0010, lo: 0xbe, hi: 0xbf}, + // Block 0xb8, offset 0x405 + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 0x83}, + {value: 0x0014, lo: 0x85, hi: 0x86}, + {value: 0x0014, lo: 0x8c, hi: 0x8c}, + {value: 0x0034, lo: 0x8d, hi: 0x8d}, + {value: 0x0014, lo: 0x8e, hi: 0x8e}, + {value: 0x0024, lo: 0x8f, hi: 0x8f}, + {value: 0x0010, lo: 0x90, hi: 0x93}, + {value: 0x0010, lo: 0x95, hi: 0x97}, + {value: 0x0010, lo: 0x99, hi: 0xb3}, + {value: 0x0024, lo: 0xb8, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xb9, offset 0x412 + {value: 0x0010, lo: 0xa0, hi: 0xbc}, + // Block 0xba, offset 0x413 + {value: 0x0010, lo: 0x80, hi: 0x9c}, + // Block 0xbb, offset 0x414 + {value: 0x0010, lo: 0x80, hi: 0x87}, + {value: 0x0010, lo: 0x89, hi: 0xa4}, + {value: 0x0024, lo: 0xa5, hi: 0xa5}, + {value: 0x0034, lo: 0xa6, hi: 0xa6}, + // Block 0xbc, offset 0x418 + {value: 0x0010, lo: 0x80, hi: 0x95}, + {value: 0x0010, lo: 0xa0, hi: 0xb2}, + // Block 0xbd, offset 0x41a + {value: 0x0010, lo: 0x80, hi: 0x91}, + // Block 0xbe, offset 0x41b + {value: 0x0010, lo: 0x80, hi: 0x88}, + // Block 0xbf, offset 0x41c + {value: 0x5653, lo: 0x80, hi: 0xb2}, + // Block 0xc0, offset 0x41d + {value: 0x5652, lo: 0x80, hi: 0xb2}, + // Block 0xc1, offset 0x41e + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0014, lo: 0x81, hi: 
0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xc2, offset 0x422 + {value: 0x0014, lo: 0x80, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0xa6, hi: 0xaf}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xc3, offset 0x426 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb6}, + {value: 0x0010, lo: 0xb7, hi: 0xb8}, + {value: 0x0034, lo: 0xb9, hi: 0xba}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + // Block 0xc4, offset 0x42c + {value: 0x0010, lo: 0x90, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xc5, offset 0x42e + {value: 0x0024, lo: 0x80, hi: 0x82}, + {value: 0x0010, lo: 0x83, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb4}, + {value: 0x0010, lo: 0xb6, hi: 0xbf}, + // Block 0xc6, offset 0x435 + {value: 0x0010, lo: 0x90, hi: 0xb2}, + {value: 0x0034, lo: 0xb3, hi: 0xb3}, + {value: 0x0010, lo: 0xb6, hi: 0xb6}, + // Block 0xc7, offset 0x438 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0xb5}, + {value: 0x0014, lo: 0xb6, hi: 0xbe}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xc8, offset 0x43c + {value: 0x0030, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0014, lo: 0x8b, hi: 0x8c}, + {value: 0x0010, lo: 0x90, hi: 0x9a}, + {value: 0x0010, lo: 0x9c, hi: 0x9c}, + // Block 0xc9, offset 0x442 + {value: 0x0010, lo: 0x80, hi: 0x91}, + {value: 0x0010, lo: 0x93, hi: 0xae}, + {value: 0x0014, lo: 0xaf, hi: 0xb1}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0014, lo: 0xb4, hi: 0xb4}, + {value: 0x0030, lo: 0xb5, hi: 0xb5}, + {value: 0x0034, lo: 0xb6, hi: 0xb6}, + {value: 0x0014, lo: 0xb7, hi: 0xb7}, + {value: 0x0014, lo: 0xbe, hi: 0xbe}, + // Block 0xca, offset 0x44b + {value: 0x0010, lo: 0x80, hi: 0x86}, + {value: 0x0010, lo: 0x88, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0x8d}, + {value: 0x0010, lo: 0x8f, hi: 0x9d}, + {value: 0x0010, lo: 0x9f, hi: 0xa8}, + {value: 0x0010, lo: 0xb0, hi: 0xbf}, + // Block 0xcb, offset 0x451 + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0014, lo: 0x9f, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa2}, + {value: 0x0014, lo: 0xa3, hi: 0xa8}, + {value: 0x0034, lo: 0xa9, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xcc, offset 0x457 + {value: 0x0014, lo: 0x80, hi: 0x81}, + {value: 0x0010, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x8c}, + {value: 0x0010, lo: 0x8f, hi: 0x90}, + {value: 0x0010, lo: 0x93, hi: 0xa8}, + {value: 0x0010, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb5, hi: 0xb9}, + {value: 0x0034, lo: 0xbc, hi: 0xbc}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xcd, offset 0x461 + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x84}, + {value: 0x0010, lo: 0x87, hi: 0x88}, + {value: 0x0010, lo: 0x8b, hi: 0x8c}, + {value: 0x0030, lo: 0x8d, hi: 0x8d}, + {value: 0x0010, lo: 0x90, hi: 0x90}, + {value: 0x0010, lo: 0x97, hi: 0x97}, + {value: 0x0010, lo: 0x9d, hi: 0xa3}, + {value: 0x0024, lo: 0xa6, hi: 0xac}, + {value: 0x0024, lo: 0xb0, hi: 0xb4}, + // Block 0xce, offset 0x46b + {value: 0x0010, lo: 0x80, hi: 0xb7}, + {value: 0x0014, lo: 0xb8, hi: 0xbf}, + // Block 0xcf, offset 0x46d + {value: 0x0010, lo: 0x80, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x82}, + {value: 0x0014, lo: 0x83, hi: 0x84}, + 
{value: 0x0010, lo: 0x85, hi: 0x85}, + {value: 0x0034, lo: 0x86, hi: 0x86}, + {value: 0x0010, lo: 0x87, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd0, offset 0x474 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xb8}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0014, lo: 0xba, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbe}, + {value: 0x0014, lo: 0xbf, hi: 0xbf}, + // Block 0xd1, offset 0x47a + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x81, hi: 0x81}, + {value: 0x0034, lo: 0x82, hi: 0x83}, + {value: 0x0010, lo: 0x84, hi: 0x85}, + {value: 0x0010, lo: 0x87, hi: 0x87}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd2, offset 0x480 + {value: 0x0010, lo: 0x80, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb5}, + {value: 0x0010, lo: 0xb8, hi: 0xbb}, + {value: 0x0014, lo: 0xbc, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd3, offset 0x486 + {value: 0x0034, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x98, hi: 0x9b}, + {value: 0x0014, lo: 0x9c, hi: 0x9d}, + // Block 0xd4, offset 0x489 + {value: 0x0010, lo: 0x80, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0010, lo: 0xbb, hi: 0xbc}, + {value: 0x0014, lo: 0xbd, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xd5, offset 0x48f + {value: 0x0014, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x84, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0xd6, offset 0x492 + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0014, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xac, hi: 0xac}, + {value: 0x0014, lo: 0xad, hi: 0xad}, + {value: 0x0010, lo: 0xae, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb5}, + {value: 0x0030, lo: 0xb6, hi: 0xb6}, + {value: 0x0034, lo: 0xb7, hi: 0xb7}, + // Block 0xd7, offset 0x49a + {value: 0x0010, lo: 0x80, hi: 0x89}, + // Block 0xd8, offset 0x49b + {value: 0x0014, lo: 0x9d, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa1}, + {value: 0x0014, lo: 0xa2, hi: 0xa5}, + {value: 0x0010, lo: 0xa6, hi: 0xa6}, + {value: 0x0014, lo: 0xa7, hi: 0xaa}, + {value: 0x0034, lo: 0xab, hi: 0xab}, + {value: 0x0010, lo: 0xb0, hi: 0xb9}, + // Block 0xd9, offset 0x4a2 + {value: 0x5f53, lo: 0xa0, hi: 0xbf}, + // Block 0xda, offset 0x4a3 + {value: 0x5f52, lo: 0x80, hi: 0x9f}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + {value: 0x0010, lo: 0xbf, hi: 0xbf}, + // Block 0xdb, offset 0x4a6 + {value: 0x0010, lo: 0x80, hi: 0xb8}, + // Block 0xdc, offset 0x4a7 + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x8a, hi: 0xaf}, + {value: 0x0014, lo: 0xb0, hi: 0xb6}, + {value: 0x0014, lo: 0xb8, hi: 0xbd}, + {value: 0x0010, lo: 0xbe, hi: 0xbe}, + {value: 0x0034, lo: 0xbf, hi: 0xbf}, + // Block 0xdd, offset 0x4ad + {value: 0x0010, lo: 0x80, hi: 0x80}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xb2, hi: 0xbf}, + // Block 0xde, offset 0x4b0 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + {value: 0x0014, lo: 0x92, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xa9}, + {value: 0x0014, lo: 0xaa, hi: 0xb0}, + {value: 0x0010, lo: 0xb1, hi: 0xb1}, + {value: 0x0014, lo: 0xb2, hi: 0xb3}, + {value: 0x0010, lo: 0xb4, hi: 0xb4}, + {value: 0x0014, lo: 0xb5, hi: 0xb6}, + // Block 0xdf, offset 0x4b8 + {value: 0x0010, lo: 0x80, hi: 0x99}, + // Block 0xe0, offset 0x4b9 + {value: 0x0010, lo: 0x80, hi: 0xae}, + // Block 0xe1, offset 0x4ba + {value: 0x0010, lo: 0x80, hi: 0x83}, + // Block 0xe2, offset 0x4bb + {value: 0x0010, lo: 0x80, hi: 0x86}, + // Block 0xe3, 
offset 0x4bc + {value: 0x0010, lo: 0x80, hi: 0x9e}, + {value: 0x0010, lo: 0xa0, hi: 0xa9}, + // Block 0xe4, offset 0x4be + {value: 0x0010, lo: 0x90, hi: 0xad}, + {value: 0x0034, lo: 0xb0, hi: 0xb4}, + // Block 0xe5, offset 0x4c0 + {value: 0x0010, lo: 0x80, hi: 0xaf}, + {value: 0x0024, lo: 0xb0, hi: 0xb6}, + // Block 0xe6, offset 0x4c2 + {value: 0x0014, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0010, lo: 0xa3, hi: 0xb7}, + {value: 0x0010, lo: 0xbd, hi: 0xbf}, + // Block 0xe7, offset 0x4c6 + {value: 0x0010, lo: 0x80, hi: 0x8f}, + // Block 0xe8, offset 0x4c7 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0010, lo: 0x90, hi: 0xbe}, + // Block 0xe9, offset 0x4c9 + {value: 0x0014, lo: 0x8f, hi: 0x9f}, + // Block 0xea, offset 0x4ca + {value: 0x0014, lo: 0xa0, hi: 0xa0}, + // Block 0xeb, offset 0x4cb + {value: 0x0010, lo: 0x80, hi: 0xaa}, + {value: 0x0010, lo: 0xb0, hi: 0xbc}, + // Block 0xec, offset 0x4cd + {value: 0x0010, lo: 0x80, hi: 0x88}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + {value: 0x0014, lo: 0x9d, hi: 0x9d}, + {value: 0x0034, lo: 0x9e, hi: 0x9e}, + {value: 0x0014, lo: 0xa0, hi: 0xa3}, + // Block 0xed, offset 0x4d2 + {value: 0x0030, lo: 0xa5, hi: 0xa6}, + {value: 0x0034, lo: 0xa7, hi: 0xa9}, + {value: 0x0030, lo: 0xad, hi: 0xb2}, + {value: 0x0014, lo: 0xb3, hi: 0xba}, + {value: 0x0034, lo: 0xbb, hi: 0xbf}, + // Block 0xee, offset 0x4d7 + {value: 0x0034, lo: 0x80, hi: 0x82}, + {value: 0x0024, lo: 0x85, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8b}, + {value: 0x0024, lo: 0xaa, hi: 0xad}, + // Block 0xef, offset 0x4db + {value: 0x0024, lo: 0x82, hi: 0x84}, + // Block 0xf0, offset 0x4dc + {value: 0x0013, lo: 0x80, hi: 0x99}, + {value: 0x0012, lo: 0x9a, hi: 0xb3}, + {value: 0x0013, lo: 0xb4, hi: 0xbf}, + // Block 0xf1, offset 0x4df + {value: 0x0013, lo: 0x80, hi: 0x8d}, + {value: 0x0012, lo: 0x8e, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0xa7}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0xf2, offset 0x4e3 + {value: 0x0013, lo: 0x80, hi: 0x81}, + {value: 0x0012, lo: 0x82, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0x9c}, + {value: 0x0013, lo: 0x9e, hi: 0x9f}, + {value: 0x0013, lo: 0xa2, hi: 0xa2}, + {value: 0x0013, lo: 0xa5, hi: 0xa6}, + {value: 0x0013, lo: 0xa9, hi: 0xac}, + {value: 0x0013, lo: 0xae, hi: 0xb5}, + {value: 0x0012, lo: 0xb6, hi: 0xb9}, + {value: 0x0012, lo: 0xbb, hi: 0xbb}, + {value: 0x0012, lo: 0xbd, hi: 0xbf}, + // Block 0xf3, offset 0x4ee + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0012, lo: 0x85, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0xf4, offset 0x4f2 + {value: 0x0012, lo: 0x80, hi: 0x83}, + {value: 0x0013, lo: 0x84, hi: 0x85}, + {value: 0x0013, lo: 0x87, hi: 0x8a}, + {value: 0x0013, lo: 0x8d, hi: 0x94}, + {value: 0x0013, lo: 0x96, hi: 0x9c}, + {value: 0x0012, lo: 0x9e, hi: 0xb7}, + {value: 0x0013, lo: 0xb8, hi: 0xb9}, + {value: 0x0013, lo: 0xbb, hi: 0xbe}, + // Block 0xf5, offset 0x4fa + {value: 0x0013, lo: 0x80, hi: 0x84}, + {value: 0x0013, lo: 0x86, hi: 0x86}, + {value: 0x0013, lo: 0x8a, hi: 0x90}, + {value: 0x0012, lo: 0x92, hi: 0xab}, + {value: 0x0013, lo: 0xac, hi: 0xbf}, + // Block 0xf6, offset 0x4ff + {value: 0x0013, lo: 0x80, hi: 0x85}, + {value: 0x0012, lo: 0x86, hi: 0x9f}, + {value: 0x0013, lo: 0xa0, hi: 0xb9}, + {value: 0x0012, lo: 0xba, hi: 0xbf}, + // Block 0xf7, offset 0x503 + {value: 0x0012, lo: 0x80, hi: 0x93}, + {value: 0x0013, lo: 0x94, hi: 0xad}, + {value: 0x0012, lo: 0xae, hi: 0xbf}, + // Block 0xf8, offset 0x506 + {value: 0x0012, lo: 
0x80, hi: 0x87}, + {value: 0x0013, lo: 0x88, hi: 0xa1}, + {value: 0x0012, lo: 0xa2, hi: 0xbb}, + {value: 0x0013, lo: 0xbc, hi: 0xbf}, + // Block 0xf9, offset 0x50a + {value: 0x0013, lo: 0x80, hi: 0x95}, + {value: 0x0012, lo: 0x96, hi: 0xaf}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0xfa, offset 0x50d + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0012, lo: 0x8a, hi: 0xa5}, + {value: 0x0013, lo: 0xa8, hi: 0xbf}, + // Block 0xfb, offset 0x510 + {value: 0x0013, lo: 0x80, hi: 0x80}, + {value: 0x0012, lo: 0x82, hi: 0x9a}, + {value: 0x0012, lo: 0x9c, hi: 0xa1}, + {value: 0x0013, lo: 0xa2, hi: 0xba}, + {value: 0x0012, lo: 0xbc, hi: 0xbf}, + // Block 0xfc, offset 0x515 + {value: 0x0012, lo: 0x80, hi: 0x94}, + {value: 0x0012, lo: 0x96, hi: 0x9b}, + {value: 0x0013, lo: 0x9c, hi: 0xb4}, + {value: 0x0012, lo: 0xb6, hi: 0xbf}, + // Block 0xfd, offset 0x519 + {value: 0x0012, lo: 0x80, hi: 0x8e}, + {value: 0x0012, lo: 0x90, hi: 0x95}, + {value: 0x0013, lo: 0x96, hi: 0xae}, + {value: 0x0012, lo: 0xb0, hi: 0xbf}, + // Block 0xfe, offset 0x51d + {value: 0x0012, lo: 0x80, hi: 0x88}, + {value: 0x0012, lo: 0x8a, hi: 0x8f}, + {value: 0x0013, lo: 0x90, hi: 0xa8}, + {value: 0x0012, lo: 0xaa, hi: 0xbf}, + // Block 0xff, offset 0x521 + {value: 0x0012, lo: 0x80, hi: 0x82}, + {value: 0x0012, lo: 0x84, hi: 0x89}, + {value: 0x0017, lo: 0x8a, hi: 0x8b}, + {value: 0x0010, lo: 0x8e, hi: 0xbf}, + // Block 0x100, offset 0x525 + {value: 0x0014, lo: 0x80, hi: 0xb6}, + {value: 0x0014, lo: 0xbb, hi: 0xbf}, + // Block 0x101, offset 0x527 + {value: 0x0014, lo: 0x80, hi: 0xac}, + {value: 0x0014, lo: 0xb5, hi: 0xb5}, + // Block 0x102, offset 0x529 + {value: 0x0014, lo: 0x84, hi: 0x84}, + {value: 0x0014, lo: 0x9b, hi: 0x9f}, + {value: 0x0014, lo: 0xa1, hi: 0xaf}, + // Block 0x103, offset 0x52c + {value: 0x0024, lo: 0x80, hi: 0x86}, + {value: 0x0024, lo: 0x88, hi: 0x98}, + {value: 0x0024, lo: 0x9b, hi: 0xa1}, + {value: 0x0024, lo: 0xa3, hi: 0xa4}, + {value: 0x0024, lo: 0xa6, hi: 0xaa}, + // Block 0x104, offset 0x531 + {value: 0x0010, lo: 0x80, hi: 0x84}, + {value: 0x0034, lo: 0x90, hi: 0x96}, + // Block 0x105, offset 0x533 + {value: 0xb552, lo: 0x80, hi: 0x81}, + {value: 0xb852, lo: 0x82, hi: 0x83}, + {value: 0x0024, lo: 0x84, hi: 0x89}, + {value: 0x0034, lo: 0x8a, hi: 0x8a}, + {value: 0x0010, lo: 0x90, hi: 0x99}, + // Block 0x106, offset 0x538 + {value: 0x0010, lo: 0x80, hi: 0x83}, + {value: 0x0010, lo: 0x85, hi: 0x9f}, + {value: 0x0010, lo: 0xa1, hi: 0xa2}, + {value: 0x0010, lo: 0xa4, hi: 0xa4}, + {value: 0x0010, lo: 0xa7, hi: 0xa7}, + {value: 0x0010, lo: 0xa9, hi: 0xb2}, + {value: 0x0010, lo: 0xb4, hi: 0xb7}, + {value: 0x0010, lo: 0xb9, hi: 0xb9}, + {value: 0x0010, lo: 0xbb, hi: 0xbb}, + // Block 0x107, offset 0x541 + {value: 0x0010, lo: 0x80, hi: 0x89}, + {value: 0x0010, lo: 0x8b, hi: 0x9b}, + {value: 0x0010, lo: 0xa1, hi: 0xa3}, + {value: 0x0010, lo: 0xa5, hi: 0xa9}, + {value: 0x0010, lo: 0xab, hi: 0xbb}, + // Block 0x108, offset 0x546 + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x109, offset 0x547 + {value: 0x0013, lo: 0x80, hi: 0x89}, + {value: 0x0013, lo: 0x90, hi: 0xa9}, + {value: 0x0013, lo: 0xb0, hi: 0xbf}, + // Block 0x10a, offset 0x54a + {value: 0x0013, lo: 0x80, hi: 0x89}, + // Block 0x10b, offset 0x54b + {value: 0x0004, lo: 0xbb, hi: 0xbf}, + // Block 0x10c, offset 0x54c + {value: 0x0014, lo: 0x81, hi: 0x81}, + {value: 0x0014, lo: 0xa0, hi: 0xbf}, + // Block 0x10d, offset 0x54e + {value: 0x0014, lo: 0x80, hi: 0xbf}, + // Block 0x10e, offset 0x54f + {value: 0x0014, lo: 0x80, hi: 0xaf}, +} + +// Total 
table size 14027 bytes (13KiB); checksum: F17D40E8 diff --git a/vendor/golang.org/x/text/cases/trieval.go b/vendor/golang.org/x/text/cases/trieval.go new file mode 100644 index 000000000..99e039628 --- /dev/null +++ b/vendor/golang.org/x/text/cases/trieval.go @@ -0,0 +1,214 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +package cases + +// This file contains definitions for interpreting the trie value of the case +// trie generated by "go run gen*.go". It is shared by both the generator +// program and the resultant package. Sharing is achieved by the generator +// copying gen_trieval.go to trieval.go and changing what's above this comment. + +// info holds case information for a single rune. It is the value returned +// by a trie lookup. Most mapping information can be stored in a single 16-bit +// value. If not, for example when a rune is mapped to multiple runes, the value +// stores some basic case data and an index into an array with additional data. +// +// The per-rune values have the following format: +// +// if (exception) { +// 15..4 unsigned exception index +// } else { +// 15..8 XOR pattern or index to XOR pattern for case mapping +// Only 13..8 are used for XOR patterns. +// 7 inverseFold (fold to upper, not to lower) +// 6 index: interpret the XOR pattern as an index +// or isMid if case mode is cIgnorableUncased. +// 5..4 CCC: zero (normal or break), above or other +// } +// 3 exception: interpret this value as an exception index +// (TODO: is this bit necessary? Probably implied from case mode.) +// 2..0 case mode +// +// For the non-exceptional cases, a rune must be either uncased, lowercase or +// uppercase. If the rune is cased, the XOR pattern maps either a lowercase +// rune to uppercase or an uppercase rune to lowercase (applied to the 10 +// least-significant bits of the rune). +// +// See the definitions below for a more detailed description of the various +// bits. +type info uint16 + +const ( + casedMask = 0x0003 + fullCasedMask = 0x0007 + ignorableMask = 0x0006 + ignorableValue = 0x0004 + + inverseFoldBit = 1 << 7 + isMidBit = 1 << 6 + + exceptionBit = 1 << 3 + exceptionShift = 4 + numExceptionBits = 12 + + xorIndexBit = 1 << 6 + xorShift = 8 + + // There is no mapping if all xor bits and the exception bit are zero. + hasMappingMask = 0xff80 | exceptionBit +) + +// The case mode bits encodes the case type of a rune. This includes uncased, +// title, upper and lower case and case ignorable. (For a definition of these +// terms see Chapter 3 of The Unicode Standard Core Specification.) In some rare +// cases, a rune can be both cased and case-ignorable. This is encoded by +// cIgnorableCased. A rune of this type is always lower case. Some runes are +// cased while not having a mapping. +// +// A common pattern for scripts in the Unicode standard is for upper and lower +// case runes to alternate for increasing rune values (e.g. the accented Latin +// ranges starting from U+0100 and U+1E00 among others and some Cyrillic +// characters). We use this property by defining a cXORCase mode, where the case +// mode (always upper or lower case) is derived from the rune value. As the XOR +// pattern for case mappings is often identical for successive runes, using +// cXORCase can result in large series of identical trie values. This, in turn, +// allows us to better compress the trie blocks. 
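To make the bit layout documented above concrete, here is a small standalone sketch (not part of the generated package) that decodes an info value using the documented field positions. The constant names below restate the layout from the comment and are not the package's own identifiers; the example inputs are values that recur in the tables above.

package main

import "fmt"

func decodeInfo(v uint16) string {
	const (
		caseModeMask   = 0x0007 // bits 2..0: case mode
		exceptionBit   = 1 << 3 // bit 3: interpret the value as an exception index
		exceptionShift = 4      // exception index lives in bits 15..4
		cccShift       = 4      // CCC class lives in bits 5..4 (non-exception case)
		xorShift       = 8      // XOR pattern (or index) lives in bits 15..8
	)
	if v&exceptionBit != 0 {
		return fmt.Sprintf("case mode %d, exception index %d", v&caseModeMask, v>>exceptionShift)
	}
	return fmt.Sprintf("case mode %d, ccc %d, xor 0x%02x", v&caseModeMask, (v>>cccShift)&0x3, v>>xorShift)
}

func main() {
	// 0x0010 and 0x0014 are among the most common values in the tables above.
	fmt.Println(decodeInfo(0x0010))
	fmt.Println(decodeInfo(0x0014))
}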
+const ( + cUncased info = iota // 000 + cTitle // 001 + cLower // 010 + cUpper // 011 + cIgnorableUncased // 100 + cIgnorableCased // 101 // lower case if mappings exist + cXORCase // 11x // case is cLower | ((rune&1) ^ x) + + maxCaseMode = cUpper +) + +func (c info) isCased() bool { + return c&casedMask != 0 +} + +func (c info) isCaseIgnorable() bool { + return c&ignorableMask == ignorableValue +} + +func (c info) isNotCasedAndNotCaseIgnorable() bool { + return c&fullCasedMask == 0 +} + +func (c info) isCaseIgnorableAndNotCased() bool { + return c&fullCasedMask == cIgnorableUncased +} + +func (c info) isMid() bool { + return c&(fullCasedMask|isMidBit) == isMidBit|cIgnorableUncased +} + +// The case mapping implementation will need to know about various Canonical +// Combining Class (CCC) values. We encode two of these in the trie value: +// cccZero (0) and cccAbove (230). If the value is cccOther, it means that +// CCC(r) > 0, but not 230. A value of cccBreak means that CCC(r) == 0 and that +// the rune also has the break category Break (see below). +const ( + cccBreak info = iota << 4 + cccZero + cccAbove + cccOther + + cccMask = cccBreak | cccZero | cccAbove | cccOther +) + +const ( + starter = 0 + above = 230 + iotaSubscript = 240 +) + +// The exceptions slice holds data that does not fit in a normal info entry. +// The entry is pointed to by the exception index in an entry. It has the +// following format: +// +// Header +// byte 0: +// 7..6 unused +// 5..4 CCC type (same bits as entry) +// 3 unused +// 2..0 length of fold +// +// byte 1: +// 7..6 unused +// 5..3 length of 1st mapping of case type +// 2..0 length of 2nd mapping of case type +// +// case 1st 2nd +// lower -> upper, title +// upper -> lower, title +// title -> lower, upper +// +// Lengths with the value 0x7 indicate no value and implies no change. +// A length of 0 indicates a mapping to zero-length string. +// +// Body bytes: +// case folding bytes +// lowercase mapping bytes +// uppercase mapping bytes +// titlecase mapping bytes +// closure mapping bytes (for NFKC_Casefold). (TODO) +// +// Fallbacks: +// missing fold -> lower +// missing title -> upper +// all missing -> original rune +// +// exceptions starts with a dummy byte to enforce that there is no zero index +// value. +const ( + lengthMask = 0x07 + lengthBits = 3 + noChange = 0 +) + +// References to generated trie. + +var trie = newCaseTrie(0) + +var sparse = sparseBlocks{ + values: sparseValues[:], + offsets: sparseOffsets[:], +} + +// Sparse block lookup code. + +// valueRange is an entry in a sparse block. +type valueRange struct { + value uint16 + lo, hi byte +} + +type sparseBlocks struct { + values []valueRange + offsets []uint16 +} + +// lookup returns the value from values block n for byte b using binary search. +func (s *sparseBlocks) lookup(n uint32, b byte) uint16 { + lo := s.offsets[n] + hi := s.offsets[n+1] + for lo < hi { + m := lo + (hi-lo)/2 + r := s.values[m] + if r.lo <= b && b <= r.hi { + return r.value + } + if b < r.lo { + hi = m + } else { + lo = m + 1 + } + } + return 0 +} + +// lastRuneForTesting is the last rune used for testing. Everything after this +// is boring. +const lastRuneForTesting = rune(0x1FFFF) diff --git a/vendor/golang.org/x/text/internal/internal.go b/vendor/golang.org/x/text/internal/internal.go new file mode 100644 index 000000000..3cddbbdda --- /dev/null +++ b/vendor/golang.org/x/text/internal/internal.go @@ -0,0 +1,49 @@ +// Copyright 2015 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains non-exported functionality that are used by +// packages in the text repository. +package internal // import "golang.org/x/text/internal" + +import ( + "sort" + + "golang.org/x/text/language" +) + +// SortTags sorts tags in place. +func SortTags(tags []language.Tag) { + sort.Sort(sorter(tags)) +} + +type sorter []language.Tag + +func (s sorter) Len() int { + return len(s) +} + +func (s sorter) Swap(i, j int) { + s[i], s[j] = s[j], s[i] +} + +func (s sorter) Less(i, j int) bool { + return s[i].String() < s[j].String() +} + +// UniqueTags sorts and filters duplicate tags in place and returns a slice with +// only unique tags. +func UniqueTags(tags []language.Tag) []language.Tag { + if len(tags) <= 1 { + return tags + } + SortTags(tags) + k := 0 + for i := 1; i < len(tags); i++ { + if tags[k].String() < tags[i].String() { + k++ + tags[k] = tags[i] + } + } + return tags[:k+1] +} diff --git a/vendor/golang.org/x/text/internal/match.go b/vendor/golang.org/x/text/internal/match.go new file mode 100644 index 000000000..1cc004a6d --- /dev/null +++ b/vendor/golang.org/x/text/internal/match.go @@ -0,0 +1,67 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +// This file contains matchers that implement CLDR inheritance. +// +// See https://unicode.org/reports/tr35/#Locale_Inheritance. +// +// Some of the inheritance described in this document is already handled by +// the cldr package. + +import ( + "golang.org/x/text/language" +) + +// TODO: consider if (some of the) matching algorithm needs to be public after +// getting some feel about what is generic and what is specific. + +// NewInheritanceMatcher returns a matcher that matches based on the inheritance +// chain. +// +// The matcher uses canonicalization and the parent relationship to find a +// match. The resulting match will always be either Und or a language with the +// same language and script as the requested language. It will not match +// languages for which there is understood to be mutual or one-directional +// intelligibility. +// +// A Match will indicate an Exact match if the language matches after +// canonicalization and High if the matched tag is a parent. 
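The matching strategy described in the comment above can be approximated with a short standalone sketch. It only demonstrates the parent-chain walk with a string-keyed lookup; the real matcher (below) also canonicalizes tags and tracks Exact/High confidence. The supported-locale set here is made up.

package main

import (
	"fmt"

	"golang.org/x/text/language"
)

func main() {
	// Hypothetical set of supported locales, keyed by string for simplicity.
	supported := map[string]bool{"pt": true, "en": true}

	// Walk from the requested tag up the CLDR parent chain until a
	// supported tag or the undefined tag (Und) is reached.
	tag := language.MustParse("pt-BR")
	for {
		if supported[tag.String()] {
			fmt.Println("matched", tag) // expected: matched pt
			break
		}
		if tag == language.Und {
			fmt.Println("no match")
			break
		}
		tag = tag.Parent()
	}
}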
+func NewInheritanceMatcher(t []language.Tag) *InheritanceMatcher { + tags := &InheritanceMatcher{make(map[language.Tag]int)} + for i, tag := range t { + ct, err := language.All.Canonicalize(tag) + if err != nil { + ct = tag + } + tags.index[ct] = i + } + return tags +} + +type InheritanceMatcher struct { + index map[language.Tag]int +} + +func (m InheritanceMatcher) Match(want ...language.Tag) (language.Tag, int, language.Confidence) { + for _, t := range want { + ct, err := language.All.Canonicalize(t) + if err != nil { + ct = t + } + conf := language.Exact + for { + if index, ok := m.index[ct]; ok { + return ct, index, conf + } + if ct == language.Und { + break + } + ct = ct.Parent() + conf = language.High + } + } + return language.Und, 0, language.No +} diff --git a/vendor/gopkg.in/ini.v1/.editorconfig b/vendor/gopkg.in/ini.v1/.editorconfig new file mode 100644 index 000000000..4a2d9180f --- /dev/null +++ b/vendor/gopkg.in/ini.v1/.editorconfig @@ -0,0 +1,12 @@ +# http://editorconfig.org + +root = true + +[*] +charset = utf-8 +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*_test.go] +trim_trailing_whitespace = false diff --git a/vendor/gopkg.in/ini.v1/.gitignore b/vendor/gopkg.in/ini.v1/.gitignore index 12411127b..588388bda 100644 --- a/vendor/gopkg.in/ini.v1/.gitignore +++ b/vendor/gopkg.in/ini.v1/.gitignore @@ -4,3 +4,4 @@ ini.sublime-workspace testdata/conf_reflect.ini .idea /.vscode +.DS_Store diff --git a/vendor/gopkg.in/ini.v1/file.go b/vendor/gopkg.in/ini.v1/file.go index 7b4e560d1..9d91c31a6 100644 --- a/vendor/gopkg.in/ini.v1/file.go +++ b/vendor/gopkg.in/ini.v1/file.go @@ -442,16 +442,16 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) { kname = `"""` + kname + `"""` } - for _, val := range key.ValueWithShadows() { + writeKeyValue := func(val string) (bool, error) { if _, err := buf.WriteString(kname); err != nil { - return nil, err + return false, err } if key.isBooleanType { if kname != sec.keyList[len(sec.keyList)-1] { buf.WriteString(LineBreak) } - continue KeyList + return true, nil } // Write out alignment spaces before "=" sign @@ -468,10 +468,27 @@ func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) { val = `"` + val + `"` } if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil { + return false, err + } + return false, nil + } + + shadows := key.ValueWithShadows() + if len(shadows) == 0 { + if _, err := writeKeyValue(""); err != nil { return nil, err } } + for _, val := range shadows { + exitLoop, err := writeKeyValue(val) + if err != nil { + return nil, err + } else if exitLoop { + continue KeyList + } + } + for _, val := range key.nestedValues { if _, err := buf.WriteString(indent + " " + val + LineBreak); err != nil { return nil, err diff --git a/vendor/gopkg.in/ini.v1/key.go b/vendor/gopkg.in/ini.v1/key.go index 0302c291b..a19d9f38e 100644 --- a/vendor/gopkg.in/ini.v1/key.go +++ b/vendor/gopkg.in/ini.v1/key.go @@ -110,15 +110,24 @@ func (k *Key) Value() string { return k.value } -// ValueWithShadows returns raw values of key and its shadows if any. +// ValueWithShadows returns raw values of key and its shadows if any. Shadow +// keys with empty values are ignored from the returned list. 
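A rough usage sketch of the behaviour described in the comment above: shadow keys require LoadOptions.AllowShadows, and with this change empty-valued entries are expected to be dropped from the returned slice. The section and key names here are made up.

package main

import (
	"fmt"

	"gopkg.in/ini.v1"
)

func main() {
	data := []byte("[server]\nendpoint = https://a.example\nendpoint = https://b.example\nendpoint =\n")

	// AllowShadows lets a key appear multiple times within a section.
	cfg, err := ini.LoadSources(ini.LoadOptions{AllowShadows: true}, data)
	if err != nil {
		panic(err)
	}

	// With the change above, the empty third value should be omitted,
	// leaving only the two non-empty endpoints.
	fmt.Println(cfg.Section("server").Key("endpoint").ValueWithShadows())
}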
func (k *Key) ValueWithShadows() []string { if len(k.shadows) == 0 { + if k.value == "" { + return []string{} + } return []string{k.value} } - vals := make([]string, len(k.shadows)+1) - vals[0] = k.value - for i := range k.shadows { - vals[i+1] = k.shadows[i].value + + vals := make([]string, 0, len(k.shadows)+1) + if k.value != "" { + vals = append(vals, k.value) + } + for _, s := range k.shadows { + if s.value != "" { + vals = append(vals, s.value) + } } return vals } diff --git a/vendor/honnef.co/go/tools/analysis/code/code.go b/vendor/honnef.co/go/tools/analysis/code/code.go index 27b01c4ec..3480152f7 100644 --- a/vendor/honnef.co/go/tools/analysis/code/code.go +++ b/vendor/honnef.co/go/tools/analysis/code/code.go @@ -13,7 +13,9 @@ import ( "honnef.co/go/tools/analysis/facts" "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/pattern" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" ) @@ -73,7 +75,11 @@ func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string { } panic(fmt.Sprintf("unsupported selector: %v", expr)) } - return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) + if v, ok := sel.Obj().(*types.Var); ok && v.IsField() { + return fmt.Sprintf("(%s).%s", typeutil.DereferenceR(sel.Recv()), sel.Obj().Name()) + } else { + return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) + } } func IsNil(pass *analysis.Pass, expr ast.Expr) bool { @@ -132,7 +138,19 @@ func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { } func CallName(pass *analysis.Pass, call *ast.CallExpr) string { - switch fun := astutil.Unparen(call.Fun).(type) { + fun := astutil.Unparen(call.Fun) + + // Instantiating a function cannot return another generic function, so doing this once is enough + switch idx := fun.(type) { + case *ast.IndexExpr: + fun = idx.X + case *typeparams.IndexListExpr: + fun = idx.X + } + + // (foo)[T] is not a valid instantiationg, so no need to unparen again. + + switch fun := fun.(type) { case *ast.SelectorExpr: fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) if !ok { @@ -257,6 +275,18 @@ func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityR return false case *ast.IndexExpr: return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity) + case *typeparams.IndexListExpr: + // In theory, none of the checks are necessary, as IndexListExpr only involves types. But there is no harm in + // being safe. 
+ if MayHaveSideEffects(pass, expr.X, purity) { + return true + } + for _, idx := range expr.Indices { + if MayHaveSideEffects(pass, idx, purity) { + return true + } + } + return false case *ast.KeyValueExpr: return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity) case *ast.SelectorExpr: @@ -274,7 +304,7 @@ func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityR if MayHaveSideEffects(pass, expr.X, purity) { return true } - return expr.Op == token.ARROW + return expr.Op == token.ARROW || expr.Op == token.AND case *ast.ParenExpr: return MayHaveSideEffects(pass, expr.X, purity) case nil: @@ -292,3 +322,21 @@ func IsGoVersion(pass *analysis.Pass, minor int) bool { version := f.Get().(int) return version >= minor } + +var integerLiteralQ = pattern.MustParse(`(IntegerLiteral tv)`) + +func IntegerLiteral(pass *analysis.Pass, node ast.Node) (types.TypeAndValue, bool) { + m, ok := Match(pass, integerLiteralQ, node) + if !ok { + return types.TypeAndValue{}, false + } + return m.State["tv"].(types.TypeAndValue), true +} + +func IsIntegerLiteral(pass *analysis.Pass, node ast.Node, value constant.Value) bool { + tv, ok := IntegerLiteral(pass, node) + if !ok { + return false + } + return constant.Compare(tv.Value, token.EQL, value) +} diff --git a/vendor/honnef.co/go/tools/analysis/edit/edit.go b/vendor/honnef.co/go/tools/analysis/edit/edit.go index 90bc5f8cc..b118bdc02 100644 --- a/vendor/honnef.co/go/tools/analysis/edit/edit.go +++ b/vendor/honnef.co/go/tools/analysis/edit/edit.go @@ -1,3 +1,4 @@ +// Package edit contains helpers for creating suggested fixes. package edit import ( @@ -10,17 +11,21 @@ import ( "honnef.co/go/tools/pattern" ) +// Ranger describes values that have a start and end position. +// In most cases these are either ast.Node or manually constructed ranges. type Ranger interface { Pos() token.Pos End() token.Pos } +// Range implements the Ranger interface. type Range [2]token.Pos func (r Range) Pos() token.Pos { return r[0] } func (r Range) End() token.Pos { return r[1] } -func ReplaceWithString(fset *token.FileSet, old Ranger, new string) analysis.TextEdit { +// ReplaceWithString replaces a range with a string. +func ReplaceWithString(old Ranger, new string) analysis.TextEdit { return analysis.TextEdit{ Pos: old.Pos(), End: old.End(), @@ -28,6 +33,7 @@ func ReplaceWithString(fset *token.FileSet, old Ranger, new string) analysis.Tex } } +// ReplaceWithNode replaces a range with an AST node. func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit { buf := &bytes.Buffer{} if err := format.Node(buf, fset, new); err != nil { @@ -40,17 +46,19 @@ func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.Tex } } -func ReplaceWithPattern(pass *analysis.Pass, after pattern.Pattern, state pattern.State, node Ranger) analysis.TextEdit { - r := pattern.NodeToAST(after.Root, state) +// ReplaceWithPattern replaces a range with the result of executing a pattern. +func ReplaceWithPattern(fset *token.FileSet, old Ranger, new pattern.Pattern, state pattern.State) analysis.TextEdit { + r := pattern.NodeToAST(new.Root, state) buf := &bytes.Buffer{} - format.Node(buf, pass.Fset, r) + format.Node(buf, fset, r) return analysis.TextEdit{ - Pos: node.Pos(), - End: node.End(), + Pos: old.Pos(), + End: old.End(), NewText: buf.Bytes(), } } +// Delete deletes a range of code. 
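For context, a hedged sketch of how an analyzer might consume these helpers after the signature change (ReplaceWithString no longer takes a *token.FileSet). The diagnostic message and the replacement text are hypothetical; only the helper signatures come from the diff.

package example

import (
	"go/ast"

	"golang.org/x/tools/go/analysis"
	"honnef.co/go/tools/analysis/edit"
)

// reportReplace attaches a suggested fix that replaces the reported node
// with a literal string, using the updated helper signatures.
func reportReplace(pass *analysis.Pass, node ast.Node) {
	pass.Report(analysis.Diagnostic{
		Pos:     node.Pos(),
		End:     node.End(),
		Message: "expression can be simplified",
		SuggestedFixes: []analysis.SuggestedFix{
			edit.Fix("simplify expression",
				edit.ReplaceWithString(node, "42"), // no *token.FileSet argument anymore
			),
		},
	})
}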
func Delete(old Ranger) analysis.TextEdit { return analysis.TextEdit{ Pos: old.Pos(), @@ -66,6 +74,7 @@ func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix { } } +// Selector creates a new selector expression. func Selector(x, sel string) *ast.SelectorExpr { return &ast.SelectorExpr{ X: &ast.Ident{Name: x}, diff --git a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go index b7386352e..eb4c1bab4 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go +++ b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go @@ -133,7 +133,7 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ case *ir.Convert: return mightReturnNil(v.X) case *ir.SliceToArrayPointer: - if v.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Array).Len() == 0 { + if typeutil.CoreType(v.Type()).(*types.Pointer).Elem().Underlying().(*types.Array).Len() == 0 { return mightReturnNil(v.X) } else { // converting a slice to an array pointer of length > 0 panics if the slice is nil @@ -161,7 +161,7 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ } else { return nilly } - case *ir.TypeAssert, *ir.Next, *ir.Select, *ir.MapLookup, *ir.TypeSwitch, *ir.Recv: + case *ir.TypeAssert, *ir.Next, *ir.Select, *ir.MapLookup, *ir.TypeSwitch, *ir.Recv, *ir.Sigma: // we don't need to look at the Extract's index // because we've already checked its type. return nilly @@ -226,7 +226,9 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ return onlyGlobal } return nilly - case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch: + case *ir.AggregateConst: + return neverNil + case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.GenericConst, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch: return nilly default: panic(fmt.Sprintf("internal error: unhandled type %T", v)) diff --git a/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go b/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go index 5e5644711..3bbe20603 100644 --- a/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go +++ b/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go @@ -10,6 +10,7 @@ import ( "honnef.co/go/tools/go/ir/irutil" "honnef.co/go/tools/internal/passes/buildir" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" ) @@ -211,6 +212,15 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ } return true case *ir.MakeInterface: + terms, err := typeparams.NormalTerms(v.X.Type()) + if len(terms) == 0 || err != nil { + // Type is a type parameter with no type terms (or we couldn't determine the terms). Such a type + // _can_ be nil when put in an interface value. + // + // There is no instruction that can create a guaranteed non-nil instance of a type parameter without + // type constraints, so we return false right away, without checking v.X's typedness. + return false + } return true case *ir.TypeAssert: // type assertions fail for untyped nils. 
Either we have a @@ -232,7 +242,8 @@ func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{ ret := fn.Exit.Control().(*ir.Return) for i, v := range ret.Results { - if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); ok { + typ := fn.Signature.Results().At(i).Type() + if _, ok := typ.Underlying().(*types.Interface); ok && !typeparams.IsTypeParam(typ) { if do(v, map[ir.Value]struct{}{}) { out |= 1 << i } diff --git a/vendor/honnef.co/go/tools/analysis/lint/lint.go b/vendor/honnef.co/go/tools/analysis/lint/lint.go index fdc4cb5dc..b4e37d6f4 100644 --- a/vendor/honnef.co/go/tools/analysis/lint/lint.go +++ b/vendor/honnef.co/go/tools/analysis/lint/lint.go @@ -1,4 +1,5 @@ // Package lint provides abstractions on top of go/analysis. +// These abstractions add extra information to analyzes, such as structured documentation and severities. package lint import ( @@ -13,6 +14,7 @@ import ( "golang.org/x/tools/go/analysis" ) +// Analyzer wraps a go/analysis.Analyzer and provides structured documentation. type Analyzer struct { // The analyzer's documentation. Unlike go/analysis.Analyzer.Doc, // this field is structured, providing access to severity, options @@ -30,6 +32,8 @@ func (a *Analyzer) initialize() { } } +// InitializeAnalyzers takes a map of documentation and a map of go/analysis.Analyzers and returns a slice of Analyzers. +// The map keys are the analyzer names. func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*analysis.Analyzer) []*Analyzer { out := make([]*Analyzer, 0, len(analyzers)) for k, v := range analyzers { @@ -44,6 +48,7 @@ func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*a return out } +// Severity describes the severity of diagnostics reported by an analyzer. type Severity int const ( @@ -55,57 +60,136 @@ const ( SeverityHint ) -type Documentation struct { +// MergeStrategy sets how merge mode should behave for diagnostics of an analyzer. +type MergeStrategy int + +const ( + MergeIfAny MergeStrategy = iota + MergeIfAll +) + +type RawDocumentation struct { Title string Text string + Before string + After string + Since string + NonDefault bool + Options []string + Severity Severity + MergeIf MergeStrategy +} + +type Documentation struct { + Title string + Text string + + TitleMarkdown string + TextMarkdown string + + Before string + After string Since string NonDefault bool Options []string Severity Severity + MergeIf MergeStrategy } -func Markdownify(m map[string]*Documentation) map[string]*Documentation { - for _, v := range m { - v.Title = toMarkdown(v.Title) - v.Text = toMarkdown(v.Text) +func Markdownify(m map[string]*RawDocumentation) map[string]*Documentation { + out := make(map[string]*Documentation, len(m)) + for k, v := range m { + out[k] = &Documentation{ + Title: strings.TrimSpace(stripMarkdown(v.Title)), + Text: strings.TrimSpace(stripMarkdown(v.Text)), + + TitleMarkdown: strings.TrimSpace(toMarkdown(v.Title)), + TextMarkdown: strings.TrimSpace(toMarkdown(v.Text)), + + Before: strings.TrimSpace(v.Before), + After: strings.TrimSpace(v.After), + Since: v.Since, + NonDefault: v.NonDefault, + Options: v.Options, + Severity: v.Severity, + MergeIf: v.MergeIf, + } } - return m + return out } func toMarkdown(s string) string { - return strings.ReplaceAll(s, `\'`, "`") + return strings.NewReplacer(`\'`, "`", `\"`, "`").Replace(s) } -func (doc *Documentation) String() string { - if doc == nil { - return "Error: No documentation." 
- } +func stripMarkdown(s string) string { + return strings.NewReplacer(`\'`, "", `\"`, "'").Replace(s) +} + +func (doc *Documentation) Format(metadata bool) string { + return doc.format(false, metadata) +} + +func (doc *Documentation) FormatMarkdown(metadata bool) string { + return doc.format(true, metadata) +} +func (doc *Documentation) format(markdown bool, metadata bool) string { b := &strings.Builder{} - fmt.Fprintf(b, "%s\n\n", doc.Title) - if doc.Text != "" { - fmt.Fprintf(b, "%s\n\n", doc.Text) - } - fmt.Fprint(b, "Available since\n ") - if doc.Since == "" { - fmt.Fprint(b, "unreleased") + if markdown { + fmt.Fprintf(b, "%s\n\n", doc.TitleMarkdown) + if doc.Text != "" { + fmt.Fprintf(b, "%s\n\n", doc.TextMarkdown) + } } else { - fmt.Fprintf(b, "%s", doc.Since) + fmt.Fprintf(b, "%s\n\n", doc.Title) + if doc.Text != "" { + fmt.Fprintf(b, "%s\n\n", doc.Text) + } } - if doc.NonDefault { - fmt.Fprint(b, ", non-default") + + if doc.Before != "" { + fmt.Fprintln(b, "Before:") + fmt.Fprintln(b, "") + for _, line := range strings.Split(doc.Before, "\n") { + fmt.Fprint(b, " ", line, "\n") + } + fmt.Fprintln(b, "") + fmt.Fprintln(b, "After:") + fmt.Fprintln(b, "") + for _, line := range strings.Split(doc.After, "\n") { + fmt.Fprint(b, " ", line, "\n") + } + fmt.Fprintln(b, "") } - fmt.Fprint(b, "\n") - if len(doc.Options) > 0 { - fmt.Fprintf(b, "\nOptions\n") - for _, opt := range doc.Options { - fmt.Fprintf(b, " %s", opt) + + if metadata { + fmt.Fprint(b, "Available since\n ") + if doc.Since == "" { + fmt.Fprint(b, "unreleased") + } else { + fmt.Fprintf(b, "%s", doc.Since) + } + if doc.NonDefault { + fmt.Fprint(b, ", non-default") } fmt.Fprint(b, "\n") + if len(doc.Options) > 0 { + fmt.Fprintf(b, "\nOptions\n") + for _, opt := range doc.Options { + fmt.Fprintf(b, " %s", opt) + } + fmt.Fprint(b, "\n") + } } + return b.String() } +func (doc *Documentation) String() string { + return doc.Format(true) +} + func newVersionFlag() flag.Getter { tags := build.Default.ReleaseTags v := tags[len(tags)-1][2:] @@ -169,6 +253,7 @@ func parseDirective(s string) (cmd string, args []string) { return fields[0], fields[1:] } +// ParseDirectives extracts all directives from a list of Go files. func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { var dirs []Directive for _, f := range files { diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go index 14de76f4c..8d9f084cf 100644 --- a/vendor/honnef.co/go/tools/config/config.go +++ b/vendor/honnef.co/go/tools/config/config.go @@ -156,8 +156,14 @@ func (c Config) String() string { return buf.String() } +// DefaultConfig is the default configuration. +// Its initial value describes the majority of the default configuration, +// but the Checks field can be updated at runtime based on the analyzers being used, to disable non-default checks. +// For cmd/staticcheck, this is handled by (*lintcmd.Command).Run. +// +// Note that DefaultConfig shouldn't be modified while analyzers are executing. 
var DefaultConfig = Config{ - Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-ST1023"}, + Checks: []string{"all"}, Initialisms: []string{ "ACL", "API", "ASCII", "CPU", "CSS", "DNS", "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", @@ -167,12 +173,21 @@ var DefaultConfig = Config{ "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS", }, - DotImportWhitelist: []string{}, + DotImportWhitelist: []string{ + "github.com/mmcloughlin/avo/build", + "github.com/mmcloughlin/avo/operand", + "github.com/mmcloughlin/avo/reg", + }, HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, } const ConfigName = "staticcheck.conf" +type ParseError struct { + Filename string + toml.ParseError +} + func parseConfigs(dir string) ([]Config, error) { var out []Config @@ -194,6 +209,12 @@ func parseConfigs(dir string) ([]Config, error) { _, err = toml.DecodeReader(f, &cfg) f.Close() if err != nil { + if err, ok := err.(toml.ParseError); ok { + return nil, ParseError{ + Filename: filepath.Join(dir, ConfigName), + ParseError: err, + } + } return nil, err } out = append(out, cfg) diff --git a/vendor/honnef.co/go/tools/config/example.conf b/vendor/honnef.co/go/tools/config/example.conf index 106da5bcb..b25c3a815 100644 --- a/vendor/honnef.co/go/tools/config/example.conf +++ b/vendor/honnef.co/go/tools/config/example.conf @@ -6,5 +6,9 @@ initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", "UDP", "UI", "GID", "UID", "UUID", "URI", "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", "XSS", "SIP", "RTP", "AMQP", "DB", "TS"] -dot_import_whitelist = [] +dot_import_whitelist = [ + "github.com/mmcloughlin/avo/build", + "github.com/mmcloughlin/avo/operand", + "github.com/mmcloughlin/avo/reg", +] http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/util.go b/vendor/honnef.co/go/tools/go/ast/astutil/util.go index fac33780d..176bcde1d 100644 --- a/vendor/honnef.co/go/tools/go/ast/astutil/util.go +++ b/vendor/honnef.co/go/tools/go/ast/astutil/util.go @@ -6,6 +6,8 @@ import ( "go/token" "reflect" "strings" + + "golang.org/x/exp/typeparams" ) func IsIdent(expr ast.Expr, ident string) bool { @@ -20,6 +22,7 @@ func IsBlank(id ast.Expr) bool { return ident != nil && ident.Name == "_" } +// Deprecated: use code.IsIntegerLiteral instead. func IsIntLiteral(expr ast.Expr, literal string) bool { lit, ok := expr.(*ast.BasicLit) return ok && lit.Kind == token.INT && lit.Value == literal @@ -77,103 +80,154 @@ func Unparen(e ast.Expr) ast.Expr { } } -func CopyExpr(node ast.Expr) ast.Expr { +// CopyExpr creates a deep copy of an expression. +// It doesn't support copying FuncLits and returns ok == false when encountering one. 
+func CopyExpr(node ast.Expr) (ast.Expr, bool) { switch node := node.(type) { case *ast.BasicLit: cp := *node - return &cp + return &cp, true case *ast.BinaryExpr: cp := *node - cp.X = CopyExpr(cp.X) - cp.Y = CopyExpr(cp.Y) - return &cp + var ok1, ok2 bool + cp.X, ok1 = CopyExpr(cp.X) + cp.Y, ok2 = CopyExpr(cp.Y) + return &cp, ok1 && ok2 case *ast.CallExpr: + var ok bool cp := *node - cp.Fun = CopyExpr(cp.Fun) + cp.Fun, ok = CopyExpr(cp.Fun) + if !ok { + return nil, false + } cp.Args = make([]ast.Expr, len(node.Args)) for i, v := range node.Args { - cp.Args[i] = CopyExpr(v) + cp.Args[i], ok = CopyExpr(v) + if !ok { + return nil, false + } } - return &cp + return &cp, true case *ast.CompositeLit: + var ok bool cp := *node - cp.Type = CopyExpr(cp.Type) + cp.Type, ok = CopyExpr(cp.Type) + if !ok { + return nil, false + } cp.Elts = make([]ast.Expr, len(node.Elts)) for i, v := range node.Elts { - cp.Elts[i] = CopyExpr(v) + cp.Elts[i], ok = CopyExpr(v) + if !ok { + return nil, false + } } - return &cp + return &cp, true case *ast.Ident: cp := *node - return &cp + return &cp, true case *ast.IndexExpr: + var ok1, ok2 bool cp := *node - cp.X = CopyExpr(cp.X) - cp.Index = CopyExpr(cp.Index) - return &cp + cp.X, ok1 = CopyExpr(cp.X) + cp.Index, ok2 = CopyExpr(cp.Index) + return &cp, ok1 && ok2 + case *typeparams.IndexListExpr: + var ok bool + cp := *node + cp.X, ok = CopyExpr(cp.X) + if !ok { + return nil, false + } + for i, v := range node.Indices { + cp.Indices[i], ok = CopyExpr(v) + if !ok { + return nil, false + } + } + return &cp, true case *ast.KeyValueExpr: + var ok1, ok2 bool cp := *node - cp.Key = CopyExpr(cp.Key) - cp.Value = CopyExpr(cp.Value) - return &cp + cp.Key, ok1 = CopyExpr(cp.Key) + cp.Value, ok2 = CopyExpr(cp.Value) + return &cp, ok1 && ok2 case *ast.ParenExpr: + var ok bool cp := *node - cp.X = CopyExpr(cp.X) - return &cp + cp.X, ok = CopyExpr(cp.X) + return &cp, ok case *ast.SelectorExpr: + var ok bool cp := *node - cp.X = CopyExpr(cp.X) - cp.Sel = CopyExpr(cp.Sel).(*ast.Ident) - return &cp + cp.X, ok = CopyExpr(cp.X) + if !ok { + return nil, false + } + sel, ok := CopyExpr(cp.Sel) + if !ok { + // this is impossible + return nil, false + } + cp.Sel = sel.(*ast.Ident) + return &cp, true case *ast.SliceExpr: + var ok1, ok2, ok3, ok4 bool cp := *node - cp.X = CopyExpr(cp.X) - cp.Low = CopyExpr(cp.Low) - cp.High = CopyExpr(cp.High) - cp.Max = CopyExpr(cp.Max) - return &cp + cp.X, ok1 = CopyExpr(cp.X) + cp.Low, ok2 = CopyExpr(cp.Low) + cp.High, ok3 = CopyExpr(cp.High) + cp.Max, ok4 = CopyExpr(cp.Max) + return &cp, ok1 && ok2 && ok3 && ok4 case *ast.StarExpr: + var ok bool cp := *node - cp.X = CopyExpr(cp.X) - return &cp + cp.X, ok = CopyExpr(cp.X) + return &cp, ok case *ast.TypeAssertExpr: + var ok1, ok2 bool cp := *node - cp.X = CopyExpr(cp.X) - cp.Type = CopyExpr(cp.Type) - return &cp + cp.X, ok1 = CopyExpr(cp.X) + cp.Type, ok2 = CopyExpr(cp.Type) + return &cp, ok1 && ok2 case *ast.UnaryExpr: + var ok bool cp := *node - cp.X = CopyExpr(cp.X) - return &cp + cp.X, ok = CopyExpr(cp.X) + return &cp, ok case *ast.MapType: + var ok1, ok2 bool cp := *node - cp.Key = CopyExpr(cp.Key) - cp.Value = CopyExpr(cp.Value) - return &cp + cp.Key, ok1 = CopyExpr(cp.Key) + cp.Value, ok2 = CopyExpr(cp.Value) + return &cp, ok1 && ok2 case *ast.ArrayType: + var ok1, ok2 bool cp := *node - cp.Len = CopyExpr(cp.Len) - cp.Elt = CopyExpr(cp.Elt) - return &cp + cp.Len, ok1 = CopyExpr(cp.Len) + cp.Elt, ok2 = CopyExpr(cp.Elt) + return &cp, ok1 && ok2 case *ast.Ellipsis: + var ok bool cp := *node - cp.Elt = 
CopyExpr(cp.Elt) - return &cp + cp.Elt, ok = CopyExpr(cp.Elt) + return &cp, ok case *ast.InterfaceType: cp := *node - return &cp + return &cp, true case *ast.StructType: cp := *node - return &cp - case *ast.FuncLit: - // TODO(dh): implement copying of function literals. - return nil + return &cp, true + case *ast.FuncLit, *ast.FuncType: + // TODO(dh): implement copying of function literals and types. + return nil, false case *ast.ChanType: + var ok bool cp := *node - cp.Value = CopyExpr(cp.Value) - return &cp + cp.Value, ok = CopyExpr(cp.Value) + return &cp, ok case nil: - return nil + return nil, true default: panic(fmt.Sprintf("unreachable: %T", node)) } @@ -226,6 +280,17 @@ func Equal(a, b ast.Node) bool { case *ast.IndexExpr: b := b.(*ast.IndexExpr) return Equal(a.X, b.X) && Equal(a.Index, b.Index) + case *typeparams.IndexListExpr: + b := b.(*typeparams.IndexListExpr) + if len(a.Indices) != len(b.Indices) { + return false + } + for i, v := range a.Indices { + if !Equal(v, b.Indices[i]) { + return false + } + } + return Equal(a.X, b.X) case *ast.KeyValueExpr: b := b.(*ast.KeyValueExpr) return Equal(a.Key, b.Key) && Equal(a.Value, b.Value) diff --git a/vendor/honnef.co/go/tools/go/ir/builder.go b/vendor/honnef.co/go/tools/go/ir/builder.go index 0cc1bedcd..7cef5f1e1 100644 --- a/vendor/honnef.co/go/tools/go/ir/builder.go +++ b/vendor/honnef.co/go/tools/go/ir/builder.go @@ -31,14 +31,12 @@ import ( "go/token" "go/types" "os" -) -type opaqueType struct { - types.Type - name string -} + "honnef.co/go/tools/analysis/lint" + "honnef.co/go/tools/go/types/typeutil" -func (t *opaqueType) String() string { return t.name } + "golang.org/x/exp/typeparams" +) var ( varOk = newVar("ok", tBool) @@ -46,12 +44,10 @@ var ( // Type constants. tBool = types.Typ[types.Bool] - tByte = types.Typ[types.Byte] tInt = types.Typ[types.Int] tInvalid = types.Typ[types.Invalid] tString = types.Typ[types.String] tUntypedNil = types.Typ[types.UntypedNil] - tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators tEface = types.NewInterfaceType(nil, nil).Complete() ) @@ -182,7 +178,7 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value { return fn.emit(&c, e) case *ast.IndexExpr: - mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + mapt := typeutil.CoreType(fn.Pkg.typeOf(e.X)).Underlying().(*types.Map) lookup := &MapLookup{ X: b.expr(fn, e.X), Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e), @@ -211,7 +207,15 @@ func (b *builder) exprN(fn *Function, e ast.Expr) Value { func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value { switch obj.Name() { case "make": - switch typ.Underlying().(type) { + styp := typ.Underlying() + if _, ok := typ.Underlying().(*types.Interface); ok { + // This must be a type parameter with a core type. + // Set styp to the core type and generate instructions based on it. 
+ assert(typeparams.IsTypeParam(typ)) + styp = typeutil.CoreType(typ) + assert(styp != nil) + } + switch styp.(type) { case *types.Slice: n := b.expr(fn, args[1]) m := n @@ -221,7 +225,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ if m, ok := m.(*Const); ok { // treat make([]T, n, m) as new([m]T)[:n] cap := m.Int64() - at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) + at := types.NewArray(styp.Underlying().(*types.Slice).Elem(), cap) alloc := emitNew(fn, at, source) v := &Slice{ X: alloc, @@ -254,6 +258,9 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ v := &MakeChan{Size: sz} v.setType(typ) return fn.emit(v, source) + + default: + lint.ExhaustiveTypeSwitch(typ.Underlying()) } case "new": @@ -261,11 +268,15 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ return alloc case "len", "cap": - // Special case: len or cap of an array or *array is - // based on the type, not the value which may be nil. - // We must still evaluate the value, though. (If it - // was side-effect free, the whole call would have - // been constant-folded.) + // Special case: len or cap of an array or *array is based on the type, not the value which may be nil. We must + // still evaluate the value, though. (If it was side-effect free, the whole call would have been + // constant-folded.) + // + // For example, for len(gen()), we need to evaluate gen() for its side-effects, but don't need the returned + // value to determine the length of the array, which is constant. + // + // This never applies to type parameters. Even if the constraint has a structural type, len/cap on a type + // parameter aren't constant. t := deref(fn.Pkg.typeOf(args[0])).Underlying() if at, ok := t.(*types.Array); ok { b.expr(fn, args[0]) // for effects only @@ -307,7 +318,7 @@ func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ // - a[:] iff a is an array (not *array) // - references to variables in lexically enclosing functions. // -func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { +func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) (RET lvalue) { switch e := e.(type) { case *ast.Ident: if isBlankIdent(e) { @@ -356,28 +367,58 @@ func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { case *ast.IndexExpr: var x Value var et types.Type - switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { - case *types.Array: + xt := fn.Pkg.typeOf(e.X) + + // Indexing doesn't need a core type, it only requires all types to be similar enough. For example, []int64 | + // [5]int64 can be indexed. The element types do have to match though. 
+ + terms, err := typeparams.NormalTerms(xt) + if err != nil { + panic(fmt.Sprintf("unexpected error: %s", err)) + } + isArrayLike := func() (types.Type, bool) { + for _, term := range terms { + arr, ok := term.Type().Underlying().(*types.Array) + if ok { + return arr.Elem(), true + } + } + return nil, false + } + + isSliceLike := func() (types.Type, bool) { + for _, term := range terms { + switch t := term.Type().Underlying().(type) { + case *types.Slice: + return t.Elem(), true + case *types.Pointer: + return t.Elem().Underlying().(*types.Array).Elem(), true + } + } + return nil, false + } + + if elem, ok := isArrayLike(); ok { + // array x = b.addr(fn, e.X, escaping).address(fn) - et = types.NewPointer(t.Elem()) - case *types.Pointer: // *array - x = b.expr(fn, e.X) - et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) - case *types.Slice: + et = types.NewPointer(elem) + } else if elem, ok := isSliceLike(); ok { + // slice or *array x = b.expr(fn, e.X) - et = types.NewPointer(t.Elem()) - case *types.Map: + et = types.NewPointer(elem) + } else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok { return &element{ m: b.expr(fn, e.X), k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index), t: t.Elem(), } - default: + } else { panic("unexpected container type in IndexExpr: " + t.String()) } + v := &IndexAddr{ X: x, - Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index), + Index: b.expr(fn, e.Index), } v.setType(et) return &address{addr: fn.emit(v, e), expr: e} @@ -393,17 +434,28 @@ type store struct { lhs lvalue rhs Value source ast.Node + + // if debugRef is set no other fields will be set + debugRef *DebugRef } type storebuf struct{ stores []store } func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) { - sb.stores = append(sb.stores, store{lhs, rhs, source}) + sb.stores = append(sb.stores, store{lhs, rhs, source, nil}) +} + +func (sb *storebuf) storeDebugRef(ref *DebugRef) { + sb.stores = append(sb.stores, store{debugRef: ref}) } func (sb *storebuf) emit(fn *Function) { for _, s := range sb.stores { - s.lhs.store(fn, s.rhs, s.source) + if s.debugRef == nil { + s.lhs.store(fn, s.rhs, s.source) + } else { + fn.emit(s.debugRef, nil) + } } } @@ -429,6 +481,13 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // an &-operation is implied. if _, ok := loc.(blank); !ok { // avoid calling blank.typ() if isPointer(loc.typ()) { + // Example input that hits this code: + // + // type S1 struct{ X int } + // x := []*S1{ + // {1}, // <-- & is implied + // } + // _ = x ptr := b.addr(fn, e, true).address(fn) // copy address if sb != nil { @@ -441,7 +500,7 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * } if _, ok := loc.(*address); ok { - if isInterface(loc.typ()) { + if isInterface(loc.typ()) && !typeparams.IsTypeParam(loc.typ()) { // e.g. var x interface{} = T{...} // Can't in-place initialize an interface value. // Fall back to copying. @@ -458,9 +517,16 @@ func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb * // Subtle: emit debug ref for aggregate types only; // slice and map are handled by store ops in compLit. 
- switch loc.typ().Underlying().(type) { + switch typeutil.CoreType(loc.typ()).Underlying().(type) { case *types.Struct, *types.Array: - emitDebugRef(fn, e, addr, true) + if sb != nil { + // Make sure we don't emit DebugRefs before the store has actually occured + if ref := makeDebugRef(fn, e, addr, true); ref != nil { + sb.storeDebugRef(ref) + } + } else { + emitDebugRef(fn, e, addr, true) + } } return @@ -605,7 +671,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { case *ast.SliceExpr: var low, high, max Value var x Value - switch fn.Pkg.typeOf(e.X).Underlying().(type) { + switch typeutil.CoreType(fn.Pkg.typeOf(e.X)).Underlying().(type) { case *types.Array: // Potentially escaping. x = b.addr(fn, e.X, true).address(fn) @@ -646,6 +712,11 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { if _, ok := obj.(*types.Var); ok { return emitLoad(fn, v, e) // var (address) } + instances := typeparams.GetInstances(fn.Pkg.info) + if instance, ok := instances[e]; ok { + // Instantiated generic function + return makeInstance(fn.Prog, v.(*Function), instance.Type.(*types.Signature), instance.TypeArgs) + } return v // (func) } // Local var. @@ -701,43 +772,68 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { panic("unexpected expression-relative selector") case *ast.IndexExpr: - switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { - case *types.Array: - // Non-addressable array (in a register). - v := &Index{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index), + // IndexExpr might either be an actual indexing operation, or an instantiation + xt := fn.Pkg.typeOf(e.X) + + terms, err := typeparams.NormalTerms(xt) + if err != nil { + panic(fmt.Sprintf("unexpected error: %s", err)) + } + isNonAddressableIndexable := func() (types.Type, bool) { + for _, term := range terms { + switch t := term.Type().Underlying().(type) { + case *types.Array: + return t.Elem(), true + case *types.Basic: + // a string + return types.Universe.Lookup("byte").Type(), true + } } - v.setType(t.Elem()) - return fn.emit(v, e) + return nil, false + } - case *types.Map: - // Maps are not addressable. - mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) - v := &MapLookup{ - X: b.expr(fn, e.X), - Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e.Index), + isAddressableIndexable := func() (types.Type, bool) { + for _, term := range terms { + switch t := term.Type().Underlying().(type) { + case *types.Slice: + return t.Elem(), true + case *types.Pointer: + return t.Elem().Underlying().(*types.Array).Elem(), true + } } - v.setType(mapt.Elem()) - return fn.emit(v, e) + return nil, false + } - case *types.Basic: // => string - // Strings are not addressable. - v := &StringLookup{ + if elem, ok := isNonAddressableIndexable(); ok { + // At least one of the types is non-addressable + v := &Index{ X: b.expr(fn, e.X), Index: b.expr(fn, e.Index), } - v.setType(tByte) + v.setType(elem) return fn.emit(v, e) - - case *types.Slice, *types.Pointer: // *array - // Addressable slice/array; use IndexAddr and Load. + } else if _, ok := isAddressableIndexable(); ok { + // All types are addressable (otherwise the previous branch would've fired) return b.addr(fn, e, false).load(fn, e) - - default: + } else if t, ok := typeutil.CoreType(xt).Underlying().(*types.Map); ok { + // Maps are not addressable. 
+ v := &MapLookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index), + } + v.setType(t.Elem()) + return fn.emit(v, e) + } else if _, ok := xt.Underlying().(*types.Signature); ok { + // Instantiating a generic function + return b.expr(fn, e.X) + } else { panic("unexpected container type in IndexExpr: " + t.String()) } + case *typeparams.IndexListExpr: + // Instantiating a generic function + return b.expr(fn, e.X) + case *ast.CompositeLit, *ast.StarExpr: // Addressable types (lvalues) return b.addr(fn, e, false).load(fn, e) @@ -796,10 +892,15 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector) if isInterface(recv) { // Invoke-mode call. + + // Methods in interfaces cannot have their own type parameters, so we needn't do anything for type + // parameters. c.Value = v c.Method = obj } else { // "Call"-mode call. + + // declaredFunc takes care of creating wrappers for functions with type parameters. c.Value = fn.Prog.declaredFunc(obj) c.Args = append(c.Args, v) } @@ -836,8 +937,9 @@ func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { // - apply implicit field selections. // - use MethodVal logic to populate fields of c. } - // Evaluate the function operand in the usual way. + // + // Code in expr takes care of creating wrappers for functions with type parameters. c.Value = b.expr(fn, e.Fun) } @@ -921,7 +1023,7 @@ func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { b.setCallFunc(fn, e, c) // Then append the other actual parameters. - sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) + sig, _ := typeutil.CoreType(fn.Pkg.typeOf(e.Fun)).(*types.Signature) if sig == nil { panic(fmt.Sprintf("no signature for call of %s", e.Fun)) } @@ -1057,7 +1159,7 @@ func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { // func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { typ := deref(fn.Pkg.typeOf(e)) - switch t := typ.Underlying().(type) { + switch t := typeutil.CoreType(typ).(type) { case *types.Struct: if !isZero && len(e.Elts) != t.NumFields() { // memclear @@ -1115,7 +1217,7 @@ func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero if idx != nil { idxval = idx.Int64() + 1 } - idx = emitConst(fn, intConst(idxval)) + idx = emitConst(fn, intConst(idxval)).(*Const) } iaddr := &IndexAddr{ X: array, @@ -1599,7 +1701,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no Dir: types.SendOnly, Chan: ch, Send: emitConv(fn, b.expr(fn, comm.Value), - ch.Type().Underlying().(*types.Chan).Elem(), comm), + typeutil.CoreType(ch.Type()).Underlying().(*types.Chan).Elem(), comm), Pos: comm.Arrow, } if debugInfo { @@ -1642,7 +1744,7 @@ func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (no vars = append(vars, varIndex, varOk) for _, st := range states { if st.Dir == types.RecvOnly { - tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() + tElem := typeutil.CoreType(st.Chan.Type()).Underlying().(*types.Chan).Elem() vars = append(vars, anonVar(tElem)) } } @@ -1806,23 +1908,28 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast. 
// jump loop // done: (target of break) + // We store in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes + xAlloc := newVariable(fn, x.Type(), source) + xAlloc.store(x) + // Determine number of iterations. - var length Value - if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { - // For array or *array, the number of iterations is - // known statically thanks to the type. We avoid a - // data dependence upon x, permitting later dead-code - // elimination if x is pure, static unrolling, etc. - // Ranging over a nil *array may have >0 iterations. - // We still generate code for x, in case it has effects. - length = emitConst(fn, intConst(arr.Len())) + // + // We store the length in an Alloc and load it on each iteration so that lifting produces the necessary σ nodes + length := newVariable(fn, tInt, source) + if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok && !typeparams.IsTypeParam(x.Type()) { + // For array or *array, the number of iterations is known statically thanks to the type. We avoid a data + // dependence upon x, permitting later dead-code elimination if x is pure, static unrolling, etc. Ranging over a + // nil *array may have >0 iterations. We still generate code for x, in case it has effects. + // + // This intentionally misses type parameters with core types, because their length isn't technically constant. + length.store(emitConst(fn, intConst(arr.Len()))) } else { // length = len(x). var c Call c.Call.Value = makeLen(x.Type()) c.Call.Args = []Value{x} c.setType(tInt) - length = fn.emit(&c, source) + length.store(fn.emit(&c, source)) } index := fn.addLocal(tInt, source) @@ -1842,12 +1949,13 @@ func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast. body := fn.newBasicBlock("rangeindex.body") done = fn.newBasicBlock("rangeindex.done") - emitIf(fn, emitCompare(fn, token.LSS, incr, length, source), body, done, source) + emitIf(fn, emitCompare(fn, token.LSS, incr, length.load(), source), body, done, source) fn.currentBlock = body k = emitLoad(fn, index, source) if tv != nil { - switch t := x.Type().Underlying().(type) { + x := xAlloc.load() + switch t := typeutil.CoreType(x.Type()).Underlying().(type) { case *types.Array: instr := &Index{ X: x, @@ -1907,36 +2015,40 @@ func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast } rng := &Range{X: x} - rng.setType(tRangeIter) - it := fn.emit(rng, source) + rng.setType(typeutil.NewIterator(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + ))) + it := newVariable(fn, rng.typ, source) + it.store(fn.emit(rng, source)) loop = fn.newBasicBlock("rangeiter.loop") emitJump(fn, loop, source) fn.currentBlock = loop - _, isString := x.Type().Underlying().(*types.Basic) + // Go doesn't currently allow ranging over string|[]byte, so isString is decidable. 
+ _, isString := typeutil.CoreType(x.Type()).Underlying().(*types.Basic) - okv := &Next{ - Iter: it, + okvInstr := &Next{ + Iter: it.load(), IsString: isString, } - okv.setType(types.NewTuple( - varOk, - newVar("k", tk), - newVar("v", tv), - )) - fn.emit(okv, source) + okvInstr.setType(rng.typ.(*typeutil.Iterator).Elem()) + fn.emit(okvInstr, source) + okv := newVariable(fn, okvInstr.Type(), source) + okv.store(okvInstr) body := fn.newBasicBlock("rangeiter.body") done = fn.newBasicBlock("rangeiter.done") - emitIf(fn, emitExtract(fn, okv, 0, source), body, done, source) + emitIf(fn, emitExtract(fn, okv.load(), 0, source), body, done, source) fn.currentBlock = body if tk != tInvalid { - k = emitExtract(fn, okv, 1, source) + k = emitExtract(fn, okv.load(), 1, source) } if tv != tInvalid { - v = emitExtract(fn, okv, 2, source) + v = emitExtract(fn, okv.load(), 2, source) } return } @@ -1962,17 +2074,47 @@ func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Nod loop = fn.newBasicBlock("rangechan.loop") emitJump(fn, loop, source) fn.currentBlock = loop - retv := emitRecv(fn, x, true, types.NewTuple(newVar("k", x.Type().Underlying().(*types.Chan).Elem()), varOk), source) + + recv := emitRecv(fn, x, true, types.NewTuple(newVar("k", typeutil.CoreType(x.Type()).Underlying().(*types.Chan).Elem()), varOk), source) + retv := newVariable(fn, recv.Type(), source) + retv.store(recv) + body := fn.newBasicBlock("rangechan.body") done = fn.newBasicBlock("rangechan.done") - emitIf(fn, emitExtract(fn, retv, 1, source), body, done, source) + emitIf(fn, emitExtract(fn, retv.load(), 1, source), body, done, source) fn.currentBlock = body if tk != nil { - k = emitExtract(fn, retv, 0, source) + k = emitExtract(fn, retv.load(), 0, source) } return } +type variable struct { + alloc *Alloc + fn *Function + source ast.Node +} + +func newVariable(fn *Function, typ types.Type, source ast.Node) *variable { + alloc := &Alloc{} + alloc.setType(types.NewPointer(typ)) + fn.emit(alloc, source) + fn.Locals = append(fn.Locals, alloc) + return &variable{ + alloc: alloc, + fn: fn, + source: source, + } +} + +func (v *variable) store(sv Value) { + emitStore(v.fn, v.alloc, sv, v.source) +} + +func (v *variable) load() Value { + return emitLoad(v.fn, v.alloc, v.source) +} + // rangeStmt emits to fn code for the range statement s, optionally // labelled by label. // @@ -2004,7 +2146,7 @@ func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, sourc var k, v Value var loop, done *BasicBlock - switch rt := x.Type().Underlying().(type) { + switch rt := typeutil.CoreType(x.Type()).Underlying().(type) { case *types.Slice, *types.Array, *types.Pointer: // *array k, v, loop, done = b.rangeIndexed(fn, x, tv, source) @@ -2085,7 +2227,7 @@ start: instr := &Send{ Chan: b.expr(fn, s.Chan), X: emitConv(fn, b.expr(fn, s.Value), - fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem(), s), + typeutil.CoreType(fn.Pkg.typeOf(s.Chan)).Underlying().(*types.Chan).Elem(), s), } fn.emit(instr, s) diff --git a/vendor/honnef.co/go/tools/go/ir/const.go b/vendor/honnef.co/go/tools/go/ir/const.go index 7cdf006e8..9dd7e83b9 100644 --- a/vendor/honnef.co/go/tools/go/ir/const.go +++ b/vendor/honnef.co/go/tools/go/ir/const.go @@ -11,6 +11,10 @@ import ( "go/constant" "go/types" "strconv" + "strings" + + "golang.org/x/exp/typeparams" + "honnef.co/go/tools/go/types/typeutil" ) // NewConst returns a new constant of the specified value and type. 
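The IndexExpr changes above (in both addr and expr0) stop switching on a single underlying container type and instead ask golang.org/x/exp/typeparams.NormalTerms for the normalized terms of the operand's constraint, probing each term for a slice, array, or *array shape. What follows is a minimal standalone sketch of that probing outside the ir package; the sample source, the Index function, and the go/types harness are illustrative assumptions (Go 1.18+ and the x/exp/typeparams module are required), not part of this change.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/exp/typeparams"
)

// A generic function whose type parameter mixes slice, array and *array
// shapes -- the situation the IndexExpr hunks have to handle.
const src = `package p

func Index[T []int64 | [5]int64 | *[5]int64](x T) int64 { return 0 }
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	info := &types.Info{Defs: map[*ast.Ident]types.Object{}}
	if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}

	for _, obj := range info.Defs {
		fn, ok := obj.(*types.Func)
		if !ok || fn.Name() != "Index" {
			continue
		}
		tparam := fn.Type().(*types.Signature).TypeParams().At(0)

		// NormalTerms flattens the constraint into its structural terms;
		// the builder walks these terms to decide whether the operand is
		// indexable and what its element type is.
		terms, err := typeparams.NormalTerms(tparam)
		if err != nil {
			panic(err)
		}
		for _, term := range terms {
			switch t := term.Type().Underlying().(type) {
			case *types.Slice:
				fmt.Println("slice-like, elem", t.Elem())
			case *types.Array:
				fmt.Println("array-like, elem", t.Elem())
			case *types.Pointer:
				arr := t.Elem().Underlying().(*types.Array)
				fmt.Println("*array, elem", arr.Elem())
			}
		}
	}
}

In the actual builder, similar term walks (isArrayLike/isSliceLike in addr, isNonAddressableIndexable/isAddressableIndexable in expr0) choose between IndexAddr- and Index-based lowering, while maps still go through CoreType.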
@@ -43,35 +47,85 @@ func stringConst(s string) *Const { return NewConst(constant.MakeString(s), tString) } -// zeroConst returns a new "zero" constant of the specified type, -// which must not be an array or struct type: the zero values of -// aggregates are well-defined but cannot be represented by Const. -// -func zeroConst(t types.Type) *Const { - switch t := t.(type) { - case *types.Basic: - switch { - case t.Info()&types.IsBoolean != 0: - return NewConst(constant.MakeBool(false), t) - case t.Info()&types.IsNumeric != 0: - return NewConst(constant.MakeInt64(0), t) - case t.Info()&types.IsString != 0: - return NewConst(constant.MakeString(""), t) - case t.Kind() == types.UnsafePointer: - fallthrough - case t.Kind() == types.UntypedNil: - return nilConst(t) +// zeroConst returns a new "zero" constant of the specified type. +func zeroConst(t types.Type) Constant { + if _, ok := t.Underlying().(*types.Interface); ok && !typeparams.IsTypeParam(t) { + // Handle non-generic interface early to simplify following code. + return nilConst(t) + } + + tset := typeutil.NewTypeSet(t) + + switch typ := tset.CoreType().(type) { + case *types.Struct: + values := make([]Constant, typ.NumFields()) + for i := 0; i < typ.NumFields(); i++ { + values[i] = zeroConst(typ.Field(i).Type()) + } + return &AggregateConst{ + register: register{typ: t}, + Values: values, + } + case *types.Tuple: + values := make([]Constant, typ.Len()) + for i := 0; i < typ.Len(); i++ { + values[i] = zeroConst(typ.At(i).Type()) + } + return &AggregateConst{ + register: register{typ: t}, + Values: values, + } + } + + isNillable := func(term *typeparams.Term) bool { + switch typ := term.Type().Underlying().(type) { + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature, *typeutil.Iterator: + return true + case *types.Basic: + switch typ.Kind() { + case types.UnsafePointer, types.UntypedNil: + return true + default: + return false + } default: - panic(fmt.Sprint("zeroConst for unexpected type:", t)) + return false } - case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + } + + isInfo := func(info types.BasicInfo) func(*typeparams.Term) bool { + return func(term *typeparams.Term) bool { + basic, ok := term.Type().Underlying().(*types.Basic) + if !ok { + return false + } + return (basic.Info() & info) != 0 + } + } + + isArray := func(term *typeparams.Term) bool { + _, ok := term.Type().Underlying().(*types.Array) + return ok + } + + switch { + case tset.All(isInfo(types.IsNumeric)): + return NewConst(constant.MakeInt64(0), t) + case tset.All(isInfo(types.IsString)): + return NewConst(constant.MakeString(""), t) + case tset.All(isInfo(types.IsBoolean)): + return NewConst(constant.MakeBool(false), t) + case tset.All(isNillable): return nilConst(t) - case *types.Named: - return NewConst(zeroConst(t.Underlying()).Value, t) - case *types.Array, *types.Struct, *types.Tuple: - panic(fmt.Sprint("zeroConst applied to aggregate:", t)) + case tset.All(isArray): + var k ArrayConst + k.setType(t) + return &k + default: + var k GenericConst + k.setType(t) + return &k } - panic(fmt.Sprint("zeroConst: unexpected ", t)) } func (c *Const) RelString(from *types.Package) string { @@ -96,6 +150,38 @@ func (c *Const) String() string { return c.RelString(c.Parent().pkg()) } +func (v *ArrayConst) RelString(pkg *types.Package) string { + return fmt.Sprintf("ArrayConst <%s>", relType(v.Type(), pkg)) +} + +func (v *ArrayConst) String() string { + return v.RelString(v.Parent().pkg()) 
+} + +func (v *AggregateConst) RelString(pkg *types.Package) string { + values := make([]string, len(v.Values)) + for i, v := range v.Values { + if v != nil { + values[i] = v.RelString(pkg) + } else { + values[i] = "nil" + } + } + return fmt.Sprintf("AggregateConst <%s> (%s)", relType(v.Type(), pkg), strings.Join(values, ", ")) +} + +func (v *GenericConst) RelString(pkg *types.Package) string { + return fmt.Sprintf("GenericConst <%s>", relType(v.Type(), pkg)) +} + +func (v *GenericConst) String() string { + return v.RelString(v.Parent().pkg()) +} + +func (v *AggregateConst) String() string { + return v.RelString(v.Parent().pkg()) +} + // IsNil returns true if this constant represents a typed or untyped nil value. func (c *Const) IsNil() bool { return c.Value == nil @@ -151,3 +237,39 @@ func (c *Const) Complex128() complex128 { im, _ := constant.Float64Val(constant.Imag(c.Value)) return complex(re, im) } + +func (c *Const) equal(o Constant) bool { + // TODO(dh): don't use == for types, this will miss identical pointer types, among others + oc, ok := o.(*Const) + if !ok { + return false + } + return c.typ == oc.typ && c.Value == oc.Value +} + +func (c *AggregateConst) equal(o Constant) bool { + oc, ok := o.(*AggregateConst) + if !ok { + return false + } + // TODO(dh): don't use == for types, this will miss identical pointer types, among others + return c.typ == oc.typ +} + +func (c *ArrayConst) equal(o Constant) bool { + oc, ok := o.(*ArrayConst) + if !ok { + return false + } + // TODO(dh): don't use == for types, this will miss identical pointer types, among others + return c.typ == oc.typ +} + +func (c *GenericConst) equal(o Constant) bool { + oc, ok := o.(*GenericConst) + if !ok { + return false + } + // TODO(dh): don't use == for types, this will miss identical pointer types, among others + return c.typ == oc.typ +} diff --git a/vendor/honnef.co/go/tools/go/ir/emit.go b/vendor/honnef.co/go/tools/go/ir/emit.go index 49a9778d3..7b23041f3 100644 --- a/vendor/honnef.co/go/tools/go/ir/emit.go +++ b/vendor/honnef.co/go/tools/go/ir/emit.go @@ -12,6 +12,10 @@ import ( "go/constant" "go/token" "go/types" + + "honnef.co/go/tools/go/types/typeutil" + + "golang.org/x/exp/typeparams" ) // emitNew emits to f a new (heap Alloc) instruction allocating an @@ -47,8 +51,16 @@ func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.No // expression e with value v. // func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + ref := makeDebugRef(f, e, v, isAddr) + if ref == nil { + return + } + f.emit(ref, nil) +} + +func makeDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) *DebugRef { if !f.debugInfo() { - return // debugging not enabled + return nil // debugging not enabled } if v == nil || e == nil { panic("nil") @@ -57,20 +69,20 @@ func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { e = unparen(e) if id, ok := e.(*ast.Ident); ok { if isBlankIdent(id) { - return + return nil } obj = f.Pkg.objectOf(id) switch obj.(type) { case *types.Nil, *types.Const, *types.Builtin: - return + return nil } } - f.emit(&DebugRef{ + return &DebugRef{ X: v, Expr: e, IsAddr: isAddr, object: obj, - }, nil) + } } // emitArith emits to f code to compute the binary operation op(x, y) @@ -83,9 +95,14 @@ func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast case token.SHL, token.SHR: x = emitConv(f, x, t, source) // y may be signed or an 'untyped' constant. - // TODO(adonovan): whence signed values? 
- if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { - y = emitConv(f, y, types.Typ[types.Uint64], source) + // There is a runtime panic if y is signed and <0. Instead of inserting a check for y<0 + // and converting to an unsigned value (like the compiler) leave y as is. + if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + // Untyped conversion: + // Spec https://go.dev/ref/spec#Operators: + // The right operand in a shift expression must have integer type or be an untyped constant + // representable by a value of type uint. + y = emitConv(f, y, types.Typ[types.Uint], source) } case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: @@ -127,9 +144,9 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value if types.Identical(xt, yt) { // no conversion necessary - } else if _, ok := xt.(*types.Interface); ok { + } else if _, ok := xt.(*types.Interface); ok && !typeparams.IsTypeParam(x.Type()) { y = emitConv(f, y, x.Type(), source) - } else if _, ok := yt.(*types.Interface); ok { + } else if _, ok := yt.(*types.Interface); ok && !typeparams.IsTypeParam(y.Type()) { x = emitConv(f, x, y.Type(), source) } else if _, ok := x.(*Const); ok { x = emitConv(f, x, y.Type(), source) @@ -155,7 +172,7 @@ func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value // func isValuePreserving(ut_src, ut_dst types.Type) bool { // Identical underlying types? - if structTypesIdentical(ut_dst, ut_src) { + if types.IdenticalIgnoreTags(ut_dst, ut_src) { return true } @@ -178,36 +195,43 @@ func isValuePreserving(ut_src, ut_dst types.Type) bool { // by language assignability rules in assignments, parameter passing, // etc. // -func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { +func emitConv(f *Function, val Value, t_dst types.Type, source ast.Node) Value { t_src := val.Type() // Identical types? Conversion is a no-op. - if types.Identical(t_src, typ) { + if types.Identical(t_src, t_dst) { return val } - ut_dst := typ.Underlying() + ut_dst := t_dst.Underlying() ut_src := t_src.Underlying() + tset_dst := typeutil.NewTypeSet(ut_dst) + tset_src := typeutil.NewTypeSet(ut_src) + // Just a change of type, but not value or representation? - if isValuePreserving(ut_src, ut_dst) { + if tset_src.All(func(termSrc *typeparams.Term) bool { + return tset_dst.All(func(termDst *typeparams.Term) bool { + return isValuePreserving(termSrc.Type().Underlying(), termDst.Type().Underlying()) + }) + }) { c := &ChangeType{X: val} - c.setType(typ) + c.setType(t_dst) return f.emit(c, source) } // Conversion to, or construction of a value of, an interface type? - if _, ok := ut_dst.(*types.Interface); ok { + if _, ok := ut_dst.(*types.Interface); ok && !typeparams.IsTypeParam(t_dst) { // Assignment from one interface type to another? - if _, ok := ut_src.(*types.Interface); ok { + if _, ok := ut_src.(*types.Interface); ok && !typeparams.IsTypeParam(t_src) { c := &ChangeInterface{X: val} - c.setType(typ) + c.setType(t_dst) return f.emit(c, source) } // Untyped nil constant? Return interface-typed nil constant. if ut_src == tUntypedNil { - return emitConst(f, nilConst(typ)) + return emitConst(f, nilConst(t_dst)) } // Convert (non-nil) "untyped" literals to their default type. 
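The emitCompare and emitConv hunks above replace single-underlying-type checks with quantification over type sets: the conversion is lowered to a plain ChangeType only when it is value-preserving for every pair of normalized terms of source and destination (tset_src.All / tset_dst.All), and the SliceToArrayPointer and Convert cases use the same kind of All/Any tests. Below is a rough standalone restatement of the "for all term pairs" check using only golang.org/x/exp/typeparams; the allTermPairs helper and the use of types.IdenticalIgnoreTags as the sole predicate are illustrative simplifications (the real isValuePreserving also admits unnamed pointer and channel-direction changes), not the package's API.

package convcheck

import (
	"go/types"

	"golang.org/x/exp/typeparams"
)

// allTermPairs reports whether pred holds for every combination of
// normalized constraint terms of src and dst. For non-generic types,
// NormalTerms returns a single term, so this degenerates to a plain
// check of the two underlying types.
func allTermPairs(src, dst types.Type, pred func(s, d types.Type) bool) bool {
	sterms, err := typeparams.NormalTerms(src)
	if err != nil || len(sterms) == 0 {
		return false // empty or unexpressible type set: be conservative
	}
	dterms, err := typeparams.NormalTerms(dst)
	if err != nil || len(dterms) == 0 {
		return false
	}
	for _, s := range sterms {
		for _, d := range dterms {
			if !pred(s.Type().Underlying(), d.Type().Underlying()) {
				return false
			}
		}
	}
	return true
}

// valuePreservingConv mirrors the first clause of emitConv's test: the
// conversion can skip representation changes only if every term pair has
// identical underlying types, ignoring struct tags.
func valuePreservingConv(src, dst types.Type) bool {
	return allTermPairs(src, dst, types.IdenticalIgnoreTags)
}

The vendored code expresses the same idea through its typeutil.TypeSet helper (NewTypeSet/All/Any/CoreType), which also powers the rewritten zeroConst and the CoreType-based map and channel lookups elsewhere in this diff.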
@@ -217,11 +241,12 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { f.Pkg.Prog.needMethodsOf(val.Type()) mi := &MakeInterface{X: val} - mi.setType(typ) + mi.setType(t_dst) return f.emit(mi, source) } - // Conversion of a compile-time constant value? + // Conversion of a compile-time constant value? Note that converting a constant to a type parameter never results in + // a constant value. if c, ok := val.(*Const); ok { if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { // Conversion of a compile-time constant to @@ -229,7 +254,7 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { // constant of the destination type and // (initially) the same abstract value. // We don't truncate the value yet. - return emitConst(f, NewConst(c.Value, typ)) + return emitConst(f, NewConst(c.Value, t_dst)) } // We're converting from constant to non-constant type, @@ -237,28 +262,35 @@ func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { } // Conversion from slice to array pointer? - if slice, ok := ut_src.(*types.Slice); ok { - if ptr, ok := ut_dst.(*types.Pointer); ok { - if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { - c := &SliceToArrayPointer{X: val} - c.setType(ut_dst) - return f.emit(c, source) + if tset_src.All(func(termSrc *typeparams.Term) bool { + return tset_dst.All(func(termDst *typeparams.Term) bool { + if slice, ok := termSrc.Type().Underlying().(*types.Slice); ok { + if ptr, ok := termDst.Type().Underlying().(*types.Pointer); ok { + if arr, ok := ptr.Elem().Underlying().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { + return true + } + } } - } + return false + }) + }) { + c := &SliceToArrayPointer{X: val} + c.setType(t_dst) + return f.emit(c, source) } // A representation-changing conversion? // At least one of {ut_src,ut_dst} must be *Basic. // (The other may be []byte or []rune.) - _, ok1 := ut_src.(*types.Basic) - _, ok2 := ut_dst.(*types.Basic) + ok1 := tset_src.Any(func(term *typeparams.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) + ok2 := tset_dst.Any(func(term *typeparams.Term) bool { _, ok := term.Type().Underlying().(*types.Basic); return ok }) if ok1 || ok2 { c := &Convert{X: val} - c.setType(typ) + c.setType(t_dst) return f.emit(c, source) } - panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), t_dst)) } // emitStore emits to f an instruction to store value val at location @@ -384,7 +416,10 @@ func emitTailCall(f *Function, call *Call, source ast.Node) { // func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value { for _, index := range indices { - fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A + // pointer to a generic containing a pointer to a struct shouldn't be possible because the outer pointer gets + // dereferenced implicitly before we get here. + fld := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct).Field(index) if isPointer(v.Type()) { instr := &FieldAddr{ @@ -417,7 +452,11 @@ func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node // Ident id is used for position and debug info. 
// func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { - fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + // We may have a generic type containing a pointer, or a pointer to a generic type containing a struct. A + // pointer to a generic containing a pointer to a struct shouldn't be possible because the outer pointer gets + // dereferenced implicitly before we get here. + vut := typeutil.CoreType(deref(v.Type())).Underlying().(*types.Struct) + fld := vut.Field(index) if isPointer(v.Type()) { instr := &FieldAddr{ X: v, @@ -447,15 +486,10 @@ func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast. // and returns it. // func zeroValue(f *Function, t types.Type, source ast.Node) Value { - switch t.Underlying().(type) { - case *types.Struct, *types.Array: - return emitLoad(f, f.addLocal(t, source), source) - default: - return emitConst(f, zeroConst(t)) - } + return emitConst(f, zeroConst(t)) } -func emitConst(f *Function, c *Const) *Const { +func emitConst(f *Function, c Constant) Constant { f.consts = append(f.consts, c) return c } diff --git a/vendor/honnef.co/go/tools/go/ir/func.go b/vendor/honnef.co/go/tools/go/ir/func.go index 69e381cd6..ca42b5c0e 100644 --- a/vendor/honnef.co/go/tools/go/ir/func.go +++ b/vendor/honnef.co/go/tools/go/ir/func.go @@ -191,13 +191,14 @@ type lblock struct { // specified label, creating it if needed. // func (f *Function) labelledBlock(label *ast.Ident) *lblock { - lb := f.lblocks[label.Obj] + obj := f.Pkg.objectOf(label) + lb := f.lblocks[obj] if lb == nil { lb = &lblock{_goto: f.newBasicBlock(label.Name)} if f.lblocks == nil { - f.lblocks = make(map[*ast.Object]*lblock) + f.lblocks = make(map[types.Object]*lblock) } - f.lblocks[label.Obj] = lb + f.lblocks[obj] = lb } return lb } @@ -415,14 +416,14 @@ func (f *Function) emitConsts() { } func (f *Function) emitConstsFew() { - dedup := make([]*Const, 0, 32) + dedup := make([]Constant, 0, 32) for _, c := range f.consts { if len(*c.Referrers()) == 0 { continue } found := false for _, d := range dedup { - if c.typ == d.typ && c.Value == d.Value { + if c.equal(d) { replaceAll(c, d) found = true break @@ -458,9 +459,26 @@ func (f *Function) emitConstsMany() { continue } + var typ types.Type + var val constant.Value + switch c := c.(type) { + case *Const: + typ = c.typ + val = c.Value + case *ArrayConst: + // ArrayConst can only encode zero constants, so all we need is the type + typ = c.typ + case *AggregateConst: + // ArrayConst can only encode zero constants, so all we need is the type + typ = c.typ + case *GenericConst: + typ = c.typ + default: + panic(fmt.Sprintf("unexpected type %T", c)) + } k := constKey{ - typ: c.typ, - value: c.Value, + typ: typ, + value: val, } if dup, ok := m[k]; !ok { m[k] = c @@ -578,9 +596,14 @@ func (f *Function) finishBody() { lift(f) } - // emit constants after lifting, because lifting may produce new constants. + // emit constants after lifting, because lifting may produce new constants, but before other variable splitting, + // because it expects constants to have been deduplicated. 
f.emitConsts() + if f.Prog.mode&SplitAfterNewInformation != 0 { + splitOnNewInformation(f.Blocks[0], &StackMap{}) + } + f.namedResults = nil // (used by lifting) f.implicitResults = nil diff --git a/vendor/honnef.co/go/tools/go/ir/identical.go b/vendor/honnef.co/go/tools/go/ir/identical.go deleted file mode 100644 index 4dbe44717..000000000 --- a/vendor/honnef.co/go/tools/go/ir/identical.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build go1.8 - -package ir - -import "go/types" - -var structTypesIdentical = types.IdenticalIgnoreTags diff --git a/vendor/honnef.co/go/tools/go/ir/identical_17.go b/vendor/honnef.co/go/tools/go/ir/identical_17.go deleted file mode 100644 index 3968fa782..000000000 --- a/vendor/honnef.co/go/tools/go/ir/identical_17.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build !go1.8 - -package ir - -import "go/types" - -var structTypesIdentical = types.Identical diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/util.go b/vendor/honnef.co/go/tools/go/ir/irutil/util.go index 49b978a96..d2f10948d 100644 --- a/vendor/honnef.co/go/tools/go/ir/irutil/util.go +++ b/vendor/honnef.co/go/tools/go/ir/irutil/util.go @@ -46,29 +46,42 @@ func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) { func Vararg(x *ir.Slice) ([]ir.Value, bool) { var out []ir.Value - slice, ok := x.X.(*ir.Alloc) + alloc, ok := ir.Unwrap(x.X).(*ir.Alloc) if !ok { return nil, false } - for _, ref := range *slice.Referrers() { - if ref == x { - continue - } - if ref.Block() != x.Block() { - return nil, false - } - idx, ok := ref.(*ir.IndexAddr) - if !ok { - return nil, false - } - if len(*idx.Referrers()) != 1 { - return nil, false - } - store, ok := (*idx.Referrers())[0].(*ir.Store) - if !ok { - return nil, false + var checkAlloc func(alloc ir.Value) bool + checkAlloc = func(alloc ir.Value) bool { + for _, ref := range *alloc.Referrers() { + if ref == x { + continue + } + if ref.Block() != x.Block() { + return false + } + switch ref := ref.(type) { + case *ir.IndexAddr: + idx := ref + if len(*idx.Referrers()) != 1 { + return false + } + store, ok := (*idx.Referrers())[0].(*ir.Store) + if !ok { + return false + } + out = append(out, store.Val) + case *ir.Copy: + if !checkAlloc(ref) { + return false + } + default: + return false + } } - out = append(out, store.Val) + return true + } + if !checkAlloc(alloc) { + return nil, false } return out, true } diff --git a/vendor/honnef.co/go/tools/go/ir/lift.go b/vendor/honnef.co/go/tools/go/ir/lift.go index 336470464..8ab67eb8e 100644 --- a/vendor/honnef.co/go/tools/go/ir/lift.go +++ b/vendor/honnef.co/go/tools/go/ir/lift.go @@ -43,8 +43,8 @@ package ir // Also see many other "TODO: opt" suggestions in the code. import ( + "encoding/binary" "fmt" - "go/types" "os" ) @@ -214,7 +214,6 @@ func lift(fn *Function) { instr.index = -1 continue } - index := -1 if numAllocs == 0 { df = buildDomFrontier(fn) rdf = buildPostDomFrontier(fn) @@ -238,9 +237,8 @@ func lift(fn *Function) { } } liftAlloc(closure, df, rdf, instr, newPhis, newSigmas) - index = numAllocs + instr.index = numAllocs numAllocs++ - instr.index = index case *Defer: usesDefer = true case *RunDefers: @@ -260,7 +258,7 @@ func lift(fn *Function) { // Renaming. rename(fn.Blocks[0], renaming, newPhis, newSigmas) - simplifyPhis(newPhis) + simplifyPhisAndSigmas(newPhis, newSigmas) // Eliminate dead φ- and σ-nodes. 
markLiveNodes(fn.Blocks, newPhis, newSigmas) @@ -397,6 +395,9 @@ func hasDirectReferrer(instr Instruction) bool { } func markLiveNodes(blocks []*BasicBlock, newPhis newPhiMap, newSigmas newSigmaMap) { + // Phis and sigmas may become dead due to optimization passes. We may also insert more nodes than strictly + // necessary, e.g. sigma nodes for constants, which will never be used. + // Phi and sigma nodes are considered live if a non-phi, non-sigma // node uses them. Once we find a node that is live, we mark all // of its operands as used, too. @@ -456,10 +457,21 @@ func markLiveSigma(sigma *Sigma) { } } -// simplifyPhis replaces trivial phis with non-phi alternatives. Phi +// simplifyPhisAndSigmas removes duplicate phi and sigma nodes, +// and replaces trivial phis with non-phi alternatives. Phi // nodes where all edges are identical, or consist of only the phi // itself and one other value, may be replaced with the value. -func simplifyPhis(newPhis newPhiMap) { +func simplifyPhisAndSigmas(newPhis newPhiMap, newSigmas newSigmaMap) { + // temporary numbering of values used in phis so that we can build map keys + var id ID + for _, npList := range newPhis { + for _, np := range npList { + for _, edge := range np.phi.Edges { + edge.setID(id) + id++ + } + } + } // find all phis that are trivial and can be replaced with a // non-phi value. run until we reach a fixpoint, because replacing // a phi may make other phis trivial. @@ -468,7 +480,7 @@ func simplifyPhis(newPhis newPhiMap) { for _, npList := range newPhis { for _, np := range npList { if np.phi.live { - // we're reusing 'live' to mean 'dead' in the context of simplifyPhis + // we're reusing 'live' to mean 'dead' in the context of simplifyPhisAndSigmas continue } if r, ok := isUselessPhi(np.phi); ok { @@ -481,11 +493,83 @@ func simplifyPhis(newPhis newPhiMap) { } } } + + // Replace duplicate sigma nodes with a single node. These nodes exist when multiple allocs get replaced with the + // same dominating store. + for _, sigmaList := range newSigmas { + primarySigmas := map[struct { + succ int + v Value + }]*Sigma{} + for _, sigmas := range sigmaList { + for succ, sigma := range sigmas.sigmas { + if sigma == nil { + continue + } + if sigma.live { + // we're reusing 'live' to mean 'dead' in the context of simplifyPhisAndSigmas + continue + } + key := struct { + succ int + v Value + }{succ, sigma.X} + if alt, ok := primarySigmas[key]; ok { + replaceAll(sigma, alt) + sigma.live = true + changed = true + } else { + primarySigmas[key] = sigma + } + } + } + } + + // Replace duplicate phi nodes with a single node. As far as we know, these duplicate nodes only ever exist + // because of the previous sigma deduplication. 
+ keyb := make([]byte, 0, 4*8) + for _, npList := range newPhis { + primaryPhis := map[string]*Phi{} + for _, np := range npList { + if np.phi.live { + continue + } + if n := len(np.phi.Edges) * 8; cap(keyb) >= n { + keyb = keyb[:n] + } else { + keyb = make([]byte, n, n*2) + } + for i, e := range np.phi.Edges { + binary.LittleEndian.PutUint64(keyb[i*8:i*8+8], uint64(e.ID())) + } + if alt, ok := primaryPhis[string(keyb)]; ok { + replaceAll(np.phi, alt) + np.phi.live = true + changed = true + } else { + primaryPhis[string(keyb)] = np.phi + } + } + } + } for _, npList := range newPhis { for _, np := range npList { np.phi.live = false + for _, edge := range np.phi.Edges { + edge.setID(0) + } + } + } + + for _, sigmaList := range newSigmas { + for _, sigmas := range sigmaList { + for _, sigma := range sigmas.sigmas { + if sigma != nil { + sigma.live = false + } + } } } } @@ -685,13 +769,6 @@ type newPhiMap [][]newPhi type newSigmaMap [][]newSigma func liftable(alloc *Alloc) bool { - // Don't lift aggregates into registers, because we don't have - // a way to express their zero-constants. - switch deref(alloc.Type()).Underlying().(type) { - case *types.Array, *types.Struct: - return false - } - fn := alloc.Parent() // Don't lift named return values in functions that defer // calls that may recover from panic. @@ -727,9 +804,7 @@ func liftable(alloc *Alloc) bool { return true } -// liftAlloc determines whether alloc can be lifted into registers, -// and if so, it populates newPhis with all the φ-nodes it may require -// and returns true. +// liftAlloc lifts alloc into registers and populates newPhis and newSigmas with all the φ- and σ-nodes it may require. func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *Alloc, newPhis newPhiMap, newSigmas newSigmaMap) { fn := alloc.Parent() @@ -742,9 +817,6 @@ func liftAlloc(closure *closure, df domFrontier, rdf postDomFrontier, alloc *All // Compute defblocks, the set of blocks containing a // definition of the alloc cell. for _, instr := range *alloc.Referrers() { - // Bail out if we discover the alloc is not liftable; - // the only operations permitted to use the alloc are - // loads/stores into the cell, and DebugRef. switch instr := instr.(type) { case *Store: defblocks.Add(instr.Block()) @@ -899,6 +971,28 @@ func replaceAll(x, y Value) { *pxrefs = nil // x is now unreferenced } +func replace(instr Instruction, x, y Value) { + args := instr.Operands(nil) + matched := false + for _, arg := range args { + if *arg == x { + *arg = y + matched = true + } + } + if matched { + yrefs := y.Referrers() + if yrefs != nil { + *yrefs = append(*yrefs, instr) + } + + xrefs := x.Referrers() + if xrefs != nil { + *xrefs = removeInstr(*xrefs, instr) + } + } +} + // renamed returns the value to which alloc is being renamed, // constructing it lazily if it's the implicit zero initialization. 
// @@ -911,6 +1005,196 @@ func renamed(fn *Function, renaming []Value, alloc *Alloc) Value { return v } +func copyValue(v Value, why Instruction, info CopyInfo) *Copy { + c := &Copy{ + X: v, + Why: why, + Info: info, + } + if refs := v.Referrers(); refs != nil { + *refs = append(*refs, c) + } + c.setType(v.Type()) + c.setSource(v.Source()) + return c +} + +func splitOnNewInformation(u *BasicBlock, renaming *StackMap) { + renaming.Push() + defer renaming.Pop() + + rename := func(v Value, why Instruction, info CopyInfo, i int) { + c := copyValue(v, why, info) + c.setBlock(u) + renaming.Set(v, c) + u.Instrs = append(u.Instrs, nil) + copy(u.Instrs[i+2:], u.Instrs[i+1:]) + u.Instrs[i+1] = c + } + + replacement := func(v Value) (Value, bool) { + r, ok := renaming.Get(v) + if !ok { + return nil, false + } + for { + rr, ok := renaming.Get(r) + if !ok { + // Store replacement in the map so that future calls to replacement(v) don't have to go through the + // iterative process again. + renaming.Set(v, r) + return r, true + } + r = rr + } + } + + var hasInfo func(v Value, info CopyInfo) bool + hasInfo = func(v Value, info CopyInfo) bool { + switch v := v.(type) { + case *Copy: + return (v.Info&info) == info || hasInfo(v.X, info) + case *FieldAddr, *IndexAddr, *TypeAssert, *MakeChan, *MakeMap, *MakeSlice, *Alloc: + return info == CopyInfoNotNil + case Member, *Builtin: + return info == CopyInfoNotNil + case *Sigma: + return hasInfo(v.X, info) + default: + return false + } + } + + var args []*Value + for i := 0; i < len(u.Instrs); i++ { + instr := u.Instrs[i] + if instr == nil { + continue + } + args = instr.Operands(args[:0]) + for _, arg := range args { + if *arg == nil { + continue + } + if r, ok := replacement(*arg); ok { + *arg = r + replace(instr, *arg, r) + } + } + + // TODO write some bits on why we copy values instead of encoding the actual control flow and panics + + switch instr := instr.(type) { + case *IndexAddr: + // Note that we rename instr.Index and instr.X even if they're already copies, because unique combinations + // of X and Index may lead to unique information. + + // OPT we should rename both variables at once and avoid one memmove + rename(instr.Index, instr, CopyInfoNotNegative, i) + rename(instr.X, instr, CopyInfoNotNil, i) + i += 2 // skip over instructions we just inserted + case *FieldAddr: + if !hasInfo(instr.X, CopyInfoNotNil) { + rename(instr.X, instr, CopyInfoNotNil, i) + i++ + } + case *TypeAssert: + // If we've already type asserted instr.X without comma-ok before, then it can only contain a single type, + // and successive type assertions, no matter the type, don't tell us anything new. 
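For illustration (not part of the vendored diff): the replacement closure in splitOnNewInformation above chases a chain of renamings to the newest Copy and then caches the result, so repeated lookups stay cheap. A simplified, plain-map sketch of that lookup (the real code layers scopes with StackMap; names here are hypothetical):

package main

import "fmt"

// resolve follows v through a chain of renamings to its final replacement and
// memoizes the answer on the way out, like the replacement closure above.
func resolve(renaming map[string]string, v string) (string, bool) {
	r, ok := renaming[v]
	if !ok {
		return "", false
	}
	for {
		rr, ok := renaming[r]
		if !ok {
			renaming[v] = r // shortcut future lookups for v
			return r, true
		}
		r = rr
	}
}

func main() {
	renaming := map[string]string{"x": "x1", "x1": "x2"}
	fmt.Println(resolve(renaming, "x")) // x2 true
	fmt.Println(renaming["x"])          // x2 (cached)
}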
+ if !hasInfo(instr.X, CopyInfoNotNil|CopyInfoSingleConcreteType) { + rename(instr.X, instr, CopyInfoNotNil|CopyInfoSingleConcreteType, i) + i++ // skip over instruction we just inserted + } + case *Load: + if !hasInfo(instr.X, CopyInfoNotNil) { + rename(instr.X, instr, CopyInfoNotNil, i) + i++ + } + case *Store: + if !hasInfo(instr.Addr, CopyInfoNotNil) { + rename(instr.Addr, instr, CopyInfoNotNil, i) + i++ + } + case *MapUpdate: + if !hasInfo(instr.Map, CopyInfoNotNil) { + rename(instr.Map, instr, CopyInfoNotNil, i) + i++ + } + case CallInstruction: + off := 0 + if !instr.Common().IsInvoke() && !hasInfo(instr.Common().Value, CopyInfoNotNil) { + rename(instr.Common().Value, instr, CopyInfoNotNil, i) + off++ + } + if f, ok := instr.Common().Value.(*Builtin); ok { + switch f.name { + case "close": + arg := instr.Common().Args[0] + if !hasInfo(arg, CopyInfoNotNil|CopyInfoClosed) { + rename(arg, instr, CopyInfoNotNil|CopyInfoClosed, i) + off++ + } + } + } + i += off + case *SliceToArrayPointer: + // A slice to array pointer conversion tells us the minimum length of the slice + rename(instr.X, instr, CopyInfoUnspecified, i) + i++ + case *Slice: + // Slicing tells us about some of the bounds + off := 0 + if instr.Low == nil && instr.High == nil && instr.Max == nil { + // If all indices are unspecified, then we can only learn something about instr.X if it might've been + // nil. + if !hasInfo(instr.X, CopyInfoNotNil) { + rename(instr.X, instr, CopyInfoUnspecified, i) + off++ + } + } else { + rename(instr.X, instr, CopyInfoUnspecified, i) + off++ + } + // We copy the indices even if we already know they are not negative, because we can associate numeric + // ranges with them. + if instr.Low != nil { + rename(instr.Low, instr, CopyInfoNotNegative, i) + off++ + } + if instr.High != nil { + rename(instr.High, instr, CopyInfoNotNegative, i) + off++ + } + if instr.Max != nil { + rename(instr.Max, instr, CopyInfoNotNegative, i) + off++ + } + i += off + case *StringLookup: + rename(instr.X, instr, CopyInfoUnspecified, i) + rename(instr.Index, instr, CopyInfoNotNegative, i) + i += 2 + case *Recv: + if !hasInfo(instr.Chan, CopyInfoNotNil) { + // Receiving from a nil channel never completes + rename(instr.Chan, instr, CopyInfoNotNil, i) + i++ + } + case *Send: + if !hasInfo(instr.Chan, CopyInfoNotNil) { + // Sending to a nil channel never completes. Sending to a closed channel panics, but whether a channel + // is closed isn't local to this function, so we didn't learn anything. + rename(instr.Chan, instr, CopyInfoNotNil, i) + i++ + } + } + } + + for _, v := range u.dom.children { + splitOnNewInformation(v, renaming) + } +} + // rename implements the Cytron et al-based SSI renaming algorithm, a // preorder traversal of the dominator tree replacing all loads of // Alloc cells with the value stored to that cell by the dominating @@ -964,21 +1248,19 @@ func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap, newSigmas newSig case *Load: if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell - // In theory, we wouldn't be able to replace loads - // directly, because a loaded value could be used in - // different branches, in which case it should be - // replaced with different sigma nodes. But we can't - // simply defer replacement, either, because then - // later stores might incorrectly affect this load. 
+ // In theory, we wouldn't be able to replace loads directly, because a loaded value could be used in + // different branches, in which case it should be replaced with different sigma nodes. But we can't + // simply defer replacement, either, because then later stores might incorrectly affect this load. + // + // To avoid doing renaming on _all_ values (instead of just loads and stores like we're doing), we make + // sure during code generation that each load is only used in one block. For example, in constant switch + // statements, where the tag is only evaluated once, we store it in a temporary and load it for each + // comparison, so that we have individual loads to replace. // - // To avoid doing renaming on _all_ values (instead of - // just loads and stores like we're doing), we make - // sure during code generation that each load is only - // used in one block. For example, in constant switch - // statements, where the tag is only evaluated once, - // we store it in a temporary and load it for each - // comparison, so that we have individual loads to - // replace. + // Because we only rename stores and loads, the end result will not contain sigma nodes for all + // constants. Some constants may be used directly, e.g. in comparisons such as 'x == 5'. We may still + // end up inserting dead sigma nodes in branches, but these will never get used in renaming and will be + // cleaned up when we remove dead phis and sigmas. newval := renamed(u.Parent(), renaming, alloc) if debugLifting { fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", diff --git a/vendor/honnef.co/go/tools/go/ir/methods.go b/vendor/honnef.co/go/tools/go/ir/methods.go index 517f448b8..fa45d1b9e 100644 --- a/vendor/honnef.co/go/tools/go/ir/methods.go +++ b/vendor/honnef.co/go/tools/go/ir/methods.go @@ -9,6 +9,10 @@ package ir import ( "fmt" "go/types" + + "honnef.co/go/tools/analysis/lint" + + "golang.org/x/exp/typeparams" ) // MethodValue returns the Function implementing method sel, building @@ -116,10 +120,15 @@ func (prog *Program) RuntimeTypes() []types.Type { // declaredFunc returns the concrete function/method denoted by obj. // Panic ensues if there is none. -// func (prog *Program) declaredFunc(obj *types.Func) *Function { - if v := prog.packageLevelValue(obj); v != nil { - return v.(*Function) + if origin := typeparams.OriginMethod(obj); origin != obj { + // Calling method on instantiated type, create a wrapper that calls the generic type's method + base := prog.packageLevelValue(origin) + return makeInstance(prog, base.(*Function), obj.Type().(*types.Signature), nil) + } else { + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Function) + } } panic("no concrete method: " + obj.String()) } @@ -186,7 +195,7 @@ func (prog *Program) needMethods(T types.Type, skip bool) { case *types.Basic: // nop - case *types.Interface: + case *types.Interface, *typeparams.TypeParam: // nop---handled by recursion over method set. case *types.Pointer: @@ -212,7 +221,7 @@ func (prog *Program) needMethods(T types.Type, skip bool) { case *types.Named: // A pointer-to-named type can be derived from a named // type via reflection. It may have methods too. - prog.needMethods(types.NewPointer(T), false) + prog.needMethods(types.NewPointer(t), false) // Consider 'type T struct{S}' where S has methods. 
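For illustration (not part of the vendored diff): the declaredFunc change above routes methods of instantiated generic types through makeInstance, which wraps the generic declaration in a SyntheticGeneric function. A minimal source-level example of the kind of call site that wrapper covers (hypothetical user code, Go 1.18+):

package main

import "fmt"

// Box is a small generic type with a method. A call such as Box[int].Get has
// no concrete declaration of its own; the IR builder now synthesizes a
// wrapper around the generic method instead of panicking in declaredFunc.
type Box[T any] struct{ v T }

func (b Box[T]) Get() T { return b.v }

func main() {
	b := Box[int]{v: 42}
	fmt.Println(b.Get()) // 42
}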
// Reflection provides no way to get from T to struct{S}, @@ -234,6 +243,6 @@ func (prog *Program) needMethods(T types.Type, skip bool) { } default: - panic(T) + lint.ExhaustiveTypeSwitch(T) } } diff --git a/vendor/honnef.co/go/tools/go/ir/mode.go b/vendor/honnef.co/go/tools/go/ir/mode.go index da548fdbb..b0b2c9291 100644 --- a/vendor/honnef.co/go/tools/go/ir/mode.go +++ b/vendor/honnef.co/go/tools/go/ir/mode.go @@ -21,17 +21,18 @@ import ( type BuilderMode uint const ( - PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout - PrintFunctions // Print function IR code to stdout - PrintSource // Print source code when printing function IR - LogSource // Log source locations as IR builder progresses - SanityCheckFunctions // Perform sanity checking of function bodies - NaiveForm // Build naïve IR form: don't replace local loads/stores with registers - GlobalDebug // Enable debug info for all packages + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function IR code to stdout + PrintSource // Print source code when printing function IR + LogSource // Log source locations as IR builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve IR form: don't replace local loads/stores with registers + GlobalDebug // Enable debug info for all packages + SplitAfterNewInformation // Split live range after we learn something new about a value ) const BuilderModeDoc = `Options controlling the IR builder. -The value is a sequence of zero or more of these letters: +The value is a sequence of zero or more of these symbols: C perform sanity [C]hecking of the IR form. D include [D]ebug info for every function. P print [P]ackage inventory. @@ -39,6 +40,7 @@ F print [F]unction IR code. A print [A]ST nodes responsible for IR instructions S log [S]ource locations as IR builder progresses. N build [N]aive IR form: don't replace local loads/stores with registers. 
+I Split live range after a value is used as slice or array index ` func (m BuilderMode) String() string { @@ -64,6 +66,9 @@ func (m BuilderMode) String() string { if m&NaiveForm != 0 { buf.WriteByte('N') } + if m&SplitAfterNewInformation != 0 { + buf.WriteByte('I') + } return buf.String() } @@ -86,6 +91,8 @@ func (m *BuilderMode) Set(s string) error { mode |= SanityCheckFunctions case 'N': mode |= NaiveForm + case 'I': + mode |= SplitAfterNewInformation default: return fmt.Errorf("unknown BuilderMode option: %q", c) } diff --git a/vendor/honnef.co/go/tools/go/ir/print.go b/vendor/honnef.co/go/tools/go/ir/print.go index befba6617..c5b51ca8a 100644 --- a/vendor/honnef.co/go/tools/go/ir/print.go +++ b/vendor/honnef.co/go/tools/go/ir/print.go @@ -127,6 +127,10 @@ func printCall(v *CallCommon, prefix string, instr Instruction) string { fmt.Fprintf(&b, "%sInvoke %s.%s", prefix, relName(v.Value, instr), v.Method.Name()) } } + for _, arg := range v.TypeArgs { + b.WriteString(" ") + b.WriteString(relType(arg, instr.Parent().pkg())) + } for _, arg := range v.Args { b.WriteString(" ") b.WriteString(relName(arg, instr)) @@ -154,6 +158,10 @@ func (v *Load) String() string { return fmt.Sprintf("Load <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v)) } +func (v *Copy) String() string { + return fmt.Sprintf("Copy <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v)) +} + func printConv(prefix string, v, x Value) string { from := v.Parent().pkg() return fmt.Sprintf("%s <%s> %s", @@ -211,7 +219,8 @@ func (v *MakeChan) String() string { func (v *FieldAddr) String() string { from := v.Parent().pkg() - st := deref(v.X.Type()).Underlying().(*types.Struct) + // v.X.Type() might be a pointer to a type parameter whose core type is a pointer to a struct + st := deref(typeutil.CoreType(deref(v.X.Type()))).Underlying().(*types.Struct) // Be robust against a bad index. name := "?" if 0 <= v.Field && v.Field < st.NumFields() { @@ -221,7 +230,7 @@ func (v *FieldAddr) String() string { } func (v *Field) String() string { - st := v.X.Type().Underlying().(*types.Struct) + st := typeutil.CoreType(v.X.Type()).Underlying().(*types.Struct) // Be robust against a bad index. name := "?" if 0 <= v.Field && v.Field < st.NumFields() { diff --git a/vendor/honnef.co/go/tools/go/ir/sanity.go b/vendor/honnef.co/go/tools/go/ir/sanity.go index a8b82d638..1788d0f13 100644 --- a/vendor/honnef.co/go/tools/go/ir/sanity.go +++ b/vendor/honnef.co/go/tools/go/ir/sanity.go @@ -189,6 +189,9 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { case *Load: case *Parameter: case *Const: + case *AggregateConst: + case *ArrayConst: + case *GenericConst: case *Recv: case *TypeSwitch: default: @@ -207,8 +210,6 @@ func (s *sanity) checkInstr(idx int, instr Instruction) { t := v.Type() if t == nil { s.errorf("no type: %s = %s", v.Name(), v) - } else if t == tRangeIter { - // not a proper type; ignore. } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { if _, ok := v.(*Const); !ok { s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) @@ -445,7 +446,7 @@ func (s *sanity) checkFunction(fn *Function) bool { // separate-compilation model), and error.Error. 
if fn.Pkg == nil { switch fn.Synthetic { - case SyntheticWrapper, SyntheticBound, SyntheticThunk: + case SyntheticWrapper, SyntheticBound, SyntheticThunk, SyntheticGeneric: default: if !strings.HasSuffix(fn.name, "Error") { s.errorf("nil Pkg") diff --git a/vendor/honnef.co/go/tools/go/ir/source.go b/vendor/honnef.co/go/tools/go/ir/source.go index 93d1ccbd2..677eefbd7 100644 --- a/vendor/honnef.co/go/tools/go/ir/source.go +++ b/vendor/honnef.co/go/tools/go/ir/source.go @@ -14,6 +14,8 @@ import ( "go/ast" "go/token" "go/types" + + "golang.org/x/exp/typeparams" ) // EnclosingFunction returns the function that contains the syntax @@ -176,6 +178,7 @@ func (prog *Program) packageLevelValue(obj types.Object) Value { // result's Signature, both in the params/results and in the receiver. // func (prog *Program) FuncValue(obj *types.Func) *Function { + obj = typeparams.OriginMethod(obj) fn, _ := prog.packageLevelValue(obj).(*Function) return fn } diff --git a/vendor/honnef.co/go/tools/go/ir/ssa.go b/vendor/honnef.co/go/tools/go/ir/ssa.go index 3d089e0c3..c51dd67d3 100644 --- a/vendor/honnef.co/go/tools/go/ir/ssa.go +++ b/vendor/honnef.co/go/tools/go/ir/ssa.go @@ -15,6 +15,7 @@ import ( "go/types" "sync" + "golang.org/x/exp/typeparams" "honnef.co/go/tools/go/types/typeutil" ) @@ -287,6 +288,7 @@ const ( SyntheticThunk SyntheticWrapper SyntheticBound + SyntheticGeneric ) func (syn Synthetic) String() string { @@ -301,6 +303,8 @@ func (syn Synthetic) String() string { return "wrapper" case SyntheticBound: return "bound" + case SyntheticGeneric: + return "generic" default: return fmt.Sprintf("Synthetic(%d)", syn) } @@ -348,6 +352,7 @@ type Function struct { object types.Object // a declared *types.Func or one of its wrappers method *types.Selection // info about provenance of synthetic methods Signature *types.Signature + generics instanceWrapperMap Synthetic Synthetic parent *Function // enclosing function if anon; nil if global @@ -365,6 +370,83 @@ type Function struct { *functionBody } +type instanceWrapperMap struct { + h typeutil.Hasher + entries map[uint32][]struct { + key *typeparams.TypeList + val *Function + } + len int +} + +func typeListIdentical(l1, l2 *typeparams.TypeList) bool { + if l1.Len() != l2.Len() { + return false + } + for i := 0; i < l1.Len(); i++ { + t1 := l1.At(i) + t2 := l2.At(i) + if !types.Identical(t1, t2) { + return false + } + } + return true +} + +func (m *instanceWrapperMap) At(key *typeparams.TypeList) *Function { + if m.entries == nil { + m.entries = make(map[uint32][]struct { + key *typeparams.TypeList + val *Function + }) + m.h = typeutil.MakeHasher() + } + + var hash uint32 + for i := 0; i < key.Len(); i++ { + t := key.At(i) + hash += m.h.Hash(t) + } + + for _, e := range m.entries[hash] { + if typeListIdentical(e.key, key) { + return e.val + } + } + return nil +} + +func (m *instanceWrapperMap) Set(key *typeparams.TypeList, val *Function) { + if m.entries == nil { + m.entries = make(map[uint32][]struct { + key *typeparams.TypeList + val *Function + }) + m.h = typeutil.MakeHasher() + } + + var hash uint32 + for i := 0; i < key.Len(); i++ { + t := key.At(i) + hash += m.h.Hash(t) + } + for i, e := range m.entries[hash] { + if typeListIdentical(e.key, key) { + m.entries[hash][i].val = val + return + } + } + m.entries[hash] = append(m.entries[hash], struct { + key *typeparams.TypeList + val *Function + }{key, val}) + m.len++ +} + +func (m *instanceWrapperMap) Len() int { + return m.len +} + type NoReturn uint8 const ( @@ -377,13 +459,13 @@ const ( type functionBody 
struct { // The following fields are set transiently during building, // then cleared. - currentBlock *BasicBlock // where to emit code - objects map[types.Object]Value // addresses of local variables - namedResults []*Alloc // tuple of named results - implicitResults []*Alloc // tuple of results - targets *targets // linked stack of branch targets - lblocks map[*ast.Object]*lblock // labelled blocks - consts []*Const + currentBlock *BasicBlock // where to emit code + objects map[types.Object]Value // addresses of local variables + namedResults []*Alloc // tuple of named results + implicitResults []*Alloc // tuple of results + targets *targets // linked stack of branch targets + lblocks map[types.Object]*lblock // labelled blocks + consts []Constant wr *HTMLWriter fakeExits BlockSet blocksets [5]BlockSet @@ -501,6 +583,35 @@ type Const struct { Value constant.Value } +type AggregateConst struct { + register + + Values []Constant +} + +// TODO add the element's zero constant to ArrayConst +type ArrayConst struct { + register +} + +type GenericConst struct { + register +} + +type Constant interface { + Instruction + Value + aConstant() + RelString(*types.Package) string + equal(Constant) bool + setType(types.Type) +} + +func (*Const) aConstant() {} +func (*AggregateConst) aConstant() {} +func (*ArrayConst) aConstant() {} +func (*GenericConst) aConstant() {} + // A Global is a named Value holding the address of a package-level // variable. // @@ -610,6 +721,24 @@ type Sigma struct { live bool // used during lifting } +type CopyInfo uint64 + +const ( + CopyInfoUnspecified CopyInfo = 0 + CopyInfoNotNil CopyInfo = 1 << iota + CopyInfoNotZeroLength + CopyInfoNotNegative + CopyInfoSingleConcreteType + CopyInfoClosed +) + +type Copy struct { + register + X Value + Why Instruction + Info CopyInfo +} + // The Phi instruction represents an SSA φ-node, which combines values // that differ across incoming control-flow edges and yields a new // value. Within a block, all φ-nodes must appear before all non-φ, non-σ @@ -1525,10 +1654,11 @@ type anInstruction struct { // the last element of Args is a slice. // type CallCommon struct { - Value Value // receiver (invoke mode) or func value (call mode) - Method *types.Func // abstract method (invoke mode) - Args []Value // actual parameters (in static method call, includes receiver) - Results Value + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // abstract method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + TypeArgs []types.Type + Results Value } // IsInvoke returns true if this call has "invoke" (not "call") mode. 
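For illustration (not part of the vendored diff): CopyInfo above is a bit set, and callers such as hasInfo test it with (got & want) == want so that a combined query like CopyInfoNotNil|CopyInfoSingleConcreteType requires every bit to be present. A tiny sketch of that test with hypothetical flag names:

package main

import "fmt"

// info mirrors the shape of CopyInfo: a zero "unspecified" value plus
// single-bit facts that can be OR-ed together. Names are hypothetical.
type info uint64

const (
	infoUnspecified info = 0
	infoNotNil      info = 1 << iota
	infoNotNegative
	infoSingleConcreteType
)

// has reports whether all bits of want are set in got, the same test the
// lifting code uses before deciding whether to insert another Copy.
func has(got, want info) bool { return got&want == want }

func main() {
	got := infoNotNil | infoNotNegative
	fmt.Println(has(got, infoNotNil))                        // true
	fmt.Println(has(got, infoNotNil|infoSingleConcreteType)) // false
}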
@@ -1548,7 +1678,7 @@ func (c *CallCommon) Signature() *types.Signature { if c.Method != nil { return c.Method.Type().(*types.Signature) } - return c.Value.Type().Underlying().(*types.Signature) + return typeutil.CoreType(c.Value.Type()).(*types.Signature) } // StaticCallee returns the callee if this is a trivially static @@ -1754,6 +1884,10 @@ func (s *DebugRef) Operands(rands []*Value) []*Value { return append(rands, &s.X) } +func (s *Copy) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + func (v *Extract) Operands(rands []*Value) []*Value { return append(rands, &v.Tuple) } @@ -1909,9 +2043,12 @@ func (v *Load) Operands(rands []*Value) []*Value { } // Non-Instruction Values: -func (v *Builtin) Operands(rands []*Value) []*Value { return rands } -func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } -func (v *Const) Operands(rands []*Value) []*Value { return rands } -func (v *Function) Operands(rands []*Value) []*Value { return rands } -func (v *Global) Operands(rands []*Value) []*Value { return rands } -func (v *Parameter) Operands(rands []*Value) []*Value { return rands } +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *ArrayConst) Operands(rands []*Value) []*Value { return rands } +func (v *AggregateConst) Operands(rands []*Value) []*Value { return rands } +func (v *GenericConst) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/go/ir/util.go b/vendor/honnef.co/go/tools/go/ir/util.go index 343a6320a..550f6c9d8 100644 --- a/vendor/honnef.co/go/tools/go/ir/util.go +++ b/vendor/honnef.co/go/tools/go/ir/util.go @@ -15,6 +15,9 @@ import ( "os" "honnef.co/go/tools/go/ast/astutil" + "honnef.co/go/tools/go/types/typeutil" + + "golang.org/x/exp/typeparams" ) //// AST utilities @@ -31,8 +34,13 @@ func isBlankIdent(e ast.Expr) bool { //// Type utilities. Some of these belong in go/types. -// isPointer returns true for types whose underlying type is a pointer. +// isPointer returns true for types whose underlying type is a pointer, +// and for type parameters whose core type is a pointer. func isPointer(typ types.Type) bool { + if ctyp := typeutil.CoreType(typ); ctyp != nil { + _, ok := ctyp.(*types.Pointer) + return ok + } _, ok := typ.Underlying().(*types.Pointer) return ok } @@ -41,10 +49,17 @@ func isInterface(T types.Type) bool { return types.IsInterface(T) } // deref returns a pointer's element type; otherwise it returns typ. func deref(typ types.Type) types.Type { + orig := typ + + if t, ok := typ.(*typeparams.TypeParam); ok { + if ctyp := typeutil.CoreType(t); ctyp != nil { + typ = ctyp + } + } if p, ok := typ.Underlying().(*types.Pointer); ok { return p.Elem() } - return typ + return orig } // recvType returns the receiver type of method obj. 
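For illustration (not part of the vendored diff): the updated deref above first maps a type parameter to its core type and only then unwraps a pointer. For reference, a standalone version of the plain go/types behaviour it extends, without any typeparams handling:

package main

import (
	"fmt"
	"go/types"
)

// deref unwraps a pointer type to its element type and returns anything else
// unchanged; the vendored version additionally looks through type parameters
// via typeutil.CoreType before this check.
func deref(typ types.Type) types.Type {
	if p, ok := typ.Underlying().(*types.Pointer); ok {
		return p.Elem()
	}
	return typ
}

func main() {
	intT := types.Typ[types.Int]
	fmt.Println(deref(types.NewPointer(intT))) // int
	fmt.Println(deref(intT))                   // int
}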
@@ -87,3 +102,48 @@ func makeLen(T types.Type) *Builtin { sig: types.NewSignature(nil, lenParams, lenResults, false), } } + +type StackMap struct { + m []map[Value]Value +} + +func (m *StackMap) Push() { + m.m = append(m.m, map[Value]Value{}) +} + +func (m *StackMap) Pop() { + m.m = m.m[:len(m.m)-1] +} + +func (m *StackMap) Get(key Value) (Value, bool) { + for i := len(m.m) - 1; i >= 0; i-- { + if v, ok := m.m[i][key]; ok { + return v, true + } + } + return nil, false +} + +func (m *StackMap) Set(k Value, v Value) { + m.m[len(m.m)-1][k] = v +} + +// Unwrap recursively unwraps Sigma and Copy nodes. +func Unwrap(v Value) Value { + for { + switch vv := v.(type) { + case *Sigma: + v = vv.X + case *Copy: + v = vv.X + default: + return v + } + } +} + +func assert(x bool) { + if !x { + panic("failed assertion") + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/wrappers.go b/vendor/honnef.co/go/tools/go/ir/wrappers.go index 1d51b5dc9..6082d07e1 100644 --- a/vendor/honnef.co/go/tools/go/ir/wrappers.go +++ b/vendor/honnef.co/go/tools/go/ir/wrappers.go @@ -21,8 +21,9 @@ package ir import ( "fmt" - "go/types" + + "golang.org/x/exp/typeparams" ) // -- wrappers ----------------------------------------------------------- @@ -288,3 +289,99 @@ type selectionKey struct { index string indirect bool } + +// makeInstance creates a wrapper function with signature sig that calls the generic function fn. +// If targs is not nil, fn is a function and targs describes the concrete type arguments. +// If targs is nil, fn is a method and the type arguments are derived from the receiver. +func makeInstance(prog *Program, fn *Function, sig *types.Signature, targs *typeparams.TypeList) *Function { + if sig.Recv() != nil { + assert(targs == nil) + // Methods don't have their own type parameters, but the receiver does + targs = typeparams.NamedTypeArgs(deref(sig.Recv().Type()).(*types.Named)) + } else { + assert(targs != nil) + } + + wrapper := fn.generics.At(targs) + if wrapper != nil { + return wrapper + } + + var name string + if sig.Recv() != nil { + name = fn.name + } else { + name = fmt.Sprintf("%s$generic#%d", fn.name, fn.generics.Len()) + } + w := &Function{ + name: name, + object: fn.object, + Signature: sig, + Synthetic: SyntheticGeneric, + Prog: prog, + functionBody: new(functionBody), + } + w.initHTML(prog.PrintFunc) + w.startBody() + if sig.Recv() != nil { + w.addParamObj(sig.Recv(), nil) + } + createParams(w, 0) + var c Call + c.Call.Value = fn + tresults := fn.Signature.Results() + if tresults.Len() == 1 { + c.typ = tresults.At(0).Type() + } else { + c.typ = tresults + } + + changeType := func(v Value, typ types.Type) Value { + if types.Identical(v.Type(), typ) { + return v + } + var c ChangeType + c.X = v + c.typ = typ + return w.emit(&c, nil) + } + + for i, arg := range w.Params { + if sig.Recv() != nil { + if i == 0 { + c.Call.Args = append(c.Call.Args, changeType(w.Params[0], fn.Signature.Recv().Type())) + } else { + c.Call.Args = append(c.Call.Args, changeType(arg, fn.Signature.Params().At(i-1).Type())) + } + } else { + c.Call.Args = append(c.Call.Args, changeType(arg, fn.Signature.Params().At(i).Type())) + } + } + for i := 0; i < targs.Len(); i++ { + arg := targs.At(i) + c.Call.TypeArgs = append(c.Call.TypeArgs, arg) + } + results := w.emit(&c, nil) + var ret Return + switch tresults.Len() { + case 0: + case 1: + ret.Results = []Value{changeType(results, sig.Results().At(0).Type())} + default: + for i := 0; i < tresults.Len(); i++ { + v := emitExtract(w, results, i, nil) + ret.Results = 
append(ret.Results, changeType(v, sig.Results().At(i).Type())) + } + } + + w.Exit = w.newBasicBlock("exit") + emitJump(w, w.Exit, nil) + w.currentBlock = w.Exit + w.emit(&ret, nil) + w.currentBlock = nil + + w.finishBody() + + fn.generics.Set(targs, w) + return w +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/ext.go b/vendor/honnef.co/go/tools/go/types/typeutil/ext.go new file mode 100644 index 000000000..bc553ec01 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/ext.go @@ -0,0 +1,18 @@ +package typeutil + +import ( + "fmt" + "go/types" +) + +type Iterator struct { + elem types.Type +} + +func (t *Iterator) Underlying() types.Type { return t } +func (t *Iterator) String() string { return fmt.Sprintf("iterator(%s)", t.elem) } +func (t *Iterator) Elem() types.Type { return t.elem } + +func NewIterator(elem types.Type) *Iterator { + return &Iterator{elem: elem} +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go b/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go new file mode 100644 index 000000000..c05af1d24 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/typeparams.go @@ -0,0 +1,106 @@ +package typeutil + +import ( + "errors" + "go/types" + + "golang.org/x/exp/typeparams" +) + +type TypeSet struct { + Terms []*typeparams.Term + empty bool +} + +func NewTypeSet(typ types.Type) TypeSet { + terms, err := typeparams.NormalTerms(typ) + if err != nil { + if errors.Is(err, typeparams.ErrEmptyTypeSet) { + return TypeSet{nil, true} + } else { + // We couldn't determine the type set. Assume it's all types. + return TypeSet{nil, false} + } + } + return TypeSet{terms, false} +} + +// CoreType returns the type set's core type, or nil if it has none. +// The function only looks at type terms and may thus return core types for some empty type sets, such as +// 'interface { map[int]string; foo() }' +func (ts TypeSet) CoreType() types.Type { + if len(ts.Terms) == 0 { + // Either the type set is empty, or it isn't constrained. Either way it doesn't have a core type. + return nil + } + typ := ts.Terms[0].Type().Underlying() + for _, term := range ts.Terms[1:] { + ut := term.Type().Underlying() + if types.Identical(typ, ut) { + continue + } + + ch1, ok := typ.(*types.Chan) + if !ok { + return nil + } + ch2, ok := ut.(*types.Chan) + if !ok { + return nil + } + if ch1.Dir() == types.SendRecv { + // typ is currently a bidirectional channel. The term's type is either also bidirectional, or + // unidirectional. Use the term's type. + typ = ut + } else if ch1.Dir() != ch2.Dir() { + // typ is not bidirectional and typ and term disagree about the direction + return nil + } + } + return typ +} + +// CoreType is a wrapper for NewTypeSet(typ).CoreType() +func CoreType(typ types.Type) types.Type { + return NewTypeSet(typ).CoreType() +} + +// All calls fn for each term in the type set and reports whether all invocations returned true. +// If the type set is empty or unconstrained, All immediately returns false. +func (ts TypeSet) All(fn func(*typeparams.Term) bool) bool { + if len(ts.Terms) == 0 { + return false + } + for _, term := range ts.Terms { + if !fn(term) { + return false + } + } + return true +} + +// Any calls fn for each term in the type set and reports whether any invocation returned true. +// It stops after the first call that returned true. 
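For illustration (not part of the vendored diff): CoreType above yields a single underlying type only when every term of the constraint's type set agrees (with a special case for channel directions), and returns nil otherwise. A small source-level picture of constraints with and without a core type (hypothetical code, Go 1.18+):

package main

import "fmt"

// IntSlices has the single underlying type []int, so its core type is []int
// and indexing a value of type parameter T is well-defined.
type IntSlices interface{ ~[]int }

// Mixed combines a slice and a map with different element types; it has no
// core type, the compiler rejects indexing through it, and the CoreType
// helper above would report nil for it.
type Mixed interface{ ~[]int | ~map[int]string }

func first[T IntSlices](s T) int { return s[0] }

func main() {
	type mySlice []int
	fmt.Println(first(mySlice{7, 8})) // 7
}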
+func (ts TypeSet) Any(fn func(*typeparams.Term) bool) bool { + for _, term := range ts.Terms { + if fn(term) { + return true + } + } + return false +} + +// All is a wrapper for NewTypeSet(typ).All(fn). +func All(typ types.Type, fn func(*typeparams.Term) bool) bool { + return NewTypeSet(typ).All(fn) +} + +// Any is a wrapper for NewTypeSet(typ).Any(fn). +func Any(typ types.Type, fn func(*typeparams.Term) bool) bool { + return NewTypeSet(typ).Any(fn) +} + +func IsSlice(term *typeparams.Term) bool { + _, ok := term.Type().Underlying().(*types.Slice) + return ok +} diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go index 2e839614a..63c93ac27 100644 --- a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go +++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go @@ -13,6 +13,7 @@ import ( "honnef.co/go/tools/go/ast/astutil" "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "golang.org/x/tools/go/analysis" @@ -34,11 +35,11 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { if val == nil { return true } - Tsrc, ok := val.X.Type().Underlying().(*types.Basic) + Tsrc, ok := typeutil.CoreType(val.X.Type()).(*types.Basic) if !ok || Tsrc.Kind() != types.String { return true } - Tdst, ok := val.Type().(*types.Slice) + Tdst, ok := typeutil.CoreType(val.Type()).(*types.Slice) if !ok { return true } @@ -90,7 +91,7 @@ func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { // It does not flag variables under the following conditions, unless flagHelpfulTypes is true, to reduce the number of noisy positives: // - packages that import syscall or unsafe – these sometimes use this form of assignment to make sure types are as expected // - variables named the blank identifier – a pattern used to confirm the types of variables -// - named untyped constants on the rhs – the explicitness might aid readability +// - untyped expressions on the rhs – the explicitness might aid readability func RedundantTypeInDeclarationChecker(verb string, flagHelpfulTypes bool) *analysis.Analyzer { fn := func(pass *analysis.Pass) (interface{}, error) { eval := func(expr ast.Expr) (types.TypeAndValue, error) { @@ -153,6 +154,10 @@ func RedundantTypeInDeclarationChecker(verb string, flagHelpfulTypes bool) *anal panic(err) } if b, ok := tv.Type.(*types.Basic); ok && (b.Info()&types.IsUntyped) != 0 { + if Tlhs != types.Default(b) { + // The rhs is untyped and its default type differs from the explicit type on the lhs + continue specLoop + } switch v := v.(type) { case *ast.Ident: // Only flag named constant rhs if it's a predeclared identifier. @@ -160,14 +165,11 @@ func RedundantTypeInDeclarationChecker(verb string, flagHelpfulTypes bool) *anal if pass.TypesInfo.ObjectOf(v).Pkg() != nil && !flagHelpfulTypes { continue specLoop } - case *ast.SelectorExpr: - // Constant selector expressions can only refer to named constants that arent predeclared. 
- if !flagHelpfulTypes { - continue specLoop - } + case *ast.BasicLit: + // Do flag basic literals default: - // don't skip if the type on the lhs matches the default type of the constant - if Tlhs != types.Default(b) { + // Don't flag untyped rhs expressions unless flagHelpfulTypes is set + if !flagHelpfulTypes { continue specLoop } } diff --git a/vendor/honnef.co/go/tools/knowledge/arg.go b/vendor/honnef.co/go/tools/knowledge/arg.go index 7ac0b358d..1fe61c1d5 100644 --- a/vendor/honnef.co/go/tools/knowledge/arg.go +++ b/vendor/honnef.co/go/tools/knowledge/arg.go @@ -1,6 +1,6 @@ package knowledge -var args = map[string]int{ +var Args = map[string]int{ "(*encoding/json.Decoder).Decode.v": 0, "(*encoding/json.Encoder).Encode.v": 0, "(*encoding/xml.Decoder).Decode.v": 0, @@ -55,8 +55,13 @@ var args = map[string]int{ "xml.Unmarshal.v": 1, } +// Arg turns the name of an argument into an argument index. +// Indices are zero-based and method receivers do not count as arguments. +// +// Arg refers to a manually compiled mapping (see the Args variable.) +// Modify the knowledge package to add new arguments. func Arg(name string) int { - n, ok := args[name] + n, ok := Args[name] if !ok { panic("unknown argument " + name) } diff --git a/vendor/honnef.co/go/tools/knowledge/deprecated.go b/vendor/honnef.co/go/tools/knowledge/deprecated.go index 8fa84fd44..7412a86e9 100644 --- a/vendor/honnef.co/go/tools/knowledge/deprecated.go +++ b/vendor/honnef.co/go/tools/knowledge/deprecated.go @@ -1,18 +1,26 @@ package knowledge const ( - DeprecatedNeverUse = -1 + // DeprecatedNeverUse indicates that an API should never be used, regardless of Go version. + DeprecatedNeverUse = -1 + // DeprecatedUseNoLonger indicates that an API has no use anymore. DeprecatedUseNoLonger = -2 ) +// Deprecation describes when a Go API has been deprecated. type Deprecation struct { - DeprecatedSince int + // The minor Go version since which this API has been deprecated. + DeprecatedSince int + // The minor Go version since which an alternative API has been available. + // May also be one of DeprecatedNeverUse or DeprecatedUseNoLonger. AlternativeAvailableSince int } // go/importer.ForCompiler contains "Deprecated:", but it refers to a single argument, not the whole function. // Luckily, the notice starts in the middle of a paragraph, and as such isn't detected by us. +// StdlibDeprecations contains a mapping of Go API (such as variables, methods, or fields, among others) +// to information about when it has been deprecated. var StdlibDeprecations = map[string]Deprecation{ // FIXME(dh): AllowBinary isn't being detected as deprecated // because the comment has a newline right after "Deprecated:" @@ -144,74 +152,102 @@ var StdlibDeprecations = map[string]Deprecation{ // Not marked as deprecated with a recognizable header, but deprecated nonetheless. "io/ioutil": {16, 16}, + + "bytes.Title": {18, 0}, + "strings.Title": {18, 0}, + "(crypto/tls.Config).PreferServerCipherSuites": {18, DeprecatedUseNoLonger}, + // It's not clear if Subjects was okay to use in the past, so we err on the less noisy side of assuming that it was. + "(*crypto/x509.CertPool).Subjects": {18, DeprecatedUseNoLonger}, + "go/types.NewSignature": {18, 18}, + "(net.Error).Temporary": {18, DeprecatedNeverUse}, + // InterfaceData is another tricky case. It was deprecated in Go 1.18, but has been useless since Go 1.4, and an + // "alternative" (using your own unsafe hacks) has existed forever. 
We don't want to get into hairsplitting with + // users who somehow successfully used this between 1.4 and 1.18, so we'll just tag it as deprecated since 1.18. + "(reflect.Value).InterfaceData": {18, 18}, + + // The following objects are only deprecated on Windows. + "syscall.Syscall": {18, 18}, + "syscall.Syscall12": {18, 18}, + "syscall.Syscall15": {18, 18}, + "syscall.Syscall18": {18, 18}, + "syscall.Syscall6": {18, 18}, + "syscall.Syscall9": {18, 18}, } -// Last imported from Go at 32b73ae18026e8a9dc4c5aa49999b1ea445bc68c with the following numbers of deprecations: +// Last imported from Go at 4aa1efed4853ea067d665a952eee77c52faac774 with the following numbers of deprecations: // -// chulak go@master ./src $ rg -c "Deprecated: " -// vendor/golang.org/x/crypto/curve25519/curve25519.go:1 -// vendor/golang.org/x/text/transform/transform.go:1 +// archive/tar/common.go:2 +// archive/zip/struct.go:6 +// bytes/bytes.go:1 +// cmd/compile/internal/ir/expr.go:1 +// cmd/compile/internal/ir/type.go:1 +// cmd/compile/internal/syntax/walk.go:1 // cmd/compile/internal/types/sym.go:2 -// syscall/route_netbsd.go:1 -// syscall/route_bsd.go:7 -// syscall/lsf_linux.go:6 -// syscall/exec_unix.go:1 -// syscall/route_darwin.go:1 -// syscall/route_freebsd.go:2 -// syscall/route_dragonfly.go:2 -// syscall/bpf_darwin.go:18 -// syscall/route_openbsd.go:1 -// syscall/syscall_windows.go:6 -// syscall/syscall.go:3 -// syscall/bpf_bsd.go:18 -// cmd/compile/internal/types2/type.go:2 -// syscall/exec_plan9.go:1 -// cmd/internal/obj/textflag.go:1 -// cmd/internal/obj/link.go:5 -// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go:1 -// cmd/vendor/golang.org/x/mod/semver/semver.go:1 -// cmd/vendor/golang.org/x/sys/windows/syscall_windows.go:2 -// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go:1 -// cmd/vendor/golang.org/x/mod/modfile/rule.go:2 -// cmd/vendor/golang.org/x/sys/windows/security_windows.go:1 // cmd/go/internal/modcmd/edit.go:1 // cmd/go/testdata/mod/example.com_deprecated_a_v1.9.0.txt:2 -// cmd/go/testdata/mod/example.com_undeprecated_v1.0.0.txt:2 // cmd/go/testdata/mod/example.com_deprecated_b_v1.9.0.txt:2 -// encoding/csv/reader.go:2 -// encoding/json/decode.go:1 -// encoding/json/encode.go:1 -// cmd/go/testdata/script/mod_list_deprecated.txt:2 +// cmd/go/testdata/mod/example.com_undeprecated_v1.0.0.txt:2 // cmd/go/testdata/script/mod_deprecate_message.txt:4 -// cmd/go/testdata/script/mod_list_deprecated_replace.txt:1 -// runtime/cpuprof.go:1 // cmd/go/testdata/script/mod_edit.txt:1 -// crypto/tls/common.go:6 -// crypto/rc4/rc4.go:1 +// cmd/go/testdata/script/mod_list_deprecated.txt:2 +// cmd/go/testdata/script/mod_list_deprecated_replace.txt:1 +// cmd/internal/obj/link.go:5 +// cmd/internal/obj/textflag.go:1 +// cmd/vendor/golang.org/x/mod/modfile/rule.go:2 +// cmd/vendor/golang.org/x/mod/semver/semver.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go:1 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go:1 +// cmd/vendor/golang.org/x/sys/windows/security_windows.go:1 +// cmd/vendor/golang.org/x/sys/windows/syscall_windows.go:2 +// compress/flate/inflate.go:2 // crypto/dsa/dsa.go:1 -// path/filepath/path_unix.go:1 -// path/filepath/path_windows.go:1 -// path/filepath/path_plan9.go:1 -// regexp/regexp.go:1 +// crypto/rc4/rc4.go:1 +// crypto/tls/common.go:7 +// crypto/x509/cert_pool.go:1 // crypto/x509/pem_decrypt.go:3 // crypto/x509/x509.go:1 -// archive/zip/struct.go:6 -// archive/tar/common.go:2 +// database/sql/driver/driver.go:6 // debug/gosym/pclntab.go:2 -// 
compress/flate/inflate.go:2 +// encoding/csv/reader.go:2 +// encoding/json/decode.go:1 +// encoding/json/encode.go:1 +// go/doc/doc.go:1 +// go/importer/importer.go:2 +// go/types/errorcodes.go:1 +// go/types/interface.go:2 +// go/types/signature.go:1 // image/geom.go:2 // image/jpeg/reader.go:1 -// os/file.go:1 // net/dial.go:2 -// net/http/server.go:2 -// net/http/socks_bundle.go:1 +// net/http/httptest/recorder.go:1 // net/http/httputil/persist.go:8 // net/http/request.go:6 +// net/http/server.go:2 +// net/http/socks_bundle.go:1 // net/http/transport.go:3 -// net/http/httptest/recorder.go:1 -// go/doc/doc.go:1 -// go/types/errorcodes.go:1 -// go/types/type.go:2 -// database/sql/driver/driver.go:6 +// net/net.go:1 +// os/file.go:1 +// path/filepath/path_plan9.go:1 +// path/filepath/path_unix.go:1 +// path/filepath/path_windows.go:1 +// reflect/value.go:1 +// regexp/regexp.go:1 +// runtime/cpuprof.go:1 +// strings/strings.go:1 +// syscall/bpf_bsd.go:18 +// syscall/bpf_darwin.go:18 +// syscall/dll_windows.go:6 +// syscall/exec_plan9.go:1 +// syscall/exec_unix.go:1 +// syscall/lsf_linux.go:6 +// syscall/route_bsd.go:7 +// syscall/route_darwin.go:1 +// syscall/route_dragonfly.go:2 +// syscall/route_freebsd.go:2 +// syscall/route_netbsd.go:1 +// syscall/route_openbsd.go:1 +// syscall/syscall.go:3 +// syscall/syscall_windows.go:6 // text/template/parse/node.go:5 -// go/importer/importer.go:2 +// vendor/golang.org/x/crypto/curve25519/curve25519.go:1 +// vendor/golang.org/x/text/transform/transform.go:1 diff --git a/vendor/honnef.co/go/tools/knowledge/doc.go b/vendor/honnef.co/go/tools/knowledge/doc.go new file mode 100644 index 000000000..72ff52444 --- /dev/null +++ b/vendor/honnef.co/go/tools/knowledge/doc.go @@ -0,0 +1,2 @@ +// Package knowledge contains manually collected information about Go APIs. 
+package knowledge diff --git a/vendor/honnef.co/go/tools/pattern/convert.go b/vendor/honnef.co/go/tools/pattern/convert.go index dfcd1560d..bfbd07831 100644 --- a/vendor/honnef.co/go/tools/pattern/convert.go +++ b/vendor/honnef.co/go/tools/pattern/convert.go @@ -6,6 +6,8 @@ import ( "go/token" "go/types" "reflect" + + "golang.org/x/exp/typeparams" ) var astTypes = map[string]reflect.Type{ @@ -13,6 +15,7 @@ var astTypes = map[string]reflect.Type{ "RangeStmt": reflect.TypeOf(ast.RangeStmt{}), "AssignStmt": reflect.TypeOf(ast.AssignStmt{}), "IndexExpr": reflect.TypeOf(ast.IndexExpr{}), + "IndexListExpr": reflect.TypeOf(typeparams.IndexListExpr{}), "Ident": reflect.TypeOf(ast.Ident{}), "ValueSpec": reflect.TypeOf(ast.ValueSpec{}), "GenDecl": reflect.TypeOf(ast.GenDecl{}), diff --git a/vendor/honnef.co/go/tools/pattern/fuzz.go b/vendor/honnef.co/go/tools/pattern/fuzz.go index 52e7df974..2afbb5242 100644 --- a/vendor/honnef.co/go/tools/pattern/fuzz.go +++ b/vendor/honnef.co/go/tools/pattern/fuzz.go @@ -1,3 +1,4 @@ +//go:build gofuzz // +build gofuzz package pattern diff --git a/vendor/honnef.co/go/tools/pattern/match.go b/vendor/honnef.co/go/tools/pattern/match.go index ebbbdd469..4d939b2c6 100644 --- a/vendor/honnef.co/go/tools/pattern/match.go +++ b/vendor/honnef.co/go/tools/pattern/match.go @@ -6,6 +6,8 @@ import ( "go/token" "go/types" "reflect" + + "golang.org/x/exp/typeparams" ) var tokensByString = map[string]Token{ @@ -428,6 +430,8 @@ func (s String) Match(m *Matcher, node interface{}) (interface{}, bool) { return nil, false case string: return o, string(s) == o + case types.TypeAndValue: + return o, o.Value != nil && o.Value.String() == string(s) default: return nil, false } @@ -474,31 +478,53 @@ func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { var name string var obj types.Object - r, ok := match(m, Or{Nodes: []Node{Ident{Any{}}, SelectorExpr{Any{}, Any{}}}}, node) + base := []Node{ + Ident{Any{}}, + SelectorExpr{Any{}, Any{}}, + } + p := Or{ + Nodes: append(base, + IndexExpr{Or{Nodes: base}, Any{}}, + IndexListExpr{Or{Nodes: base}, Any{}})} + + r, ok := match(m, p, node) if !ok { return nil, false } - switch r := r.(type) { + fun := r + switch idx := fun.(type) { + case *ast.IndexExpr: + fun = idx.X + case *typeparams.IndexListExpr: + fun = idx.X + } + + switch fun := fun.(type) { case *ast.Ident: - obj = m.TypesInfo.ObjectOf(r) + obj = m.TypesInfo.ObjectOf(fun) switch obj := obj.(type) { case *types.Func: // OPT(dh): optimize this similar to code.FuncName name = obj.FullName() case *types.Builtin: name = obj.Name() + case *types.TypeName: + name = types.TypeString(obj.Type(), nil) default: return nil, false } case *ast.SelectorExpr: - var ok bool - obj, ok = m.TypesInfo.ObjectOf(r.Sel).(*types.Func) - if !ok { + obj = m.TypesInfo.ObjectOf(fun.Sel) + switch obj := obj.(type) { + case *types.Func: + // OPT(dh): optimize this similar to code.FuncName + name = obj.FullName() + case *types.TypeName: + name = types.TypeString(obj.Type(), nil) + default: return nil, false } - // OPT(dh): optimize this similar to code.FuncName - name = obj.(*types.Func).FullName() default: panic("unreachable") } @@ -525,6 +551,51 @@ func (not Not) Match(m *Matcher, node interface{}) (interface{}, bool) { return node, true } +var integerLiteralQ = MustParse(`(Or (BasicLit "INT" _) (UnaryExpr (Or "+" "-") (IntegerLiteral _)))`) + +func (lit IntegerLiteral) Match(m *Matcher, node interface{}) (interface{}, bool) { + matched, ok := match(m, integerLiteralQ.Root, node) + if !ok { + 
return nil, false + } + tv, ok := m.TypesInfo.Types[matched.(ast.Expr)] + if !ok { + return nil, false + } + if tv.Value == nil { + return nil, false + } + _, ok = match(m, lit.Value, tv) + return matched, ok +} + +func (texpr TrulyConstantExpression) Match(m *Matcher, node interface{}) (interface{}, bool) { + expr, ok := node.(ast.Expr) + if !ok { + return nil, false + } + tv, ok := m.TypesInfo.Types[expr] + if !ok { + return nil, false + } + if tv.Value == nil { + return nil, false + } + truly := true + ast.Inspect(expr, func(node ast.Node) bool { + if _, ok := node.(*ast.Ident); ok { + truly = false + return false + } + return true + }) + if !truly { + return nil, false + } + _, ok = match(m, texpr.Value, tv) + return expr, ok +} + var ( // Types of fields in go/ast structs that we want to skip rtTokPos = reflect.TypeOf(token.Pos(0)) @@ -544,4 +615,6 @@ var ( _ matcher = Function{} _ matcher = Or{} _ matcher = Not{} + _ matcher = IntegerLiteral{} + _ matcher = TrulyConstantExpression{} ) diff --git a/vendor/honnef.co/go/tools/pattern/parser.go b/vendor/honnef.co/go/tools/pattern/parser.go index 68cc54b2e..a21f79640 100644 --- a/vendor/honnef.co/go/tools/pattern/parser.go +++ b/vendor/honnef.co/go/tools/pattern/parser.go @@ -92,54 +92,56 @@ var allTypes = []reflect.Type{ } var nodeToASTTypes = map[reflect.Type][]reflect.Type{ - reflect.TypeOf(String("")): nil, - reflect.TypeOf(Token(0)): nil, - reflect.TypeOf(List{}): {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))}, - reflect.TypeOf(Builtin{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(Object{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(Function{}): {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))}, - reflect.TypeOf(Any{}): allTypes, - reflect.TypeOf(RangeStmt{}): {reflect.TypeOf((*ast.RangeStmt)(nil))}, - reflect.TypeOf(AssignStmt{}): {reflect.TypeOf((*ast.AssignStmt)(nil))}, - reflect.TypeOf(IndexExpr{}): {reflect.TypeOf((*ast.IndexExpr)(nil))}, - reflect.TypeOf(Ident{}): {reflect.TypeOf((*ast.Ident)(nil))}, - reflect.TypeOf(ValueSpec{}): {reflect.TypeOf((*ast.ValueSpec)(nil))}, - reflect.TypeOf(GenDecl{}): {reflect.TypeOf((*ast.GenDecl)(nil))}, - reflect.TypeOf(BinaryExpr{}): {reflect.TypeOf((*ast.BinaryExpr)(nil))}, - reflect.TypeOf(ForStmt{}): {reflect.TypeOf((*ast.ForStmt)(nil))}, - reflect.TypeOf(ArrayType{}): {reflect.TypeOf((*ast.ArrayType)(nil))}, - reflect.TypeOf(DeferStmt{}): {reflect.TypeOf((*ast.DeferStmt)(nil))}, - reflect.TypeOf(MapType{}): {reflect.TypeOf((*ast.MapType)(nil))}, - reflect.TypeOf(ReturnStmt{}): {reflect.TypeOf((*ast.ReturnStmt)(nil))}, - reflect.TypeOf(SliceExpr{}): {reflect.TypeOf((*ast.SliceExpr)(nil))}, - reflect.TypeOf(StarExpr{}): {reflect.TypeOf((*ast.StarExpr)(nil))}, - reflect.TypeOf(UnaryExpr{}): {reflect.TypeOf((*ast.UnaryExpr)(nil))}, - reflect.TypeOf(SendStmt{}): {reflect.TypeOf((*ast.SendStmt)(nil))}, - reflect.TypeOf(SelectStmt{}): {reflect.TypeOf((*ast.SelectStmt)(nil))}, - reflect.TypeOf(ImportSpec{}): {reflect.TypeOf((*ast.ImportSpec)(nil))}, - reflect.TypeOf(IfStmt{}): {reflect.TypeOf((*ast.IfStmt)(nil))}, - reflect.TypeOf(GoStmt{}): {reflect.TypeOf((*ast.GoStmt)(nil))}, - reflect.TypeOf(Field{}): {reflect.TypeOf((*ast.Field)(nil))}, - reflect.TypeOf(SelectorExpr{}): {reflect.TypeOf((*ast.SelectorExpr)(nil))}, - reflect.TypeOf(StructType{}): {reflect.TypeOf((*ast.StructType)(nil))}, - reflect.TypeOf(KeyValueExpr{}): {reflect.TypeOf((*ast.KeyValueExpr)(nil))}, - reflect.TypeOf(FuncType{}): 
{reflect.TypeOf((*ast.FuncType)(nil))}, - reflect.TypeOf(FuncLit{}): {reflect.TypeOf((*ast.FuncLit)(nil))}, - reflect.TypeOf(FuncDecl{}): {reflect.TypeOf((*ast.FuncDecl)(nil))}, - reflect.TypeOf(ChanType{}): {reflect.TypeOf((*ast.ChanType)(nil))}, - reflect.TypeOf(CallExpr{}): {reflect.TypeOf((*ast.CallExpr)(nil))}, - reflect.TypeOf(CaseClause{}): {reflect.TypeOf((*ast.CaseClause)(nil))}, - reflect.TypeOf(CommClause{}): {reflect.TypeOf((*ast.CommClause)(nil))}, - reflect.TypeOf(CompositeLit{}): {reflect.TypeOf((*ast.CompositeLit)(nil))}, - reflect.TypeOf(EmptyStmt{}): {reflect.TypeOf((*ast.EmptyStmt)(nil))}, - reflect.TypeOf(SwitchStmt{}): {reflect.TypeOf((*ast.SwitchStmt)(nil))}, - reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))}, - reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))}, - reflect.TypeOf(TypeSpec{}): {reflect.TypeOf((*ast.TypeSpec)(nil))}, - reflect.TypeOf(InterfaceType{}): {reflect.TypeOf((*ast.InterfaceType)(nil))}, - reflect.TypeOf(BranchStmt{}): {reflect.TypeOf((*ast.BranchStmt)(nil))}, - reflect.TypeOf(IncDecStmt{}): {reflect.TypeOf((*ast.IncDecStmt)(nil))}, - reflect.TypeOf(BasicLit{}): {reflect.TypeOf((*ast.BasicLit)(nil))}, + reflect.TypeOf(String("")): nil, + reflect.TypeOf(Token(0)): nil, + reflect.TypeOf(List{}): {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))}, + reflect.TypeOf(Builtin{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(Object{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(Function{}): {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))}, + reflect.TypeOf(Any{}): allTypes, + reflect.TypeOf(RangeStmt{}): {reflect.TypeOf((*ast.RangeStmt)(nil))}, + reflect.TypeOf(AssignStmt{}): {reflect.TypeOf((*ast.AssignStmt)(nil))}, + reflect.TypeOf(IndexExpr{}): {reflect.TypeOf((*ast.IndexExpr)(nil))}, + reflect.TypeOf(Ident{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(ValueSpec{}): {reflect.TypeOf((*ast.ValueSpec)(nil))}, + reflect.TypeOf(GenDecl{}): {reflect.TypeOf((*ast.GenDecl)(nil))}, + reflect.TypeOf(BinaryExpr{}): {reflect.TypeOf((*ast.BinaryExpr)(nil))}, + reflect.TypeOf(ForStmt{}): {reflect.TypeOf((*ast.ForStmt)(nil))}, + reflect.TypeOf(ArrayType{}): {reflect.TypeOf((*ast.ArrayType)(nil))}, + reflect.TypeOf(DeferStmt{}): {reflect.TypeOf((*ast.DeferStmt)(nil))}, + reflect.TypeOf(MapType{}): {reflect.TypeOf((*ast.MapType)(nil))}, + reflect.TypeOf(ReturnStmt{}): {reflect.TypeOf((*ast.ReturnStmt)(nil))}, + reflect.TypeOf(SliceExpr{}): {reflect.TypeOf((*ast.SliceExpr)(nil))}, + reflect.TypeOf(StarExpr{}): {reflect.TypeOf((*ast.StarExpr)(nil))}, + reflect.TypeOf(UnaryExpr{}): {reflect.TypeOf((*ast.UnaryExpr)(nil))}, + reflect.TypeOf(SendStmt{}): {reflect.TypeOf((*ast.SendStmt)(nil))}, + reflect.TypeOf(SelectStmt{}): {reflect.TypeOf((*ast.SelectStmt)(nil))}, + reflect.TypeOf(ImportSpec{}): {reflect.TypeOf((*ast.ImportSpec)(nil))}, + reflect.TypeOf(IfStmt{}): {reflect.TypeOf((*ast.IfStmt)(nil))}, + reflect.TypeOf(GoStmt{}): {reflect.TypeOf((*ast.GoStmt)(nil))}, + reflect.TypeOf(Field{}): {reflect.TypeOf((*ast.Field)(nil))}, + reflect.TypeOf(SelectorExpr{}): {reflect.TypeOf((*ast.SelectorExpr)(nil))}, + reflect.TypeOf(StructType{}): {reflect.TypeOf((*ast.StructType)(nil))}, + reflect.TypeOf(KeyValueExpr{}): {reflect.TypeOf((*ast.KeyValueExpr)(nil))}, + reflect.TypeOf(FuncType{}): {reflect.TypeOf((*ast.FuncType)(nil))}, + reflect.TypeOf(FuncLit{}): {reflect.TypeOf((*ast.FuncLit)(nil))}, + reflect.TypeOf(FuncDecl{}): 
{reflect.TypeOf((*ast.FuncDecl)(nil))}, + reflect.TypeOf(ChanType{}): {reflect.TypeOf((*ast.ChanType)(nil))}, + reflect.TypeOf(CallExpr{}): {reflect.TypeOf((*ast.CallExpr)(nil))}, + reflect.TypeOf(CaseClause{}): {reflect.TypeOf((*ast.CaseClause)(nil))}, + reflect.TypeOf(CommClause{}): {reflect.TypeOf((*ast.CommClause)(nil))}, + reflect.TypeOf(CompositeLit{}): {reflect.TypeOf((*ast.CompositeLit)(nil))}, + reflect.TypeOf(EmptyStmt{}): {reflect.TypeOf((*ast.EmptyStmt)(nil))}, + reflect.TypeOf(SwitchStmt{}): {reflect.TypeOf((*ast.SwitchStmt)(nil))}, + reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))}, + reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))}, + reflect.TypeOf(TypeSpec{}): {reflect.TypeOf((*ast.TypeSpec)(nil))}, + reflect.TypeOf(InterfaceType{}): {reflect.TypeOf((*ast.InterfaceType)(nil))}, + reflect.TypeOf(BranchStmt{}): {reflect.TypeOf((*ast.BranchStmt)(nil))}, + reflect.TypeOf(IncDecStmt{}): {reflect.TypeOf((*ast.IncDecStmt)(nil))}, + reflect.TypeOf(BasicLit{}): {reflect.TypeOf((*ast.BasicLit)(nil))}, + reflect.TypeOf(IntegerLiteral{}): {reflect.TypeOf((*ast.BasicLit)(nil)), reflect.TypeOf((*ast.UnaryExpr)(nil))}, + reflect.TypeOf(TrulyConstantExpression{}): allTypes, // this is an over-approximation, which is fine } var requiresTypeInfo = map[string]bool{ @@ -306,56 +308,58 @@ func (p *Parser) populateNode(typ string, objs []Node) (Node, error) { } var structNodes = map[string]reflect.Type{ - "Any": reflect.TypeOf(Any{}), - "Ellipsis": reflect.TypeOf(Ellipsis{}), - "List": reflect.TypeOf(List{}), - "Binding": reflect.TypeOf(Binding{}), - "RangeStmt": reflect.TypeOf(RangeStmt{}), - "AssignStmt": reflect.TypeOf(AssignStmt{}), - "IndexExpr": reflect.TypeOf(IndexExpr{}), - "Ident": reflect.TypeOf(Ident{}), - "Builtin": reflect.TypeOf(Builtin{}), - "ValueSpec": reflect.TypeOf(ValueSpec{}), - "GenDecl": reflect.TypeOf(GenDecl{}), - "BinaryExpr": reflect.TypeOf(BinaryExpr{}), - "ForStmt": reflect.TypeOf(ForStmt{}), - "ArrayType": reflect.TypeOf(ArrayType{}), - "DeferStmt": reflect.TypeOf(DeferStmt{}), - "MapType": reflect.TypeOf(MapType{}), - "ReturnStmt": reflect.TypeOf(ReturnStmt{}), - "SliceExpr": reflect.TypeOf(SliceExpr{}), - "StarExpr": reflect.TypeOf(StarExpr{}), - "UnaryExpr": reflect.TypeOf(UnaryExpr{}), - "SendStmt": reflect.TypeOf(SendStmt{}), - "SelectStmt": reflect.TypeOf(SelectStmt{}), - "ImportSpec": reflect.TypeOf(ImportSpec{}), - "IfStmt": reflect.TypeOf(IfStmt{}), - "GoStmt": reflect.TypeOf(GoStmt{}), - "Field": reflect.TypeOf(Field{}), - "SelectorExpr": reflect.TypeOf(SelectorExpr{}), - "StructType": reflect.TypeOf(StructType{}), - "KeyValueExpr": reflect.TypeOf(KeyValueExpr{}), - "FuncType": reflect.TypeOf(FuncType{}), - "FuncLit": reflect.TypeOf(FuncLit{}), - "FuncDecl": reflect.TypeOf(FuncDecl{}), - "ChanType": reflect.TypeOf(ChanType{}), - "CallExpr": reflect.TypeOf(CallExpr{}), - "CaseClause": reflect.TypeOf(CaseClause{}), - "CommClause": reflect.TypeOf(CommClause{}), - "CompositeLit": reflect.TypeOf(CompositeLit{}), - "EmptyStmt": reflect.TypeOf(EmptyStmt{}), - "SwitchStmt": reflect.TypeOf(SwitchStmt{}), - "TypeSwitchStmt": reflect.TypeOf(TypeSwitchStmt{}), - "TypeAssertExpr": reflect.TypeOf(TypeAssertExpr{}), - "TypeSpec": reflect.TypeOf(TypeSpec{}), - "InterfaceType": reflect.TypeOf(InterfaceType{}), - "BranchStmt": reflect.TypeOf(BranchStmt{}), - "IncDecStmt": reflect.TypeOf(IncDecStmt{}), - "BasicLit": reflect.TypeOf(BasicLit{}), - "Object": reflect.TypeOf(Object{}), - "Function": 
reflect.TypeOf(Function{}), - "Or": reflect.TypeOf(Or{}), - "Not": reflect.TypeOf(Not{}), + "Any": reflect.TypeOf(Any{}), + "Ellipsis": reflect.TypeOf(Ellipsis{}), + "List": reflect.TypeOf(List{}), + "Binding": reflect.TypeOf(Binding{}), + "RangeStmt": reflect.TypeOf(RangeStmt{}), + "AssignStmt": reflect.TypeOf(AssignStmt{}), + "IndexExpr": reflect.TypeOf(IndexExpr{}), + "Ident": reflect.TypeOf(Ident{}), + "Builtin": reflect.TypeOf(Builtin{}), + "ValueSpec": reflect.TypeOf(ValueSpec{}), + "GenDecl": reflect.TypeOf(GenDecl{}), + "BinaryExpr": reflect.TypeOf(BinaryExpr{}), + "ForStmt": reflect.TypeOf(ForStmt{}), + "ArrayType": reflect.TypeOf(ArrayType{}), + "DeferStmt": reflect.TypeOf(DeferStmt{}), + "MapType": reflect.TypeOf(MapType{}), + "ReturnStmt": reflect.TypeOf(ReturnStmt{}), + "SliceExpr": reflect.TypeOf(SliceExpr{}), + "StarExpr": reflect.TypeOf(StarExpr{}), + "UnaryExpr": reflect.TypeOf(UnaryExpr{}), + "SendStmt": reflect.TypeOf(SendStmt{}), + "SelectStmt": reflect.TypeOf(SelectStmt{}), + "ImportSpec": reflect.TypeOf(ImportSpec{}), + "IfStmt": reflect.TypeOf(IfStmt{}), + "GoStmt": reflect.TypeOf(GoStmt{}), + "Field": reflect.TypeOf(Field{}), + "SelectorExpr": reflect.TypeOf(SelectorExpr{}), + "StructType": reflect.TypeOf(StructType{}), + "KeyValueExpr": reflect.TypeOf(KeyValueExpr{}), + "FuncType": reflect.TypeOf(FuncType{}), + "FuncLit": reflect.TypeOf(FuncLit{}), + "FuncDecl": reflect.TypeOf(FuncDecl{}), + "ChanType": reflect.TypeOf(ChanType{}), + "CallExpr": reflect.TypeOf(CallExpr{}), + "CaseClause": reflect.TypeOf(CaseClause{}), + "CommClause": reflect.TypeOf(CommClause{}), + "CompositeLit": reflect.TypeOf(CompositeLit{}), + "EmptyStmt": reflect.TypeOf(EmptyStmt{}), + "SwitchStmt": reflect.TypeOf(SwitchStmt{}), + "TypeSwitchStmt": reflect.TypeOf(TypeSwitchStmt{}), + "TypeAssertExpr": reflect.TypeOf(TypeAssertExpr{}), + "TypeSpec": reflect.TypeOf(TypeSpec{}), + "InterfaceType": reflect.TypeOf(InterfaceType{}), + "BranchStmt": reflect.TypeOf(BranchStmt{}), + "IncDecStmt": reflect.TypeOf(IncDecStmt{}), + "BasicLit": reflect.TypeOf(BasicLit{}), + "Object": reflect.TypeOf(Object{}), + "Function": reflect.TypeOf(Function{}), + "Or": reflect.TypeOf(Or{}), + "Not": reflect.TypeOf(Not{}), + "IntegerLiteral": reflect.TypeOf(IntegerLiteral{}), + "TrulyConstantExpression": reflect.TypeOf(TrulyConstantExpression{}), } func (p *Parser) object() (Node, error) { diff --git a/vendor/honnef.co/go/tools/pattern/pattern.go b/vendor/honnef.co/go/tools/pattern/pattern.go index e39de3a82..abb8a425f 100644 --- a/vendor/honnef.co/go/tools/pattern/pattern.go +++ b/vendor/honnef.co/go/tools/pattern/pattern.go @@ -13,6 +13,7 @@ var ( _ Node = RangeStmt{} _ Node = AssignStmt{} _ Node = IndexExpr{} + _ Node = IndexListExpr{} _ Node = Ident{} _ Node = Builtin{} _ Node = String("") @@ -61,6 +62,8 @@ var ( _ Node = Function{} _ Node = Not{} _ Node = Or{} + _ Node = IntegerLiteral{} + _ Node = TrulyConstantExpression{} ) type Function struct { @@ -263,6 +266,11 @@ type IndexExpr struct { Index Node } +type IndexListExpr struct { + X Node + Indices Node +} + type Node interface { String() string isNode() @@ -305,6 +313,12 @@ type BasicLit struct { Value Node } +// An IntegerLiteral is a constant expression made up of only integer basic literals and the "+" and "-" unary operators. +// That is, 0, -4, -+42 are all integer literals, but 1 + 2 is not. 
+type IntegerLiteral struct { + Value Node +} + type BinaryExpr struct { X Node Op Node @@ -326,6 +340,12 @@ type Not struct { Node Node } +// A TrulyConstantExpression is a constant expression that does not make use of any identifiers. +// It is constant even under varying build tags. +type TrulyConstantExpression struct { + Value Node +} + func stringify(n Node) string { v := reflect.ValueOf(n) var parts []string @@ -336,53 +356,56 @@ func stringify(n Node) string { return "(" + strings.Join(parts, " ") + ")" } -func (stmt AssignStmt) String() string { return stringify(stmt) } -func (expr IndexExpr) String() string { return stringify(expr) } -func (id Ident) String() string { return stringify(id) } -func (spec ValueSpec) String() string { return stringify(spec) } -func (decl GenDecl) String() string { return stringify(decl) } -func (lit BasicLit) String() string { return stringify(lit) } -func (expr BinaryExpr) String() string { return stringify(expr) } -func (stmt ForStmt) String() string { return stringify(stmt) } -func (stmt RangeStmt) String() string { return stringify(stmt) } -func (typ ArrayType) String() string { return stringify(typ) } -func (stmt DeferStmt) String() string { return stringify(stmt) } -func (typ MapType) String() string { return stringify(typ) } -func (stmt ReturnStmt) String() string { return stringify(stmt) } -func (expr SliceExpr) String() string { return stringify(expr) } -func (expr StarExpr) String() string { return stringify(expr) } -func (expr UnaryExpr) String() string { return stringify(expr) } -func (stmt SendStmt) String() string { return stringify(stmt) } -func (spec ImportSpec) String() string { return stringify(spec) } -func (stmt SelectStmt) String() string { return stringify(stmt) } -func (stmt IfStmt) String() string { return stringify(stmt) } -func (stmt IncDecStmt) String() string { return stringify(stmt) } -func (stmt GoStmt) String() string { return stringify(stmt) } -func (field Field) String() string { return stringify(field) } -func (expr SelectorExpr) String() string { return stringify(expr) } -func (typ StructType) String() string { return stringify(typ) } -func (expr KeyValueExpr) String() string { return stringify(expr) } -func (typ FuncType) String() string { return stringify(typ) } -func (lit FuncLit) String() string { return stringify(lit) } -func (decl FuncDecl) String() string { return stringify(decl) } -func (stmt BranchStmt) String() string { return stringify(stmt) } -func (expr CallExpr) String() string { return stringify(expr) } -func (clause CaseClause) String() string { return stringify(clause) } -func (typ ChanType) String() string { return stringify(typ) } -func (clause CommClause) String() string { return stringify(clause) } -func (lit CompositeLit) String() string { return stringify(lit) } -func (stmt EmptyStmt) String() string { return stringify(stmt) } -func (typ InterfaceType) String() string { return stringify(typ) } -func (stmt SwitchStmt) String() string { return stringify(stmt) } -func (expr TypeAssertExpr) String() string { return stringify(expr) } -func (spec TypeSpec) String() string { return stringify(spec) } -func (stmt TypeSwitchStmt) String() string { return stringify(stmt) } -func (nil Nil) String() string { return "nil" } -func (builtin Builtin) String() string { return stringify(builtin) } -func (obj Object) String() string { return stringify(obj) } -func (fn Function) String() string { return stringify(fn) } -func (el Ellipsis) String() string { return stringify(el) } -func (not Not) String() string { 
return stringify(not) } +func (stmt AssignStmt) String() string { return stringify(stmt) } +func (expr IndexExpr) String() string { return stringify(expr) } +func (expr IndexListExpr) String() string { return stringify(expr) } +func (id Ident) String() string { return stringify(id) } +func (spec ValueSpec) String() string { return stringify(spec) } +func (decl GenDecl) String() string { return stringify(decl) } +func (lit BasicLit) String() string { return stringify(lit) } +func (expr BinaryExpr) String() string { return stringify(expr) } +func (stmt ForStmt) String() string { return stringify(stmt) } +func (stmt RangeStmt) String() string { return stringify(stmt) } +func (typ ArrayType) String() string { return stringify(typ) } +func (stmt DeferStmt) String() string { return stringify(stmt) } +func (typ MapType) String() string { return stringify(typ) } +func (stmt ReturnStmt) String() string { return stringify(stmt) } +func (expr SliceExpr) String() string { return stringify(expr) } +func (expr StarExpr) String() string { return stringify(expr) } +func (expr UnaryExpr) String() string { return stringify(expr) } +func (stmt SendStmt) String() string { return stringify(stmt) } +func (spec ImportSpec) String() string { return stringify(spec) } +func (stmt SelectStmt) String() string { return stringify(stmt) } +func (stmt IfStmt) String() string { return stringify(stmt) } +func (stmt IncDecStmt) String() string { return stringify(stmt) } +func (stmt GoStmt) String() string { return stringify(stmt) } +func (field Field) String() string { return stringify(field) } +func (expr SelectorExpr) String() string { return stringify(expr) } +func (typ StructType) String() string { return stringify(typ) } +func (expr KeyValueExpr) String() string { return stringify(expr) } +func (typ FuncType) String() string { return stringify(typ) } +func (lit FuncLit) String() string { return stringify(lit) } +func (decl FuncDecl) String() string { return stringify(decl) } +func (stmt BranchStmt) String() string { return stringify(stmt) } +func (expr CallExpr) String() string { return stringify(expr) } +func (clause CaseClause) String() string { return stringify(clause) } +func (typ ChanType) String() string { return stringify(typ) } +func (clause CommClause) String() string { return stringify(clause) } +func (lit CompositeLit) String() string { return stringify(lit) } +func (stmt EmptyStmt) String() string { return stringify(stmt) } +func (typ InterfaceType) String() string { return stringify(typ) } +func (stmt SwitchStmt) String() string { return stringify(stmt) } +func (expr TypeAssertExpr) String() string { return stringify(expr) } +func (spec TypeSpec) String() string { return stringify(spec) } +func (stmt TypeSwitchStmt) String() string { return stringify(stmt) } +func (nil Nil) String() string { return "nil" } +func (builtin Builtin) String() string { return stringify(builtin) } +func (obj Object) String() string { return stringify(obj) } +func (fn Function) String() string { return stringify(fn) } +func (el Ellipsis) String() string { return stringify(el) } +func (not Not) String() string { return stringify(not) } +func (lit IntegerLiteral) String() string { return stringify(lit) } +func (expr TrulyConstantExpression) String() string { return stringify(expr) } func (or Or) String() string { s := "(Or" @@ -441,56 +464,59 @@ func (tok Token) String() string { func (Any) String() string { return "_" } -func (AssignStmt) isNode() {} -func (IndexExpr) isNode() {} -func (Ident) isNode() {} -func (ValueSpec) 
isNode() {} -func (GenDecl) isNode() {} -func (BasicLit) isNode() {} -func (BinaryExpr) isNode() {} -func (ForStmt) isNode() {} -func (RangeStmt) isNode() {} -func (ArrayType) isNode() {} -func (DeferStmt) isNode() {} -func (MapType) isNode() {} -func (ReturnStmt) isNode() {} -func (SliceExpr) isNode() {} -func (StarExpr) isNode() {} -func (UnaryExpr) isNode() {} -func (SendStmt) isNode() {} -func (ImportSpec) isNode() {} -func (SelectStmt) isNode() {} -func (IfStmt) isNode() {} -func (IncDecStmt) isNode() {} -func (GoStmt) isNode() {} -func (Field) isNode() {} -func (SelectorExpr) isNode() {} -func (StructType) isNode() {} -func (KeyValueExpr) isNode() {} -func (FuncType) isNode() {} -func (FuncLit) isNode() {} -func (FuncDecl) isNode() {} -func (BranchStmt) isNode() {} -func (CallExpr) isNode() {} -func (CaseClause) isNode() {} -func (ChanType) isNode() {} -func (CommClause) isNode() {} -func (CompositeLit) isNode() {} -func (EmptyStmt) isNode() {} -func (InterfaceType) isNode() {} -func (SwitchStmt) isNode() {} -func (TypeAssertExpr) isNode() {} -func (TypeSpec) isNode() {} -func (TypeSwitchStmt) isNode() {} -func (Nil) isNode() {} -func (Builtin) isNode() {} -func (Object) isNode() {} -func (Function) isNode() {} -func (Ellipsis) isNode() {} -func (Or) isNode() {} -func (List) isNode() {} -func (String) isNode() {} -func (Token) isNode() {} -func (Any) isNode() {} -func (Binding) isNode() {} -func (Not) isNode() {} +func (AssignStmt) isNode() {} +func (IndexExpr) isNode() {} +func (IndexListExpr) isNode() {} +func (Ident) isNode() {} +func (ValueSpec) isNode() {} +func (GenDecl) isNode() {} +func (BasicLit) isNode() {} +func (BinaryExpr) isNode() {} +func (ForStmt) isNode() {} +func (RangeStmt) isNode() {} +func (ArrayType) isNode() {} +func (DeferStmt) isNode() {} +func (MapType) isNode() {} +func (ReturnStmt) isNode() {} +func (SliceExpr) isNode() {} +func (StarExpr) isNode() {} +func (UnaryExpr) isNode() {} +func (SendStmt) isNode() {} +func (ImportSpec) isNode() {} +func (SelectStmt) isNode() {} +func (IfStmt) isNode() {} +func (IncDecStmt) isNode() {} +func (GoStmt) isNode() {} +func (Field) isNode() {} +func (SelectorExpr) isNode() {} +func (StructType) isNode() {} +func (KeyValueExpr) isNode() {} +func (FuncType) isNode() {} +func (FuncLit) isNode() {} +func (FuncDecl) isNode() {} +func (BranchStmt) isNode() {} +func (CallExpr) isNode() {} +func (CaseClause) isNode() {} +func (ChanType) isNode() {} +func (CommClause) isNode() {} +func (CompositeLit) isNode() {} +func (EmptyStmt) isNode() {} +func (InterfaceType) isNode() {} +func (SwitchStmt) isNode() {} +func (TypeAssertExpr) isNode() {} +func (TypeSpec) isNode() {} +func (TypeSwitchStmt) isNode() {} +func (Nil) isNode() {} +func (Builtin) isNode() {} +func (Object) isNode() {} +func (Function) isNode() {} +func (Ellipsis) isNode() {} +func (Or) isNode() {} +func (List) isNode() {} +func (String) isNode() {} +func (Token) isNode() {} +func (Any) isNode() {} +func (Binding) isNode() {} +func (Not) isNode() {} +func (IntegerLiteral) isNode() {} +func (TrulyConstantExpression) isNode() {} diff --git a/vendor/honnef.co/go/tools/printf/fuzz.go b/vendor/honnef.co/go/tools/printf/fuzz.go index 8ebf357fb..41c34c631 100644 --- a/vendor/honnef.co/go/tools/printf/fuzz.go +++ b/vendor/honnef.co/go/tools/printf/fuzz.go @@ -1,3 +1,4 @@ +//go:build gofuzz // +build gofuzz package printf diff --git a/vendor/honnef.co/go/tools/simple/doc.go b/vendor/honnef.co/go/tools/simple/doc.go index c7546cf59..11cc70c3a 100644 --- 
a/vendor/honnef.co/go/tools/simple/doc.go +++ b/vendor/honnef.co/go/tools/simple/doc.go @@ -1,187 +1,157 @@ +// Package simple contains analyzes that simplify code. +// All suggestions made by these analyzes are intended to result in objectively simpler code, +// and following their advice is recommended. package simple import "honnef.co/go/tools/analysis/lint" -var Docs = lint.Markdownify(map[string]*lint.Documentation{ +var Docs = lint.Markdownify(map[string]*lint.RawDocumentation{ "S1000": { Title: `Use plain channel send or receive instead of single-case select`, Text: `Select statements with a single case can be replaced with a simple -send or receive. - -Before: - - select { - case x := <-ch: - fmt.Println(x) - } - -After: - - x := <-ch - fmt.Println(x)`, - Since: "2017.1", +send or receive.`, + Before: ` +select { +case x := <-ch: + fmt.Println(x) +}`, + After: ` +x := <-ch +fmt.Println(x) +`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1001": { Title: `Replace for loop with call to copy`, - Text: `Use \'copy()\' for copying elements from one slice to another. - -Before: - - for i, x := range src { - dst[i] = x - } - -After: - - copy(dst, src)`, + Text: ` +Use \'copy()\' for copying elements from one slice to another. For +arrays of identical size, you can use simple assignment.`, + Before: ` +for i, x := range src { + dst[i] = x +}`, + After: `copy(dst, src)`, Since: "2017.1", + // MergeIfAll because the types of src and dst might be different under different build tags. + // You shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, "S1002": { - Title: `Omit comparison with boolean constant`, - Text: `Before: - - if x == true {} - -After: - - if x {}`, - Since: "2017.1", + Title: `Omit comparison with boolean constant`, + Before: `if x == true {}`, + After: `if x {}`, + Since: "2017.1", + // MergeIfAll because 'true' might not be the builtin constant under all build tags. + // You shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, "S1003": { - Title: "Replace call to `strings.Index` with `strings.Contains`", - Text: `Before: - - if strings.Index(x, y) != -1 {} - -After: - - if strings.Contains(x, y) {}`, - Since: "2017.1", + Title: `Replace call to \'strings.Index\' with \'strings.Contains\'`, + Before: `if strings.Index(x, y) != -1 {}`, + After: `if strings.Contains(x, y) {}`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1004": { - Title: "Replace call to `bytes.Compare` with `bytes.Equal`", - Text: `Before: - - if bytes.Compare(x, y) == 0 {} - -After: - - if bytes.Equal(x, y) {}`, - Since: "2017.1", + Title: `Replace call to \'bytes.Compare\' with \'bytes.Equal\'`, + Before: `if bytes.Compare(x, y) == 0 {}`, + After: `if bytes.Equal(x, y) {}`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1005": { Title: `Drop unnecessary use of the blank identifier`, - Text: `In many cases, assigning to the blank identifier is unnecessary. - -Before: - - for _ = range s {} - x, _ = someMap[key] - _ = <-ch - -After: - - for range s{} - x = someMap[key] - <-ch`, - Since: "2017.1", + Text: `In many cases, assigning to the blank identifier is unnecessary.`, + Before: ` +for _ = range s {} +x, _ = someMap[key] +_ = <-ch`, + After: ` +for range s{} +x = someMap[key] +<-ch`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1006": { - Title: "Use `for { ... }` for infinite loops", - Text: `For infinite loops, using \'for { ... }\' is the most idiomatic choice.`, - Since: "2017.1", + Title: `Use \"for { ... 
}\" for infinite loops`, + Text: `For infinite loops, using \'for { ... }\' is the most idiomatic choice.`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1007": { Title: `Simplify regular expression by using raw string literal`, - Text: `Raw string literals use \' instead of " and do not support -any escape sequences. This means that the backslash (\'\\') can be used + Text: `Raw string literals use backticks instead of quotation marks and do not support +any escape sequences. This means that the backslash can be used freely, without the need of escaping. Since regular expressions have their own escape sequences, raw strings -can improve their readability. - -Before: - - regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z") - -After: - - regexp.Compile(` + "`" + `\A(\w+) profile: total \d+\n\z` + "`" + `)`, - Since: "2017.1", +can improve their readability.`, + Before: `regexp.Compile("\\A(\\w+) profile: total \\d+\\n\\z")`, + After: "regexp.Compile(`\\A(\\w+) profile: total \\d+\\n\\z`)", + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1008": { Title: `Simplify returning boolean expression`, - Text: `Before: - - if { - return true - } - return false - -After: - - return `, - Since: "2017.1", + Before: ` +if { + return true +} +return false`, + After: `return `, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1009": { Title: `Omit redundant nil check on slices`, Text: `The \'len\' function is defined for all slices, even nil ones, which have a length of zero. It is not necessary to check if a slice is not nil -before checking that its length is not zero. - -Before: - - if x != nil && len(x) != 0 {} - -After: - - if len(x) != 0 {}`, - Since: "2017.1", +before checking that its length is not zero.`, + Before: `if x != nil && len(x) != 0 {}`, + After: `if len(x) != 0 {}`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1010": { Title: `Omit default slice index`, Text: `When slicing, the second index defaults to the length of the value, making \'s[n:len(s)]\' and \'s[n:]\' equivalent.`, - Since: "2017.1", + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1011": { - Title: "Use a single `append` to concatenate two slices", - Text: `Before: - - for _, e := range y { - x = append(x, e) - } - -After: - - x = append(x, y...)`, + Title: `Use a single \'append\' to concatenate two slices`, + Before: ` +for _, e := range y { + x = append(x, e) +}`, + After: `x = append(x, y...)`, Since: "2017.1", + // MergeIfAll because y might not be a slice under all build tags. + MergeIf: lint.MergeIfAll, }, "S1012": { - Title: "Replace `time.Now().Sub(x)` with `time.Since(x)`", + Title: `Replace \'time.Now().Sub(x)\' with \'time.Since(x)\'`, Text: `The \'time.Since\' helper has the same effect as using \'time.Now().Sub(x)\' -but is easier to read. - -Before: - - time.Now().Sub(x) - -After: - - time.Since(x)`, - Since: "2017.1", +but is easier to read.`, + Before: `time.Now().Sub(x)`, + After: `time.Since(x)`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1016": { @@ -190,91 +160,78 @@ After: other. In older versions of Go, the fields had to have identical struct tags. Since Go 1.8, however, struct tags are ignored during conversions. It is thus not necessary to manually copy every field -individually. 
- -Before: - - var x T1 - y := T2{ - Field1: x.Field1, - Field2: x.Field2, - } - -After: - - var x T1 - y := T2(x)`, - Since: "2017.1", +individually.`, + Before: ` +var x T1 +y := T2{ + Field1: x.Field1, + Field2: x.Field2, +}`, + After: ` +var x T1 +y := T2(x)`, + Since: "2017.1", + MergeIf: lint.MergeIfAll, }, "S1017": { - Title: "Replace manual trimming with `strings.TrimPrefix`", + Title: `Replace manual trimming with \'strings.TrimPrefix\'`, Text: `Instead of using \'strings.HasPrefix\' and manual slicing, use the \'strings.TrimPrefix\' function. If the string doesn't start with the prefix, the original string will be returned. Using \'strings.TrimPrefix\' reduces complexity, and avoids common bugs, such as off-by-one -mistakes. - -Before: - - if strings.HasPrefix(str, prefix) { - str = str[len(prefix):] - } - -After: - - str = strings.TrimPrefix(str, prefix)`, - Since: "2017.1", +mistakes.`, + Before: ` +if strings.HasPrefix(str, prefix) { + str = str[len(prefix):] +}`, + After: `str = strings.TrimPrefix(str, prefix)`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1018": { - Title: "Use `copy` for sliding elements", + Title: `Use \"copy\" for sliding elements`, Text: `\'copy()\' permits using the same source and destination slice, even with overlapping ranges. This makes it ideal for sliding elements in a -slice. +slice.`, -Before: - - for i := 0; i < n; i++ { - bs[i] = bs[offset+i] - } - -After: - - copy(bs[:n], bs[offset:])`, - Since: "2017.1", + Before: ` +for i := 0; i < n; i++ { + bs[i] = bs[offset+i] +}`, + After: `copy(bs[:n], bs[offset:])`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1019": { - Title: "Simplify `make` call by omitting redundant arguments", - Text: `The \'make\' function has default values for the length and capacity -arguments. For channels, the length defaults to zero, and for slices, the capacity defaults to the length.`, + Title: `Simplify \"make\" call by omitting redundant arguments`, + Text: `The \"make\" function has default values for the length and capacity +arguments. For channels, the length defaults to zero, and for slices, +the capacity defaults to the length.`, Since: "2017.1", + // MergeIfAll because the type might be different under different build tags. + // You shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, "S1020": { - Title: `Omit redundant nil check in type assertion`, - Text: `Before: - - if _, ok := i.(T); ok && i != nil {} - -After: - - if _, ok := i.(T); ok {}`, - Since: "2017.1", + Title: `Omit redundant nil check in type assertion`, + Before: `if _, ok := i.(T); ok && i != nil {}`, + After: `if _, ok := i.(T); ok {}`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1021": { Title: `Merge variable declaration and assignment`, - Text: `Before: - - var x uint - x = 1 - -After: - - var x uint = 1`, - Since: "2017.1", + Before: ` +var x uint +x = 1`, + After: `var x uint = 1`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1023": { @@ -285,26 +242,22 @@ the final statement of the function. Switches in Go do not have automatic fallthrough, unlike languages like C. It is not necessary to have a break statement as the final statement in a case block.`, - Since: "2017.1", + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1024": { - Title: "Replace `x.Sub(time.Now())` with `time.Until(x)`", + Title: `Replace \'x.Sub(time.Now())\' with \'time.Until(x)\'`, Text: `The \'time.Until\' helper has the same effect as using \'x.Sub(time.Now())\' -but is easier to read. 
- -Before: - - x.Sub(time.Now()) - -After: - - time.Until(x)`, - Since: "2017.1", +but is easier to read.`, + Before: `x.Sub(time.Now())`, + After: `time.Until(x)`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1025": { - Title: "Don't use `fmt.Sprintf(\"%s\", x)` unnecessarily", + Title: `Don't use \'fmt.Sprintf("%s", x)\' unnecessarily`, Text: `In many instances, there are easier and more efficient ways of getting a value's string representation. Whenever a value's underlying type is a string already, or the type has a String method, they should be used @@ -314,14 +267,14 @@ Given the following shared definitions type T1 string type T2 int - + func (T2) String() string { return "Hello, world" } - + var x string var y T1 var z T2 -we can simplify the following +we can simplify fmt.Sprintf("%s", x) fmt.Sprintf("%s", y) @@ -331,20 +284,18 @@ to x string(y) - z.String()`, - Since: "2017.1", + z.String() +`, + Since: "2017.1", + MergeIf: lint.MergeIfAll, }, "S1028": { - Title: "Simplify error construction with `fmt.Errorf`", - Text: `Before: - - errors.New(fmt.Sprintf(...)) - -After: - - fmt.Errorf(...)`, - Since: "2017.1", + Title: `Simplify error construction with \'fmt.Errorf\'`, + Before: `errors.New(fmt.Sprintf(...))`, + After: `fmt.Errorf(...)`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1029": { @@ -353,20 +304,15 @@ After: isn't used, this is functionally equivalent to converting the string to a slice of runes and ranging over that. Ranging directly over the string will be more performant, however, as it avoids allocating a new -slice, the size of which depends on the length of the string. - -Before: - - for _, r := range []rune(s) {} - -After: - - for _, r := range s {}`, - Since: "2017.1", +slice, the size of which depends on the length of the string.`, + Before: `for _, r := range []rune(s) {}`, + After: `for _, r := range s {}`, + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1030": { - Title: "Use `bytes.Buffer.String` or `bytes.Buffer.Bytes`", + Title: `Use \'bytes.Buffer.String\' or \'bytes.Buffer.Bytes\'`, Text: `\'bytes.Buffer\' has both a \'String\' and a \'Bytes\' method. It is almost never necessary to use \'string(buf.Bytes())\' or \'[]byte(buf.String())\' – simply use the other method. @@ -374,70 +320,71 @@ use the other method. The only exception to this are map lookups. Due to a compiler optimization, \'m[string(buf.Bytes())]\' is more efficient than \'m[buf.String()]\'. `, - Since: "2017.1", + Since: "2017.1", + MergeIf: lint.MergeIfAny, }, "S1031": { Title: `Omit redundant nil check around loop`, Text: `You can use range on nil slices and maps, the loop will simply never execute. This makes an additional nil check around the loop -unnecessary. - -Before: - - if s != nil { - for _, x := range s { - ... - } - } - -After: - +unnecessary.`, + Before: ` +if s != nil { for _, x := range s { ... - }`, + } +}`, + After: ` +for _, x := range s { + ... +}`, Since: "2017.1", + // MergeIfAll because x might be a channel under some build tags. + // you shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, "S1032": { - Title: "Use `sort.Ints(x)`, `sort.Float64s(x)`, and `sort.Strings(x)`", + Title: `Use \'sort.Ints(x)\', \'sort.Float64s(x)\', and \'sort.Strings(x)\'`, Text: `The \'sort.Ints\', \'sort.Float64s\' and \'sort.Strings\' functions are easier to read than \'sort.Sort(sort.IntSlice(x))\', \'sort.Sort(sort.Float64Slice(x))\' -and \'sort.Sort(sort.StringSlice(x))\'. 
- -Before: - - sort.Sort(sort.StringSlice(x)) - -After: - - sort.Strings(x)`, - Since: "2019.1", +and \'sort.Sort(sort.StringSlice(x))\'.`, + Before: `sort.Sort(sort.StringSlice(x))`, + After: `sort.Strings(x)`, + Since: "2019.1", + MergeIf: lint.MergeIfAny, }, "S1033": { - Title: "Unnecessary guard around call to `delete`", - Text: `Calling \'delete\' on a nil map is a no-op.`, - Since: "2019.2", + Title: `Unnecessary guard around call to \"delete\"`, + Text: `Calling \'delete\' on a nil map is a no-op.`, + Since: "2019.2", + MergeIf: lint.MergeIfAny, }, "S1034": { - Title: `Use result of type assertion to simplify cases`, - Since: "2019.2", + Title: `Use result of type assertion to simplify cases`, + Since: "2019.2", + MergeIf: lint.MergeIfAny, }, "S1035": { - Title: "Redundant call to `net/http.CanonicalHeaderKey` in method call on `net/http.Header`", - Text: `The methods on \'net/http.Header\', namely \'Add\', \'Del\', \'Get\' and \'Set\', already -canonicalize the given header name.`, - Since: "2020.1", + Title: `Redundant call to \'net/http.CanonicalHeaderKey\' in method call on \'net/http.Header\'`, + Text: ` +The methods on \'net/http.Header\', namely \'Add\', \'Del\', \'Get\' +and \'Set\', already canonicalize the given header name.`, + Since: "2020.1", + MergeIf: lint.MergeIfAny, }, "S1036": { Title: `Unnecessary guard around map access`, - Text: `When accessing a map key that doesn't exist yet, one -receives a zero value. Often, the zero value is a suitable value, for example when using append or doing integer math. + Text: ` +When accessing a map key that doesn't exist yet, one receives a zero +value. Often, the zero value is a suitable value, for example when +using append or doing integer math. The following @@ -463,7 +410,8 @@ can be simplified to m["k"] += 4 `, - Since: "2020.1", + Since: "2020.1", + MergeIf: lint.MergeIfAny, }, "S1037": { @@ -471,19 +419,26 @@ can be simplified to Text: `Using a select statement with a single case receiving from the result of \'time.After\' is a very elaborate way of sleeping that can much simpler be expressed with a simple call to time.Sleep.`, - Since: "2020.1", + Since: "2020.1", + MergeIf: lint.MergeIfAny, }, "S1038": { - Title: "Unnecessarily complex way of printing formatted string", - Text: `Instead of using \'fmt.Print(fmt.Sprintf(...))\', one can use \'fmt.Printf(...)\'.`, - Since: "2020.1", + Title: "Unnecessarily complex way of printing formatted string", + Text: `Instead of using \'fmt.Print(fmt.Sprintf(...))\', one can use \'fmt.Printf(...)\'.`, + Since: "2020.1", + MergeIf: lint.MergeIfAny, }, "S1039": { - Title: "Unnecessary use of `fmt.Sprint`", - Text: `Calling \'fmt.Sprint\' with a single string argument is unnecessary and identical to using the string directly.`, + Title: `Unnecessary use of \'fmt.Sprint\'`, + Text: ` +Calling \'fmt.Sprint\' with a single string argument is unnecessary +and identical to using the string directly.`, Since: "2020.1", + // MergeIfAll because s might not be a string under all build tags. + // you shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, "S1040": { Title: "Type assertion to current type", @@ -494,5 +449,8 @@ delete the type assertion. If you want to check that \'x\' is not nil, consider being explicit and using an actual \'if x == nil\' comparison instead of relying on the type assertion panicking.`, Since: "2021.1", + // MergeIfAll because x might have different types under different build tags. 
+ // You shouldn't write code like that… + MergeIf: lint.MergeIfAll, }, }) diff --git a/vendor/honnef.co/go/tools/simple/lint.go b/vendor/honnef.co/go/tools/simple/lint.go index c7acd85c8..7d81e8ab1 100644 --- a/vendor/honnef.co/go/tools/simple/lint.go +++ b/vendor/honnef.co/go/tools/simple/lint.go @@ -1,4 +1,3 @@ -// Package simple contains a linter for Go source code. package simple import ( @@ -23,6 +22,7 @@ import ( "honnef.co/go/tools/knowledge" "honnef.co/go/tools/pattern" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" ) @@ -61,47 +61,149 @@ var ( checkLoopCopyQ = pattern.MustParse(` (Or (RangeStmt - key value ":=" src@(Ident _) - [(AssignStmt - (IndexExpr dst@(Ident _) key) - "=" - value)]) + key@(Ident _) value@(Ident _) ":=" src + [(AssignStmt (IndexExpr dst key) "=" value)]) (RangeStmt - key nil ":=" src@(Ident _) - [(AssignStmt - (IndexExpr dst@(Ident _) key) - "=" - (IndexExpr src key))]))`) - checkLoopCopyR = pattern.MustParse(`(CallExpr (Ident "copy") [dst src])`) + key@(Ident _) nil ":=" src + [(AssignStmt (IndexExpr dst key) "=" (IndexExpr src key))]) + (ForStmt + (AssignStmt key@(Ident _) ":=" (IntegerLiteral "0")) + (BinaryExpr key "<" (CallExpr (Function "len") [src])) + (IncDecStmt key "++") + [(AssignStmt (IndexExpr dst key) "=" (IndexExpr src key))]))`) ) func CheckLoopCopy(pass *analysis.Pass) (interface{}, error) { + // TODO revisit once range doesn't require a structural type + + isInvariant := func(k, v types.Object, node ast.Expr) bool { + if code.MayHaveSideEffects(pass, node, nil) { + return false + } + invariant := true + ast.Inspect(node, func(node ast.Node) bool { + if node, ok := node.(*ast.Ident); ok { + obj := pass.TypesInfo.ObjectOf(node) + if obj == k || obj == v { + // don't allow loop bodies like 'a[i][i] = v' + invariant = false + return false + } + } + return true + }) + return invariant + } + + var elType func(T types.Type) (el types.Type, isArray bool, isArrayPointer bool, ok bool) + elType = func(T types.Type) (el types.Type, isArray bool, isArrayPointer bool, ok bool) { + switch typ := T.Underlying().(type) { + case *types.Slice: + return typ.Elem(), false, false, true + case *types.Array: + return typ.Elem(), true, false, true + case *types.Pointer: + el, isArray, _, ok = elType(typ.Elem()) + return el, isArray, true, ok + default: + return nil, false, false, false + } + } + fn := func(node ast.Node) { - m, edits, ok := code.MatchAndEdit(pass, checkLoopCopyQ, checkLoopCopyR, node) + m, ok := code.Match(pass, checkLoopCopyQ, node) if !ok { return } - t1 := pass.TypesInfo.TypeOf(m.State["src"].(*ast.Ident)) - t2 := pass.TypesInfo.TypeOf(m.State["dst"].(*ast.Ident)) - if _, ok := t1.Underlying().(*types.Slice); !ok { + + src := m.State["src"].(ast.Expr) + dst := m.State["dst"].(ast.Expr) + + k := pass.TypesInfo.ObjectOf(m.State["key"].(*ast.Ident)) + var v types.Object + if value, ok := m.State["value"]; ok { + v = pass.TypesInfo.ObjectOf(value.(*ast.Ident)) + } + if !isInvariant(k, v, dst) { return } - if !types.Identical(t1, t2) { + if !isInvariant(k, v, src) { + // For example: 'for i := range foo()' return } - tv, err := types.Eval(pass.Fset, pass.Pkg, node.Pos(), "copy") - if err == nil && tv.IsBuiltin() { - report.Report(pass, node, - "should use copy() instead of a loop", - report.ShortRange(), + Tsrc := pass.TypesInfo.TypeOf(src) + Tdst := pass.TypesInfo.TypeOf(dst) + TsrcElem, TsrcArray, TsrcPointer, ok := elType(Tsrc) + if !ok { + return + } + if TsrcPointer { + Tsrc = Tsrc.Underlying().(*types.Pointer).Elem() + } + 
TdstElem, TdstArray, TdstPointer, ok := elType(Tdst) + if !ok { + return + } + if TdstPointer { + Tdst = Tdst.Underlying().(*types.Pointer).Elem() + } + + if !types.Identical(TsrcElem, TdstElem) { + return + } + + if TsrcArray && TdstArray && types.Identical(Tsrc, Tdst) { + if TsrcPointer { + src = &ast.StarExpr{ + X: src, + } + } + if TdstPointer { + dst = &ast.StarExpr{ + X: dst, + } + } + r := &ast.AssignStmt{ + Lhs: []ast.Expr{dst}, + Rhs: []ast.Expr{src}, + Tok: token.ASSIGN, + } + + report.Report(pass, node, "should copy arrays using assignment instead of using a loop", report.FilterGenerated(), - report.Fixes(edit.Fix("replace loop with call to copy()", edits...))) + report.ShortRange(), + report.Fixes(edit.Fix("replace loop with assignment", edit.ReplaceWithNode(pass.Fset, node, r)))) } else { - report.Report(pass, node, "should use copy() instead of a loop", report.FilterGenerated()) + opts := []report.Option{ + report.ShortRange(), + report.FilterGenerated(), + } + tv, err := types.Eval(pass.Fset, pass.Pkg, node.Pos(), "copy") + if err == nil && tv.IsBuiltin() { + src := m.State["src"].(ast.Expr) + if TsrcArray { + src = &ast.SliceExpr{ + X: src, + } + } + dst := m.State["dst"].(ast.Expr) + if TdstArray { + dst = &ast.SliceExpr{ + X: dst, + } + } + + r := &ast.CallExpr{ + Fun: &ast.Ident{Name: "copy"}, + Args: []ast.Expr{dst, src}, + } + opts = append(opts, report.Fixes(edit.Fix("replace loop with call to copy()", edit.ReplaceWithNode(pass.Fset, node, r)))) + } + report.Report(pass, node, "should use copy() instead of a loop", opts...) } } - code.Preorder(pass, fn, (*ast.RangeStmt)(nil)) + code.Preorder(pass, fn, (*ast.ForStmt)(nil), (*ast.RangeStmt)(nil)) return nil, nil } @@ -129,8 +231,12 @@ func CheckIfBoolCmp(pass *analysis.Pass) (interface{}, error) { val = code.BoolConst(pass, expr.Y) other = expr.X } - basic, ok := pass.TypesInfo.TypeOf(other).Underlying().(*types.Basic) - if !ok || basic.Kind() != types.Bool { + + ok := typeutil.All(pass.TypesInfo.TypeOf(other), func(term *typeparams.Term) bool { + basic, ok := term.Type().Underlying().(*types.Basic) + return ok && basic.Kind() == types.Bool + }) + if !ok { return } op := "" @@ -145,7 +251,7 @@ func CheckIfBoolCmp(pass *analysis.Pass) (interface{}, error) { } report.Report(pass, expr, fmt.Sprintf("should omit comparison to bool constant, can be simplified to %s", r), report.FilterGenerated(), - report.Fixes(edit.Fix("simplify bool comparison", edit.ReplaceWithString(pass.Fset, expr, r)))) + report.Fixes(edit.Fix("simplify bool comparison", edit.ReplaceWithString(expr, r)))) } code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) return nil, nil @@ -181,11 +287,11 @@ func CheckBytesBufferConversions(pass *analysis.Pass) (interface{}, error) { report.Report(pass, call, fmt.Sprintf("should use %v.String() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)), report.FilterGenerated(), - report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRs, m.State, node)))) + report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass.Fset, node, checkBytesBufferConversionsRs, m.State)))) } else if typ, ok := typ.(*types.Slice); ok && typ.Elem() == types.Universe.Lookup("byte").Type() && code.IsCallTo(pass, call.Args[0], "(*bytes.Buffer).String") { report.Report(pass, call, fmt.Sprintf("should use %v.Bytes() instead of %v", report.Render(pass, sel.X), report.Render(pass, call)), report.FilterGenerated(), - report.Fixes(edit.Fix("simplify conversion", 
edit.ReplaceWithPattern(pass, checkBytesBufferConversionsRb, m.State, node)))) + report.Fixes(edit.Fix("simplify conversion", edit.ReplaceWithPattern(pass.Fset, node, checkBytesBufferConversionsRb, m.State)))) } } @@ -279,7 +385,7 @@ func CheckStringsContains(pass *analysis.Pass) (interface{}, error) { } var ( - checkBytesCompareQ = pattern.MustParse(`(BinaryExpr (CallExpr (Function "bytes.Compare") args) op@(Or "==" "!=") (BasicLit "INT" "0"))`) + checkBytesCompareQ = pattern.MustParse(`(BinaryExpr (CallExpr (Function "bytes.Compare") args) op@(Or "==" "!=") (IntegerLiteral "0"))`) checkBytesCompareRe = pattern.MustParse(`(CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args)`) checkBytesCompareRn = pattern.MustParse(`(UnaryExpr "!" (CallExpr (SelectorExpr (Ident "bytes") (Ident "Equal")) args))`) ) @@ -304,9 +410,9 @@ func CheckBytesCompare(pass *analysis.Pass) (interface{}, error) { var fix analysis.SuggestedFix switch tok := m.State["op"].(token.Token); tok { case token.EQL: - fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass, checkBytesCompareRe, m.State, node)) + fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass.Fset, node, checkBytesCompareRe, m.State)) case token.NEQ: - fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass, checkBytesCompareRn, m.State, node)) + fix = edit.Fix("simplify use of bytes.Compare", edit.ReplaceWithPattern(pass.Fset, node, checkBytesCompareRn, m.State)) default: panic(fmt.Sprintf("unexpected token %v", tok)) } @@ -387,8 +493,8 @@ func CheckRegexpRaw(pass *analysis.Pass) (interface{}, error) { } var ( - checkIfReturnQIf = pattern.MustParse(`(IfStmt nil cond [(ReturnStmt [ret@(Ident _)])] nil)`) - checkIfReturnQRet = pattern.MustParse(`(ReturnStmt [ret@(Ident _)])`) + checkIfReturnQIf = pattern.MustParse(`(IfStmt nil cond [(ReturnStmt [ret@(Builtin (Or "true" "false"))])] nil)`) + checkIfReturnQRet = pattern.MustParse(`(ReturnStmt [ret@(Builtin (Or "true" "false"))])`) ) func CheckIfReturn(pass *analysis.Pass) (interface{}, error) { @@ -424,13 +530,7 @@ func CheckIfReturn(pass *analysis.Pass) (interface{}, error) { } ret1 := m1.State["ret"].(*ast.Ident) - if !code.IsBoolConst(pass, ret1) { - return - } ret2 := m2.State["ret"].(*ast.Ident) - if !code.IsBoolConst(pass, ret2) { - return - } if ret1.Name == ret2.Name { // we want the function to return true and false, not the @@ -507,7 +607,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { isConstZero := func(expr ast.Expr) (isConst bool, isZero bool) { _, ok := expr.(*ast.BasicLit) if ok { - return true, astutil.IsIntLiteral(expr, "0") + return true, code.IsIntegerLiteral(pass, expr, constant.MakeInt64(0)) } id, ok := expr.(*ast.Ident) if !ok { @@ -571,7 +671,7 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { return } - if eqNil && !astutil.IsIntLiteral(y.Y, "0") { // must be len(x) == *0* + if eqNil && !code.IsIntegerLiteral(pass, y.Y, constant.MakeInt64(0)) { // must be len(x) == *0* return } @@ -605,18 +705,29 @@ func CheckRedundantNilCheckWithLen(pass *analysis.Pass) (interface{}, error) { // finally check that xx type is one of array, slice, map or chan // this is to prevent false positive in case if xx is a pointer to an array - var nilType string - switch pass.TypesInfo.TypeOf(xx).(type) { - case *types.Slice: - nilType = "nil slices" - case *types.Map: - nilType = "nil maps" - case *types.Chan: - nilType = "nil channels" - default: + typ := 
pass.TypesInfo.TypeOf(xx) + ok = typeutil.All(typ, func(term *typeparams.Term) bool { + switch term.Type().Underlying().(type) { + case *types.Slice: + return true + case *types.Map: + return true + case *types.Chan: + return true + case *types.Pointer: + return false + case *typeparams.TypeParam: + return false + default: + lint.ExhaustiveTypeSwitch(term.Type().Underlying()) + return false + } + }) + if !ok { return } - report.Report(pass, expr, fmt.Sprintf("should omit nil check; len() for %s is defined as zero", nilType), report.FilterGenerated()) + + report.Report(pass, expr, fmt.Sprintf("should omit nil check; len() for %s is defined as zero", typ), report.FilterGenerated()) } code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) return nil, nil @@ -809,6 +920,7 @@ func CheckUnnecessaryBlank(pass *analysis.Pass) (interface{}, error) { } func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { + // TODO(dh): support conversions between type parameters fn := func(node ast.Node, stack []ast.Node) { if unary, ok := stack[len(stack)-2].(*ast.UnaryExpr); ok && unary.Op == token.AND { // Do not suggest type conversion between pointers @@ -919,7 +1031,7 @@ func CheckSimplerStructConversion(pass *analysis.Pass) (interface{}, error) { Args: []ast.Expr{ident}, } report.Report(pass, node, - fmt.Sprintf("should convert %s (type %s) to %s instead of using struct literal", ident.Name, typ2.Obj().Name(), typ1.Obj().Name()), + fmt.Sprintf("should convert %s (type %s) to %s instead of using struct literal", ident.Name, types.TypeString(typ2, types.RelativeTo(pass.Pkg)), types.TypeString(typ1, types.RelativeTo(pass.Pkg))), report.FilterGenerated(), report.Fixes(edit.Fix("use type conversion", edit.ReplaceWithNode(pass.Fset, node, r)))) } @@ -1121,7 +1233,7 @@ func CheckTrim(pass *analysis.Pass) (interface{}, error) { var ( checkLoopSlideQ = pattern.MustParse(` (ForStmt - (AssignStmt initvar@(Ident _) _ (BasicLit "INT" "0")) + (AssignStmt initvar@(Ident _) _ (IntegerLiteral "0")) (BinaryExpr initvar "<" limit@(Ident _)) (IncDecStmt initvar "++") [(AssignStmt @@ -1147,7 +1259,9 @@ func CheckLoopSlide(pass *analysis.Pass) (interface{}, error) { if !ok { return } - if _, ok := pass.TypesInfo.TypeOf(m.State["slice"].(*ast.Ident)).Underlying().(*types.Slice); !ok { + typ := pass.TypesInfo.TypeOf(m.State["slice"].(*ast.Ident)) + // The pattern probably needs a core type, but All is fine, too. Either way we only accept slices. 
+ if !typeutil.All(typ, typeutil.IsSlice) { return } @@ -1161,7 +1275,7 @@ func CheckLoopSlide(pass *analysis.Pass) (interface{}, error) { } var ( - checkMakeLenCapQ1 = pattern.MustParse(`(CallExpr (Builtin "make") [typ size@(BasicLit "INT" "0")])`) + checkMakeLenCapQ1 = pattern.MustParse(`(CallExpr (Builtin "make") [typ size@(IntegerLiteral "0")])`) checkMakeLenCapQ2 = pattern.MustParse(`(CallExpr (Builtin "make") [typ size size])`) ) @@ -1174,11 +1288,11 @@ func CheckMakeLenCap(pass *analysis.Pass) (interface{}, error) { if m, ok := code.Match(pass, checkMakeLenCapQ1, node); ok { T := m.State["typ"].(ast.Expr) size := m.State["size"].(ast.Node) - switch pass.TypesInfo.TypeOf(T).Underlying().(type) { - case *types.Slice, *types.Map: - return + + if _, ok := typeutil.CoreType(pass.TypesInfo.TypeOf(T)).Underlying().(*types.Chan); ok { + report.Report(pass, size, fmt.Sprintf("should use make(%s) instead", report.Render(pass, T)), report.FilterGenerated()) } - report.Report(pass, size, fmt.Sprintf("should use make(%s) instead", report.Render(pass, T)), report.FilterGenerated()) + } else if m, ok := code.Match(pass, checkMakeLenCapQ2, node); ok { // TODO(dh): don't consider sizes identical if they're // dynamic. for example: make(T, <-ch, <-ch). @@ -1441,6 +1555,9 @@ func CheckRedundantSprintf(pass *analysis.Pass) (interface{}, error) { return } typ := pass.TypesInfo.TypeOf(arg) + if typeparams.IsTypeParam(typ) { + return + } irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg if types.TypeString(typ, nil) == "reflect.Value" { @@ -1526,13 +1643,21 @@ func CheckNilCheckAroundRange(pass *analysis.Pass) (interface{}, error) { if !ok { return } - switch m.State["x"].(types.Object).Type().Underlying().(type) { - case *types.Slice, *types.Map: - report.Report(pass, node, "unnecessary nil check around range", - report.ShortRange(), - report.FilterGenerated()) - + ok = typeutil.All(m.State["x"].(types.Object).Type(), func(term *typeparams.Term) bool { + switch term.Type().Underlying().(type) { + case *types.Slice, *types.Map: + return true + case *typeparams.TypeParam, *types.Chan, *types.Pointer: + return false + default: + lint.ExhaustiveTypeSwitch(term.Type().Underlying()) + return false + } + }) + if !ok { + return } + report.Report(pass, node, "unnecessary nil check around range", report.ShortRange(), report.FilterGenerated()) } code.Preorder(pass, fn, (*ast.IfStmt)(nil)) return nil, nil @@ -1726,7 +1851,7 @@ func CheckSimplifyTypeSwitch(pass *analysis.Pass) (interface{}, error) { report.Render(pass, ident), report.Render(pass, ident)) if canSuggestFix { var edits []analysis.TextEdit - edits = append(edits, edit.ReplaceWithPattern(pass, checkSimplifyTypeSwitchR, m.State, expr)) + edits = append(edits, edit.ReplaceWithPattern(pass.Fset, expr, checkSimplifyTypeSwitchR, m.State)) for _, offender := range allOffenders { edits = append(edits, edit.ReplaceWithNode(pass.Fset, offender, offender.X)) } @@ -1780,7 +1905,7 @@ var checkUnnecessaryGuardQ = pattern.MustParse(` (AssignStmt [(Ident "_") ok] ":=" indexexpr@(IndexExpr _ _)) ok set@(IncDecStmt indexexpr "++") - (AssignStmt indexexpr "=" (BasicLit "INT" "1"))))`) + (AssignStmt indexexpr "=" (IntegerLiteral "1"))))`) func CheckUnnecessaryGuard(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { @@ -1809,7 +1934,7 @@ func CheckElaborateSleep(pass *analysis.Pass) (interface{}, error) { report.Report(pass, node, "should use time.Sleep instead of elaborate way of sleeping", report.ShortRange(), report.FilterGenerated(), - 
report.Fixes(edit.Fix("Use time.Sleep", edit.ReplaceWithPattern(pass, checkElaborateSleepR, m.State, node)))) + report.Fixes(edit.Fix("Use time.Sleep", edit.ReplaceWithPattern(pass.Fset, node, checkElaborateSleepR, m.State)))) } else { // TODO(dh): we could make a suggested fix if the body // doesn't declare or shadow any identifiers @@ -1823,23 +1948,80 @@ func CheckElaborateSleep(pass *analysis.Pass) (interface{}, error) { return nil, nil } -var checkPrintSprintQ = pattern.MustParse(` - (Or +var ( + checkPrintSprintQ = pattern.MustParse(` + (Or + (CallExpr + fn@(Or + (Function "fmt.Print") + (Function "fmt.Sprint") + (Function "fmt.Println") + (Function "fmt.Sprintln")) + [(CallExpr (Function "fmt.Sprintf") f:_)]) + (CallExpr + fn@(Or + (Function "fmt.Fprint") + (Function "fmt.Fprintln")) + [_ (CallExpr (Function "fmt.Sprintf") f:_)]))`) + + checkTestingErrorSprintfQ = pattern.MustParse(` (CallExpr - fn@(Or - (Function "fmt.Print") - (Function "fmt.Sprint") - (Function "fmt.Println") - (Function "fmt.Sprintln")) - [(CallExpr (Function "fmt.Sprintf") f:_)]) + sel@(SelectorExpr + recv + (Ident + name@(Or + "Error" + "Fatal" + "Fatalln" + "Log" + "Panic" + "Panicln" + "Print" + "Println" + "Skip"))) + [(CallExpr (Function "fmt.Sprintf") args)])`) + + checkLogSprintfQ = pattern.MustParse(` (CallExpr - fn@(Or - (Function "fmt.Fprint") - (Function "fmt.Fprintln")) - [_ (CallExpr (Function "fmt.Sprintf") f:_)]))`) + (Function + (Or + "log.Fatal" + "log.Fatalln" + "log.Panic" + "log.Panicln" + "log.Print" + "log.Println")) + [(CallExpr (Function "fmt.Sprintf") args)])`) + + checkSprintfMapping = map[string]struct { + recv string + alternative string + }{ + "(*testing.common).Error": {"(*testing.common)", "Errorf"}, + "(testing.TB).Error": {"(testing.TB)", "Errorf"}, + "(*testing.common).Fatal": {"(*testing.common)", "Fatalf"}, + "(testing.TB).Fatal": {"(testing.TB)", "Fatalf"}, + "(*testing.common).Log": {"(*testing.common)", "Logf"}, + "(testing.TB).Log": {"(testing.TB)", "Logf"}, + "(*testing.common).Skip": {"(*testing.common)", "Skipf"}, + "(testing.TB).Skip": {"(testing.TB)", "Skipf"}, + "(*log.Logger).Fatal": {"(*log.Logger)", "Fatalf"}, + "(*log.Logger).Fatalln": {"(*log.Logger)", "Fatalf"}, + "(*log.Logger).Panic": {"(*log.Logger)", "Panicf"}, + "(*log.Logger).Panicln": {"(*log.Logger)", "Panicf"}, + "(*log.Logger).Print": {"(*log.Logger)", "Printf"}, + "(*log.Logger).Println": {"(*log.Logger)", "Printf"}, + "log.Fatal": {"", "log.Fatalf"}, + "log.Fatalln": {"", "log.Fatalf"}, + "log.Panic": {"", "log.Panicf"}, + "log.Panicln": {"", "log.Panicf"}, + "log.Print": {"", "log.Printf"}, + "log.Println": {"", "log.Printf"}, + } +) func CheckPrintSprintf(pass *analysis.Pass) (interface{}, error) { - fn := func(node ast.Node) { + fmtPrintf := func(node ast.Node) { m, ok := code.Match(pass, checkPrintSprintQ, node) if !ok { return @@ -1866,6 +2048,57 @@ func CheckPrintSprintf(pass *analysis.Pass) (interface{}, error) { report.Report(pass, node, msg, report.FilterGenerated()) } + + methSprintf := func(node ast.Node) { + m, ok := code.Match(pass, checkTestingErrorSprintfQ, node) + if !ok { + return + } + mapped, ok := checkSprintfMapping[code.CallName(pass, node.(*ast.CallExpr))] + if !ok { + return + } + + // Ensure that Errorf/Fatalf refer to the right method + recvTV, ok := pass.TypesInfo.Types[m.State["recv"].(ast.Expr)] + if !ok { + return + } + obj, _, _ := types.LookupFieldOrMethod(recvTV.Type, recvTV.Addressable(), nil, mapped.alternative) + f, ok := obj.(*types.Func) + if !ok { + return + } 
+ if typeutil.FuncName(f) != mapped.recv+"."+mapped.alternative { + return + } + + alt := &ast.SelectorExpr{ + X: m.State["recv"].(ast.Expr), + Sel: &ast.Ident{Name: mapped.alternative}, + } + report.Report(pass, node, fmt.Sprintf("should use %s(...) instead of %s(fmt.Sprintf(...))", report.Render(pass, alt), report.Render(pass, m.State["sel"].(*ast.SelectorExpr)))) + } + + pkgSprintf := func(node ast.Node) { + _, ok := code.Match(pass, checkLogSprintfQ, node) + if !ok { + return + } + callName := code.CallName(pass, node.(*ast.CallExpr)) + mapped, ok := checkSprintfMapping[callName] + if !ok { + return + } + report.Report(pass, node, fmt.Sprintf("should use %s(...) instead of %s(fmt.Sprintf(...))", mapped.alternative, callName)) + } + + fn := func(node ast.Node) { + fmtPrintf(node) + // TODO(dh): add suggested fixes + methSprintf(node) + pkgSprintf(node) + } code.Preorder(pass, fn, (*ast.CallExpr)(nil)) return nil, nil } diff --git a/vendor/honnef.co/go/tools/staticcheck/analysis.go b/vendor/honnef.co/go/tools/staticcheck/analysis.go index c6b39f71c..f45dab09d 100644 --- a/vendor/honnef.co/go/tools/staticcheck/analysis.go +++ b/vendor/honnef.co/go/tools/staticcheck/analysis.go @@ -214,6 +214,22 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckIneffectiveURLQueryModification, Requires: []*analysis.Analyzer{inspect.Analyzer}, }, + "SA4028": { + Run: CheckModuloOne, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + }, + "SA4029": { + Run: CheckIneffectiveSort, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + }, + "SA4030": { + Run: CheckIneffectiveRandInt, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + }, + "SA4031": { + Run: CheckAllocationNilCheck, + Requires: []*analysis.Analyzer{buildir.Analyzer, inspect.Analyzer, facts.TokenFile}, + }, "SA5000": { Run: CheckNilMaps, @@ -299,4 +315,13 @@ var Analyzers = lint.InitializeAnalyzers(Docs, map[string]*analysis.Analyzer{ Run: CheckStaticBitShift, Requires: []*analysis.Analyzer{inspect.Analyzer}, }, + "SA9007": { + Run: CheckBadRemoveAll, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + }, + + "SA9008": { + Run: CheckTypeAssertionShadowingElse, + Requires: []*analysis.Analyzer{inspect.Analyzer, buildir.Analyzer, facts.TokenFile}, + }, }) diff --git a/vendor/honnef.co/go/tools/staticcheck/doc.go b/vendor/honnef.co/go/tools/staticcheck/doc.go index 2157d25b6..3e991b6c4 100644 --- a/vendor/honnef.co/go/tools/staticcheck/doc.go +++ b/vendor/honnef.co/go/tools/staticcheck/doc.go @@ -1,28 +1,33 @@ +// Package staticcheck contains analyzes that find bugs and performance issues. +// Barring the rare false positive, any code flagged by these analyzes needs to be fixed. package staticcheck import "honnef.co/go/tools/analysis/lint" -var Docs = lint.Markdownify(map[string]*lint.Documentation{ +var Docs = lint.Markdownify(map[string]*lint.RawDocumentation{ "SA1000": { Title: `Invalid regular expression`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1001": { Title: `Invalid template`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1002": { - Title: "Invalid format in `time.Parse`", + Title: `Invalid format in \'time.Parse\'`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1003": { - Title: "Unsupported argument to functions in `encoding/binary`", + Title: `Unsupported argument to functions in \'encoding/binary\'`, Text: `The \'encoding/binary\' package can only serialize types with known sizes. 
This precludes the use of the \'int\' and \'uint\' types, as their sizes differ on different architectures. Furthermore, it doesn't support @@ -31,10 +36,11 @@ serializing maps, channels, strings, or functions. Before Go 1.8, \'bool\' wasn't supported, either.`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1004": { - Title: "Suspiciously small untyped constant in `time.Sleep`", + Title: `Suspiciously small untyped constant in \'time.Sleep\'`, Text: `The \'time\'.Sleep function takes a \'time.Duration\' as its only argument. Durations are expressed in nanoseconds. Thus, calling \'time.Sleep(1)\' will sleep for 1 nanosecond. This is a common source of bugs, as sleep @@ -49,10 +55,11 @@ If you truly meant to sleep for a tiny amount of time, use for some amount of nanoseconds.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1005": { - Title: "Invalid first argument to `exec.Command`", + Title: `Invalid first argument to \'exec.Command\'`, Text: `\'os/exec\' runs programs directly (using variants of the fork and exec system calls on Unix systems). This shouldn't be confused with running a command in a shell. The shell will allow for features such as input @@ -73,10 +80,11 @@ Windows, will have a \'/bin/sh\' program: exec.Command("/bin/sh", "-c", "ls | grep Awesome")`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1006": { - Title: `Printf with dynamic first argument and no further arguments`, + Title: `\'Printf\' with dynamic first argument and no further arguments`, Text: `Using \'fmt.Printf\' with a dynamic first argument can lead to unexpected output. The first argument is a format string, where certain character combinations have special meaning. If, for example, a user were to @@ -94,20 +102,22 @@ it would lead to the following output: Similarly, forming the first parameter via string concatenation with user input should be avoided for the same reason. When printing user -input, either use a variant of fmt.Print, or use the \'%s\' Printf verb +input, either use a variant of \'fmt.Print\', or use the \'%s\' Printf verb and pass the string as an argument.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1007": { - Title: "Invalid URL in `net/url.Parse`", + Title: `Invalid URL in \'net/url.Parse\'`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1008": { - Title: "Non-canonical key in `http.Header` map", + Title: `Non-canonical key in \'http.Header\' map`, Text: `Keys in \'http.Header\' maps are canonical, meaning they follow a specific combination of uppercase and lowercase letters. Methods such as \'http.Header.Add\' and \'http.Header.Del\' convert inputs into this canonical @@ -130,44 +140,51 @@ The easiest way of obtaining the canonical form of a key is to use \'http.CanonicalHeaderKey\'.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1010": { - Title: "`(*regexp.Regexp).FindAll` called with `n == 0`, which will always return zero results", - Text: `If n >= 0, the function returns at most n matches/submatches. To + Title: `\'(*regexp.Regexp).FindAll\' called with \'n == 0\', which will always return zero results`, + Text: `If \'n >= 0\', the function returns at most \'n\' matches/submatches. 
To return all results, specify a negative number.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, // MergeIfAny if we only flag literals, not named constants }, "SA1011": { - Title: "Various methods in the `strings` package expect valid UTF-8, but invalid input is provided", + Title: `Various methods in the \"strings\" package expect valid UTF-8, but invalid input is provided`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1012": { - Title: "A nil `context.Context` is being passed to a function, consider using `context.TODO` instead", + Title: `A nil \'context.Context\' is being passed to a function, consider using \'context.TODO\' instead`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1013": { - Title: "`io.Seeker.Seek` is being called with the whence constant as the first argument, but it should be the second", + Title: `\'io.Seeker.Seek\' is being called with the whence constant as the first argument, but it should be the second`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1014": { - Title: "Non-pointer value passed to `Unmarshal` or `Decode`", + Title: `Non-pointer value passed to \'Unmarshal\' or \'Decode\'`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1015": { - Title: "Using `time.Tick` in a way that will leak. Consider using `time.NewTicker`, and only use `time.Tick` in tests, commands and endless functions", + Title: `Using \'time.Tick\' in a way that will leak. Consider using \'time.NewTicker\', and only use \'time.Tick\' in tests, commands and endless functions`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1016": { @@ -178,10 +195,11 @@ never passed to the process, but instead handled directly by the kernel. It is therefore pointless to try and handle these signals.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1017": { - Title: "Channels used with `os/signal.Notify` should be buffered", + Title: `Channels used with \'os/signal.Notify\' should be buffered`, Text: `The \'os/signal\' package uses non-blocking channel sends when delivering signals. If the receiving end of the channel isn't ready and the channel is either unbuffered or full, the signal will be dropped. To @@ -190,30 +208,34 @@ appropriate size. For a channel used for notification of just one signal value, a buffer of size 1 is sufficient.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1018": { - Title: "`strings.Replace` called with `n == 0`, which does nothing", - Text: `With n == 0, zero instances will be replaced. To replace all -instances, use a negative number, or use strings.ReplaceAll.`, + Title: `\'strings.Replace\' called with \'n == 0\', which does nothing`, + Text: `With \'n == 0\', zero instances will be replaced. 
To replace all +instances, use a negative number, or use \'strings.ReplaceAll\'.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, // MergeIfAny if we only flag literals, not named constants }, "SA1019": { Title: `Using a deprecated function, variable, constant or field`, Since: "2017.1", Severity: lint.SeverityDeprecated, + MergeIf: lint.MergeIfAny, }, "SA1020": { - Title: "Using an invalid host:port pair with a `net.Listen`-related function", + Title: `Using an invalid host:port pair with a \'net.Listen\'-related function`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1021": { - Title: "Using `bytes.Equal` to compare two `net.IP`", + Title: `Using \'bytes.Equal\' to compare two \'net.IP\'`, Text: `A \'net.IP\' stores an IPv4 or IPv6 address as a slice of bytes. The length of the slice for an IPv4 address, however, can be either 4 or 16 bytes long, using different ways of representing IPv4 addresses. In @@ -221,13 +243,15 @@ order to correctly compare two \'net.IP\'s, the \'net.IP.Equal\' method should be used, as it takes both representations into account.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1023": { - Title: "Modifying the buffer in an `io.Writer` implementation", + Title: `Modifying the buffer in an \'io.Writer\' implementation`, Text: `\'Write\' must not modify the slice data, even temporarily.`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1024": { @@ -236,7 +260,7 @@ be used, as it takes both representations into account.`, prefixes. A cutset is treated as a set of characters to remove from a string. For example, - strings.TrimLeft("42133word", "1234")) + strings.TrimLeft("42133word", "1234") will result in the string \'"word"\' – any characters that are 1, 2, 3 or 4 are cut from the left of the string. @@ -244,18 +268,21 @@ will result in the string \'"word"\' – any characters that are 1, 2, 3 or In order to remove one string from another, use \'strings.TrimPrefix\' instead.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1025": { - Title: "It is not possible to use `(*time.Timer).Reset`'s return value correctly", + Title: `It is not possible to use \'(*time.Timer).Reset\''s return value correctly`, Since: "2019.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1026": { Title: `Cannot marshal channels or functions`, Since: "2019.2", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1027": { @@ -269,42 +296,47 @@ You can use the structlayout tool to inspect the alignment of fields in a struct.`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1028": { - Title: "`sort.Slice` can only be used on slices", - Text: `The first argument of sort.Slice must be a slice.`, + Title: `\'sort.Slice\' can only be used on slices`, + Text: `The first argument of \'sort.Slice\' must be a slice.`, Since: "2020.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA1029": { - Title: "Inappropriate key in call to `context.WithValue`", + Title: `Inappropriate key in call to \'context.WithValue\'`, Text: `The provided key must be comparable and should not be -of type string or any other built-in type to avoid collisions between +of type \'string\' or any other built-in type to avoid collisions between packages using context. Users of \'WithValue\' should define their own types for keys. 
To avoid allocating when assigning to an \'interface{}\', -context keys often have concrete type struct{}. Alternatively, +context keys often have concrete type \'struct{}\'. Alternatively, exported context key variables' static type should be a pointer or interface.`, Since: "2020.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA1030": { - Title: "Invalid argument in call to a `strconv` function", + Title: `Invalid argument in call to a \'strconv\' function`, Text: `This check validates the format, number base and bit size arguments of the various parsing and formatting functions in \'strconv\'.`, Since: "2021.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA2000": { - Title: "`sync.WaitGroup.Add` called inside the goroutine, leading to a race condition", + Title: `\'sync.WaitGroup.Add\' called inside the goroutine, leading to a race condition`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA2001": { @@ -327,154 +359,171 @@ comments with a \'//lint:ignore\' directive can be used to suppress this rare false positive.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA2002": { - Title: "Called `testing.T.FailNow` or `SkipNow` in a goroutine, which isn't allowed", + Title: `Called \'testing.T.FailNow\' or \'SkipNow\' in a goroutine, which isn't allowed`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA2003": { - Title: "Deferred `Lock` right after locking, likely meant to defer `Unlock` instead", + Title: `Deferred \'Lock\' right after locking, likely meant to defer \'Unlock\' instead`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA3000": { - Title: "`TestMain` doesn't call `os.Exit`, hiding test failures", - Text: `Test executables (and in turn \'go test\') exit with a non-zero status -code if any tests failed. When specifying your own TestMain function, + Title: `\'TestMain\' doesn't call \'os.Exit\', hiding test failures`, + Text: `Test executables (and in turn \"go test\") exit with a non-zero status +code if any tests failed. When specifying your own \'TestMain\' function, it is your responsibility to arrange for this, by calling \'os.Exit\' with the correct code. The correct code is returned by \'(*testing.M).Run\', so -the usual way of implementing TestMain is to end it with +the usual way of implementing \'TestMain\' is to end it with \'os.Exit(m.Run())\'.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA3001": { - Title: "Assigning to `b.N` in benchmarks distorts the results", + Title: `Assigning to \'b.N\' in benchmarks distorts the results`, Text: `The testing package dynamically sets \'b.N\' to improve the reliability of benchmarks and uses it in computations to determine the duration of a single operation. 
Benchmark code must not alter \'b.N\' as this would falsify results.`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA4000": { - Title: `Boolean expression has identical expressions on both sides`, + Title: `Binary operator has identical expressions on both sides`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4001": { - Title: "`&*x` gets simplified to `x`, it does not copy `x`", - Since: "2017.1", - Severity: lint.SeverityWarning, - }, - - "SA4002": { - Title: `Comparing strings with known different sizes has predictable results`, + Title: `\'&*x\' gets simplified to \'x\', it does not copy \'x\'`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4003": { Title: `Comparing unsigned values against negative values is pointless`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4004": { Title: `The loop exits unconditionally after one iteration`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4005": { Title: `Field assignment that will never be observed. Did you mean to use a pointer receiver?`, Since: "2021.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4006": { Title: `A value assigned to a variable is never read before being overwritten. Forgotten error check or dead code?`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4008": { Title: `The variable in the loop condition never changes, are you incrementing the wrong variable?`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4009": { Title: `A function argument is overwritten before its first use`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4010": { - Title: "The result of `append` will never be observed anywhere", + Title: `The result of \'append\' will never be observed anywhere`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4011": { Title: `Break statement with no effect. Did you mean to break out of an outer loop?`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4012": { Title: `Comparing a value against NaN even though no value is equal to NaN`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4013": { - Title: "Negating a boolean twice (`!!b`) is the same as writing `b`. This is either redundant, or a typo.", + Title: `Negating a boolean twice (\'!!b\') is the same as writing \'b\'. 
This is either redundant, or a typo.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4014": { Title: `An if/else if chain has repeated conditions and no side-effects; if the condition didn't match the first time, it won't match the second time, either`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4015": { - Title: "Calling functions like `math.Ceil` on floats converted from integers doesn't do anything useful", + Title: `Calling functions like \'math.Ceil\' on floats converted from integers doesn't do anything useful`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4016": { - Title: "Certain bitwise operations, such as `x ^ 0`, do not do anything useful", + Title: `Certain bitwise operations, such as \'x ^ 0\', do not do anything useful`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, // MergeIfAny if we only flag literals, not named constants }, "SA4017": { Title: `A pure function's return value is discarded, making the call pointless`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4018": { Title: `Self-assignment of variables`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4019": { Title: `Multiple, identical build constraints in the same file`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4020": { @@ -547,19 +596,22 @@ the following type switch will have an unreachable case clause: and therefore \'doSomething()\''s return value implements both.`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA4021": { - Title: "`x = append(y)` is equivalent to `x = y`", + Title: `\"x = append(y)\" is equivalent to \"x = y\"`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4022": { Title: `Comparing the address of a variable against nil`, - Text: `Code such as \'if &x == nil\' is meaningless, because taking the address of a variable always yields a non-nil pointer.`, + Text: `Code such as \"if &x == nil\" is meaningless, because taking the address of a variable always yields a non-nil pointer.`, Since: "2020.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4023": { @@ -623,6 +675,7 @@ https://golang.org/doc/faq#nil_error, licensed under the Creative Commons Attribution 3.0 License.`, Since: "2020.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, // TODO should this be MergeIfAll? }, "SA4024": { @@ -638,12 +691,13 @@ Example: }`, Since: "2021.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4025": { Title: "Integer division of literals that results in zero", Text: `When dividing two integer constants, the result will -also be an integer. Thus, a division such as '2 / 3' results in '0'. +also be an integer. Thus, a division such as \'2 / 3\' results in \'0\'. This is true for all of the following examples: _ = 2 / 3 @@ -658,6 +712,7 @@ division involving named constants, to avoid noisy positives. 
`, Since: "2021.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA4026": { @@ -673,9 +728,11 @@ To explicitly and reliably create a negative zero, you can use the \'math.Copysign\' function: \'math.Copysign(0, -1)\'.`, Since: "2021.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, + "SA4027": { - Title: "(*net/url.URL).Query returns a copy, modifying it doesn't change the URL", + Title: `\'(*net/url.URL).Query\' returns a copy, modifying it doesn't change the URL`, Text: `\'(*net/url.URL).Query\' parses the current value of \'net/url.URL.RawQuery\' and returns it as a map of type \'net/url.Values\'. Subsequent changes to this map will not affect the URL unless the map gets encoded and @@ -685,24 +742,70 @@ As a consequence, the following code pattern is an expensive no-op: \'u.Query().Add(key, value)\'.`, Since: "2021.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, + }, + + "SA4028": { + Title: `\'x % 1\' is always zero`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, // MergeIfAny if we only flag literals, not named constants + }, + + "SA4029": { + Title: "Ineffective attempt at sorting slice", + Text: ` +\'sort.Float64Slice\', \'sort.IntSlice\', and \'sort.StringSlice\' are +types, not functions. Doing \'x = sort.StringSlice(x)\' does nothing, +especially not sort any values. The correct usage is +\'sort.Sort(sort.StringSlice(x))\' or \'sort.StringSlice(x).Sort()\', +but there are more convenient helpers, namely \'sort.Float64s\', +\'sort.Ints\', and \'sort.Strings\'. +`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, + }, + + "SA4030": { + Title: "Ineffective attempt at generating random number", + Text: ` +Functions in the \'math/rand\' package that accept upper limits, such +as \'Intn\', generate random numbers in the half-open interval [0,n). In +other words, the generated numbers will be \'>= 0\' and \'< n\' – they +don't include \'n\'. \'rand.Intn(1)\' therefore doesn't generate \'0\' +or \'1\', it always generates \'0\'.`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, + }, + + "SA4031": { + Title: `Checking never-nil value against nil`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5000": { Title: `Assignment to nil map`, Since: "2017.1", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA5001": { - Title: "Deferring `Close` before checking for a possible error", + Title: `Deferring \'Close\' before checking for a possible error`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5002": { - Title: "The empty for loop (`for {}`) spins and can block the scheduler", + Title: `The empty for loop (\"for {}\") spins and can block the scheduler`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5003": { @@ -712,12 +815,14 @@ block. In a function that never returns, i.e. one containing an infinite loop, defers will never execute.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5004": { - Title: "`for { select { ...` with an empty default branch spins", + Title: `\"for { select { ...\" with an empty default branch spins`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5005": { @@ -735,12 +840,7 @@ on the object. 
That way, the number of references can temporarily go to zero before the object is being passed to the finalizer.`, Since: "2017.1", Severity: lint.SeverityWarning, - }, - - "SA5006": { - Title: `Slice index out of bounds`, - Since: "2017.1", - Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA5007": { @@ -756,18 +856,21 @@ safe to use. Go, however, does not implement TCO, and as such a loop should be used instead.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5008": { Title: `Invalid struct tag`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5009": { Title: `Invalid Printf call`, Since: "2019.2", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA5010": { @@ -791,6 +894,10 @@ either.`, Since: "2020.1", Severity: lint.SeverityWarning, + // Technically this should be MergeIfAll, but the Go compiler + // already flags some impossible type assertions, so + // MergeIfAny is consistent with the compiler. + MergeIf: lint.MergeIfAny, }, "SA5011": { @@ -845,7 +952,7 @@ conditionals. Consider the following example: fmt.Println(*x) } -Staticcheck will flag the dereference of x, even though it is perfectly +Staticcheck will flag the dereference of \'x\', even though it is perfectly safe. Staticcheck is not able to deduce that a call to Fatal will exit the program. For the time being, the easiest workaround is to modify the definition of Fatal like so: @@ -860,6 +967,7 @@ logrus. Please file an issue if we're missing support for a popular package.`, Since: "2020.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA5012": { @@ -870,12 +978,14 @@ For example, \'strings.NewReplacer\' takes pairs of old and new strings, and calling it with an odd number of elements would be an error.`, Since: "2020.2", Severity: lint.SeverityError, + MergeIf: lint.MergeIfAny, }, "SA6000": { - Title: "Using `regexp.Match` or related in a loop, should use `regexp.Compile`", + Title: `Using \'regexp.Match\' or related in a loop, should use \'regexp.Compile\'`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA6001": { @@ -907,10 +1017,11 @@ For some history on this optimization, check out commit f5f5a8b6209f84961687d993b93ea0d397f5d5bf in the Go repository.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA6002": { - Title: "Storing non-pointer values in `sync.Pool` allocates memory", + Title: `Storing non-pointer values in \'sync.Pool\' allocates memory`, Text: `A \'sync.Pool\' is used to avoid unnecessary allocations and reduce the amount of work the garbage collector has to do. 
@@ -925,6 +1036,7 @@ See the comments on https://go-review.googlesource.com/c/go/+/24371 that discuss this problem.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA6003": { @@ -948,10 +1060,11 @@ first one yields byte offsets, while the second one yields indices in the slice of runes.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA6005": { - Title: "Inefficient string comparison with `strings.ToLower` or `strings.ToUpper`", + Title: `Inefficient string comparison with \'strings.ToLower\' or \'strings.ToUpper\'`, Text: `Converting two strings to the same case and comparing them like so if strings.ToLower(s1) == strings.ToLower(s2) { @@ -973,24 +1086,28 @@ For a more in-depth explanation of this issue, see https://blog.digitalocean.com/how-to-efficiently-compare-strings-in-go/`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA9001": { Title: `Defers in range loops may not run when you expect them to`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA9002": { - Title: "Using a non-octal `os.FileMode` that looks like it was meant to be in octal.", + Title: `Using a non-octal \'os.FileMode\' that looks like it was meant to be in octal.`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA9003": { Title: `Empty body in an if or else branch`, Since: "2017.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA9004": { @@ -1084,11 +1201,13 @@ This code will output as \'EnumSecond\' has no explicit type, and thus defaults to \'int\'.`, Since: "2019.1", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, "SA9005": { Title: `Trying to marshal a struct with no public fields nor custom marshaling`, - Text: `The \'encoding/json\' and \'encoding/xml\' packages only operate on exported + Text: ` +The \'encoding/json\' and \'encoding/xml\' packages only operate on exported fields in structs, not unexported ones. It is usually an error to try to (un)marshal structs that only consist of unexported fields. @@ -1097,6 +1216,7 @@ marshaling behavior, e.g. via \'MarshalJSON\' methods. It will also not flag empty structs.`, Since: "2019.2", Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAll, }, "SA9006": { @@ -1122,5 +1242,61 @@ positives in somewhat exotic but valid bit twiddling tricks: }`, Since: "2020.2", Severity: lint.SeverityWarning, + // Technically this should be MergeIfAll, because the type of + // v might be different for different build tags. Practically, + // don't write code that depends on that. + MergeIf: lint.MergeIfAny, + }, + + "SA9007": { + Title: "Deleting a directory that shouldn't be deleted", + Text: ` +It is virtually never correct to delete system directories such as +/tmp or the user's home directory. However, it can be fairly easy to +do by mistake, for example by mistakingly using \'os.TempDir\' instead +of \'ioutil.TempDir\', or by forgetting to add a suffix to the result +of \'os.UserHomeDir\'. + +Writing + + d := os.TempDir() + defer os.RemoveAll(d) + +in your unit tests will have a devastating effect on the stability of your system. 
+ +This check flags attempts at deleting the following directories: + +- os.TempDir +- os.UserCacheDir +- os.UserConfigDir +- os.UserHomeDir +`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, + }, + + "SA9008": { + Title: `\'else\' branch of a type assertion is probably not reading the right value`, + Text: ` +When declaring variables as part of an \'if\' statement (like in \"if +foo := ...; foo {\"), the same variables will also be in the scope of +the \'else\' branch. This means that in the following example + + if x, ok := x.(int); ok { + // ... + } else { + fmt.Println("unexpected type %T", x) + } + +\'x\' in the \'else\' branch will refer to the \'x\' from \'x, ok +:=\'; it will not refer to the \'x\' that is being type-asserted. The +result of a failed type assertion is the zero value of the type that +is being asserted to, so \'x\' in the else branch will always have the +value \'0\' and the type \'int\'. +`, + Since: "2022.1", + Severity: lint.SeverityWarning, + MergeIf: lint.MergeIfAny, }, }) diff --git a/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go index f5e6c4010..6b9ed75f1 100644 --- a/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go +++ b/vendor/honnef.co/go/tools/staticcheck/fakejson/encode.go @@ -17,6 +17,7 @@ import ( "strings" "unicode" + "golang.org/x/exp/typeparams" "honnef.co/go/tools/staticcheck/fakereflect" ) @@ -106,6 +107,14 @@ func (enc *encoder) newTypeEncoder(t fakereflect.TypeAndCanAddr, stack string) * } func (enc *encoder) newMapEncoder(t fakereflect.TypeAndCanAddr, stack string) *UnsupportedTypeError { + if typeparams.IsTypeParam(t.Key().Type) { + // We don't know enough about the concrete instantiation to say much about the key. The only time we could make + // a definite "this key is bad" statement is if the type parameter is constrained by type terms, none of which + // are tilde terms, none of which are a basic type. In all other cases, the key might implement TextMarshaler. + // It doesn't seem worth checking for that one single case. 
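+ // Instead, skip checking the key and continue with the map's element type.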
+ return enc.newTypeEncoder(t.Elem(), stack+"[k]") + } + switch t.Key().Type.Underlying().(type) { case *types.Basic: default: diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go index a5ff21d62..7b2f97c29 100644 --- a/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/marshal.go @@ -43,11 +43,7 @@ func (enc *Encoder) Encode(v types.Type) error { func implementsMarshaler(v fakereflect.TypeAndCanAddr) bool { t := v.Type - named, ok := t.(*types.Named) - if !ok { - return false - } - obj, _, _ := types.LookupFieldOrMethod(named, false, nil, "MarshalXML") + obj, _, _ := types.LookupFieldOrMethod(t, false, nil, "MarshalXML") if obj == nil { return false } @@ -77,11 +73,7 @@ func implementsMarshaler(v fakereflect.TypeAndCanAddr) bool { func implementsMarshalerAttr(v fakereflect.TypeAndCanAddr) bool { t := v.Type - named, ok := t.(*types.Named) - if !ok { - return false - } - obj, _, _ := types.LookupFieldOrMethod(named, false, nil, "MarshalXMLAttr") + obj, _, _ := types.LookupFieldOrMethod(t, false, nil, "MarshalXMLAttr") if obj == nil { return false } @@ -121,6 +113,15 @@ var textMarshalerType = types.NewInterfaceType([]*types.Func{ var N = 0 +type CyclicTypeError struct { + Type types.Type + Path string +} + +func (err *CyclicTypeError) Error() string { + return "cyclic type" +} + // marshalValue writes one or more XML elements representing val. // If val was obtained from a struct field, finfo must have its details. func (e *Encoder) marshalValue(val fakereflect.TypeAndCanAddr, finfo *fieldInfo, startTemplate *StartElement, stack string) error { @@ -130,11 +131,17 @@ func (e *Encoder) marshalValue(val fakereflect.TypeAndCanAddr, finfo *fieldInfo, e.seen[val] = struct{}{} // Drill into interfaces and pointers. + seen := map[fakereflect.TypeAndCanAddr]struct{}{} for val.IsInterface() || val.IsPtr() { if val.IsInterface() { return nil } val = val.Elem() + if _, ok := seen[val]; ok { + // Loop in type graph, e.g. 'type P *P' + return &CyclicTypeError{val.Type, stack} + } + seen[val] = struct{}{} } // Check for marshaler. @@ -337,8 +344,6 @@ func (e *Encoder) marshalStruct(tinfo *typeInfo, val fakereflect.TypeAndCanAddr, continue } } - - vf = indirect(vf) continue case fComment: diff --git a/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go b/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go index 63f48c418..07e5abe79 100644 --- a/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go +++ b/vendor/honnef.co/go/tools/staticcheck/fakexml/typeinfo.go @@ -249,8 +249,14 @@ func StructFieldInfo(f fakereflect.StructField) (*fieldInfo, error) { // in case it exists and has a valid xml field tag, otherwise // it returns nil. func lookupXMLName(typ fakereflect.TypeAndCanAddr) (xmlname *fieldInfo) { + seen := map[fakereflect.TypeAndCanAddr]struct{}{} for typ.IsPtr() { typ = typ.Elem() + if _, ok := seen[typ]; ok { + // Loop in type graph, e.g. 'type P *P' + return nil + } + seen[typ] = struct{}{} } if !typ.IsStruct() { return nil diff --git a/vendor/honnef.co/go/tools/staticcheck/lint.go b/vendor/honnef.co/go/tools/staticcheck/lint.go index d3fa5441f..2301ba353 100644 --- a/vendor/honnef.co/go/tools/staticcheck/lint.go +++ b/vendor/honnef.co/go/tools/staticcheck/lint.go @@ -1,4 +1,3 @@ -// Package staticcheck contains a linter for Go source code. 
package staticcheck import ( @@ -9,6 +8,7 @@ import ( "go/types" htmltemplate "html/template" "net/http" + "os" "reflect" "regexp" "regexp/syntax" @@ -38,6 +38,7 @@ import ( "honnef.co/go/tools/staticcheck/fakereflect" "honnef.co/go/tools/staticcheck/fakexml" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" @@ -896,6 +897,13 @@ func checkUnsupportedMarshalXML(call *Call) { } else { arg.Invalid(fmt.Sprintf("trying to marshal unsupported type %s, via %s", typ, err.Path)) } + case *fakexml.CyclicTypeError: + typ := types.TypeString(err.Type, types.RelativeTo(arg.Value.Value.Parent().Pkg.Pkg)) + if err.Path == "x" { + arg.Invalid(fmt.Sprintf("trying to marshal cyclic type %s", typ)) + } else { + arg.Invalid(fmt.Sprintf("trying to marshal cyclic type %s, via %s", typ, err.Path)) + } case *fakexml.TagPathError: // Vet does a better job at reporting this error, because it can flag the actual struct tags, not just the call to Marshal default: @@ -915,6 +923,14 @@ func isInLoop(b *ir.BasicBlock) bool { } func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { + isSignal := func(pass *analysis.Pass, expr ast.Expr, name string) bool { + if expr, ok := expr.(*ast.SelectorExpr); ok { + return code.SelectorName(pass, expr) == name + } else { + return false + } + } + fn := func(node ast.Node) { call := node.(*ast.CallExpr) if !code.IsCallToAny(pass, call, @@ -924,22 +940,22 @@ func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { hasSigterm := false for _, arg := range call.Args { - if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") { + if conv, ok := arg.(*ast.CallExpr); ok && isSignal(pass, conv.Fun, "os.Signal") { arg = conv.Args[0] } - if isName(pass, arg, "syscall.SIGTERM") { + if isSignal(pass, arg, "syscall.SIGTERM") { hasSigterm = true break } } for i, arg := range call.Args { - if conv, ok := arg.(*ast.CallExpr); ok && isName(pass, conv.Fun, "os.Signal") { + if conv, ok := arg.(*ast.CallExpr); ok && isSignal(pass, conv.Fun, "os.Signal") { arg = conv.Args[0] } - if isName(pass, arg, "os.Kill") || isName(pass, arg, "syscall.SIGKILL") { + if isSignal(pass, arg, "os.Kill") || isSignal(pass, arg, "syscall.SIGKILL") { var fixes []analysis.SuggestedFix if !hasSigterm { nargs := make([]ast.Expr, len(call.Args)) @@ -966,7 +982,7 @@ func CheckUntrappableSignal(pass *analysis.Pass) (interface{}, error) { fixes = append(fixes, edit.Fix(fmt.Sprintf("remove %s from list of arguments", report.Render(pass, arg)), edit.ReplaceWithNode(pass.Fset, call, &ncall))) report.Report(pass, arg, fmt.Sprintf("%s cannot be trapped (did you mean syscall.SIGTERM?)", report.Render(pass, arg)), report.Fixes(fixes...)) } - if isName(pass, arg, "syscall.SIGSTOP") { + if isSignal(pass, arg, "syscall.SIGSTOP") { nargs := make([]ast.Expr, 0, len(call.Args)-1) for j, a := range call.Args { if i == j { @@ -1028,22 +1044,19 @@ func CheckTemplate(pass *analysis.Pass) (interface{}, error) { } var ( + checkTimeSleepConstantPatternQ = pattern.MustParse(`(CallExpr (Function "time.Sleep") lit@(IntegerLiteral value))`) checkTimeSleepConstantPatternRns = pattern.MustParse(`(BinaryExpr duration "*" (SelectorExpr (Ident "time") (Ident "Nanosecond")))`) checkTimeSleepConstantPatternRs = pattern.MustParse(`(BinaryExpr duration "*" (SelectorExpr (Ident "time") (Ident "Second")))`) ) func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { - call 
:= node.(*ast.CallExpr) - if !code.IsCallTo(pass, call, "time.Sleep") { - return - } - lit, ok := call.Args[knowledge.Arg("time.Sleep.d")].(*ast.BasicLit) + m, ok := code.Match(pass, checkTimeSleepConstantPatternQ, node) if !ok { return } - n, err := strconv.Atoi(lit.Value) - if err != nil { + n, ok := constant.Int64Val(m.State["value"].(types.TypeAndValue).Value) + if !ok { return } if n == 0 || n > 120 { @@ -1053,10 +1066,11 @@ func CheckTimeSleepConstant(pass *analysis.Pass) (interface{}, error) { return } + lit := m.State["lit"].(ast.Node) report.Report(pass, lit, fmt.Sprintf("sleeping for %d nanoseconds is probably a bug; be explicit if it isn't", n), report.Fixes( - edit.Fix("explicitly use nanoseconds", edit.ReplaceWithPattern(pass, checkTimeSleepConstantPatternRns, pattern.State{"duration": lit}, lit)), - edit.Fix("use seconds", edit.ReplaceWithPattern(pass, checkTimeSleepConstantPatternRs, pattern.State{"duration": lit}, lit)))) + edit.Fix("explicitly use nanoseconds", edit.ReplaceWithPattern(pass.Fset, lit, checkTimeSleepConstantPatternRns, pattern.State{"duration": lit})), + edit.Fix("use seconds", edit.ReplaceWithPattern(pass.Fset, lit, checkTimeSleepConstantPatternRs, pattern.State{"duration": lit})))) } code.Preorder(pass, fn, (*ast.CallExpr)(nil)) return nil, nil @@ -1170,7 +1184,7 @@ func CheckDubiousDeferInChannelRangeLoop(pass *analysis.Pass) (interface{}, erro fn := func(node ast.Node) { loop := node.(*ast.RangeStmt) typ := pass.TypesInfo.TypeOf(loop.X) - _, ok := typ.Underlying().(*types.Chan) + _, ok := typeutil.CoreType(typ).(*types.Chan) if !ok { return } @@ -1321,22 +1335,29 @@ func CheckLoopEmptyDefault(pass *analysis.Pass) (interface{}, error) { func CheckLhsRhsIdentical(pass *analysis.Pass) (interface{}, error) { var isFloat func(T types.Type) bool isFloat = func(T types.Type) bool { - switch T := T.Underlying().(type) { - case *types.Basic: - kind := T.Kind() - return kind == types.Float32 || kind == types.Float64 - case *types.Array: - return isFloat(T.Elem()) - case *types.Struct: - for i := 0; i < T.NumFields(); i++ { - if !isFloat(T.Field(i).Type()) { - return false - } - } + tset := typeutil.NewTypeSet(T) + if len(tset.Terms) == 0 { + // no terms, so floats are a possibility return true - default: - return false } + return tset.Any(func(term *typeparams.Term) bool { + switch typ := term.Type().Underlying().(type) { + case *types.Basic: + kind := typ.Kind() + return kind == types.Float32 || kind == types.Float64 + case *types.Array: + return isFloat(typ.Elem()) + case *types.Struct: + for i := 0; i < typ.NumFields(); i++ { + if !isFloat(typ.Field(i).Type()) { + return false + } + } + return true + default: + return false + } + }) } // TODO(dh): this check ignores the existence of side-effects and @@ -1515,7 +1536,7 @@ func CheckUnsafePrintf(pass *analysis.Pass) (interface{}, error) { alt := name[:len(name)-1] report.Report(pass, call, "printf-style function with dynamic format string and no further arguments should use print-style function instead", - report.Fixes(edit.Fix(fmt.Sprintf("use %s instead of %s", alt, name), edit.ReplaceWithString(pass.Fset, call.Fun, alt)))) + report.Fixes(edit.Fix(fmt.Sprintf("use %s instead of %s", alt, name), edit.ReplaceWithString(call.Fun, alt)))) } code.Preorder(pass, fn, (*ast.CallExpr)(nil)) return nil, nil @@ -1724,7 +1745,7 @@ func CheckCanonicalHeaderKey(pass *analysis.Pass) (interface{}, error) { var fix analysis.SuggestedFix switch op.Index.(type) { case *ast.BasicLit: - fix = edit.Fix("canonicalize header key", 
edit.ReplaceWithString(pass.Fset, op.Index, strconv.Quote(canonical))) + fix = edit.Fix("canonicalize header key", edit.ReplaceWithString(op.Index, strconv.Quote(canonical))) case *ast.Ident: call := &ast.CallExpr{ Fun: edit.Selector("http", "CanonicalHeaderKey"), @@ -2013,13 +2034,16 @@ func CheckExtremeComparison(pass *analysis.Pass) (interface{}, error) { report.Report(pass, expr, fmt.Sprintf("every value of type %s is <= %s", basic, max)) } + isZeroLiteral := func(expr ast.Expr) bool { + return code.IsIntegerLiteral(pass, expr, constant.MakeInt64(0)) + } if (basic.Info() & types.IsUnsigned) != 0 { - if (expr.Op == token.LSS && astutil.IsIntLiteral(expr.Y, "0")) || - (expr.Op == token.GTR && astutil.IsIntLiteral(expr.X, "0")) { + if (expr.Op == token.LSS && isZeroLiteral(expr.Y)) || + (expr.Op == token.GTR && isZeroLiteral(expr.X)) { report.Report(pass, expr, fmt.Sprintf("no value of type %s is less than 0", basic)) } - if expr.Op == token.GEQ && astutil.IsIntLiteral(expr.Y, "0") || - expr.Op == token.LEQ && astutil.IsIntLiteral(expr.X, "0") { + if expr.Op == token.GEQ && isZeroLiteral(expr.Y) || + expr.Op == token.LEQ && isZeroLiteral(expr.X) { report.Report(pass, expr, fmt.Sprintf("every value of type %s is >= 0", basic)) } } else { @@ -2263,10 +2287,20 @@ func CheckIneffectiveLoop(pass *analysis.Pass) (interface{}, error) { body = node.Body loop = node case *ast.RangeStmt: - typ := pass.TypesInfo.TypeOf(node.X) - if _, ok := typ.Underlying().(*types.Map); ok { - // looping once over a map is a valid pattern for - // getting an arbitrary element. + ok := typeutil.All(pass.TypesInfo.TypeOf(node.X), func(term *typeparams.Term) bool { + switch term.Type().Underlying().(type) { + case *types.Slice, *types.Chan, *types.Basic, *types.Pointer, *types.Array: + return true + case *types.Map: + // looping once over a map is a valid pattern for + // getting an arbitrary element. + return false + default: + lint.ExhaustiveTypeSwitch(term.Type().Underlying()) + return false + } + }) + if !ok { return true } body = node.Body @@ -2801,7 +2835,6 @@ func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) { } block := site.Block() - canReturn := false for _, b := range fn.Blocks { if block.Dominates(b) { continue @@ -2810,45 +2843,15 @@ func CheckInfiniteRecursion(pass *analysis.Pass) (interface{}, error) { continue } if _, ok := b.Control().(*ir.Return); ok { - canReturn = true - break + return } } - if canReturn { - return - } report.Report(pass, site, "infinite recursive call") }) } return nil, nil } -func objectName(obj types.Object) string { - if obj == nil { - return "" - } - var name string - if obj.Pkg() != nil && obj.Pkg().Scope().Lookup(obj.Name()) == obj { - s := obj.Pkg().Path() - if s != "" { - name += s + "." 
- } - } - name += obj.Name() - return name -} - -func isName(pass *analysis.Pass, expr ast.Expr, name string) bool { - var obj types.Object - switch expr := expr.(type) { - case *ast.Ident: - obj = pass.TypesInfo.ObjectOf(expr) - case *ast.SelectorExpr: - obj = pass.TypesInfo.ObjectOf(expr.Sel) - } - return objectName(obj) == name -} - func CheckLeakyTimeTick(pass *analysis.Pass) (interface{}, error) { for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { if code.IsMainLike(pass) || code.IsInTest(pass, fn) { @@ -2937,11 +2940,13 @@ func CheckRepeatedIfElse(pass *analysis.Pass) (interface{}, error) { func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { binop := node.(*ast.BinaryExpr) - b, ok := pass.TypesInfo.TypeOf(binop).Underlying().(*types.Basic) - if !ok { - return - } - if (b.Info() & types.IsInteger) == 0 { + if !typeutil.All(pass.TypesInfo.TypeOf(binop), func(term *typeparams.Term) bool { + b, ok := term.Type().Underlying().(*types.Basic) + if !ok { + return false + } + return (b.Info() & types.IsInteger) != 0 + }) { return } switch binop.Op { @@ -2951,8 +2956,7 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { // of a pattern, x<<0, x<<8, x<<16, ... return } - switch y := binop.Y.(type) { - case *ast.Ident: + if y, ok := binop.Y.(*ast.Ident); ok { obj, ok := pass.TypesInfo.ObjectOf(y).(*types.Const) if !ok { return @@ -2991,18 +2995,13 @@ func CheckSillyBitwiseOps(pass *analysis.Pass) (interface{}, error) { report.Report(pass, node, fmt.Sprintf("%s always equals %s; %s is defined as iota and has value 0, maybe %s is meant to be 1 << iota?", report.Render(pass, binop), report.Render(pass, binop.X), report.Render(pass, binop.Y), report.Render(pass, binop.Y))) } - case *ast.BasicLit: - if !astutil.IsIntLiteral(binop.Y, "0") { - return - } + } else if code.IsIntegerLiteral(pass, binop.Y, constant.MakeInt64(0)) { switch binop.Op { case token.AND: report.Report(pass, node, fmt.Sprintf("%s always equals 0", report.Render(pass, binop))) case token.OR, token.XOR: report.Report(pass, node, fmt.Sprintf("%s always equals %s", report.Render(pass, binop), report.Render(pass, binop.X))) } - default: - return } } code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) @@ -3043,7 +3042,7 @@ func CheckNonOctalFileMode(pass *analysis.Pass) (interface{}, error) { continue } report.Report(pass, arg, fmt.Sprintf("file mode '%s' evaluates to %#o; did you mean '0%s'?", lit.Value, v, lit.Value), - report.Fixes(edit.Fix("fix octal literal", edit.ReplaceWithString(pass.Fset, arg, "0"+lit.Value)))) + report.Fixes(edit.Fix("fix octal literal", edit.ReplaceWithString(arg, "0"+lit.Value)))) } } } @@ -3097,7 +3096,7 @@ fnLoop: // special case for benchmarks in the fmt package continue } - report.Report(pass, ins, fmt.Sprintf("%s is a pure function but its return value is ignored", callee.Name())) + report.Report(pass, ins, fmt.Sprintf("%s is a pure function but its return value is ignored", callee.Object().Name())) } } } @@ -3111,6 +3110,16 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { // Selectors can appear outside of function literals, e.g. when // declaring package level variables. + isStdlib := func(obj types.Object) bool { + // Modules with no dot in the first path element are reserved for the standard library and tooling. + // This is the best we can currently do. + // Nobody tells us which import paths are part of the standard library. 
+ // + // We check the entire path instead of just the first path element, because the standard library doesn't contain paths with any dots, anyway. + + return !strings.Contains(obj.Pkg().Path(), ".") + } + var tfn types.Object stack := 0 fn := func(node ast.Node, push bool) bool { @@ -3131,6 +3140,9 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { } obj := pass.TypesInfo.ObjectOf(sel.Sel) + if obj_, ok := obj.(*types.Func); ok { + obj = typeparams.OriginMethod(obj_) + } if obj.Pkg() == nil { return true } @@ -3138,6 +3150,7 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { // Don't flag stuff in our own package return true } + if depr, ok := deprs.Objects[obj]; ok { // Note: gopls doesn't correctly run analyzers on // dependencies, so we'll never be able to find deprecated @@ -3147,6 +3160,12 @@ func CheckDeprecated(pass *analysis.Pass) (interface{}, error) { // but we gave up on that, because we wouldn't have access // to the deprecation message. std, ok := knowledge.StdlibDeprecations[code.SelectorName(pass, sel)] + if !ok && isStdlib(obj) { + // Deprecated object in the standard library, but we don't know the details of the deprecation. + // Don't flag it at all, to avoid flagging an object that was deprecated in 1.N when targeting 1.N-1. + // See https://staticcheck.io/issues/1108 for the background on this. + return true + } if ok { switch std.AlternativeAvailableSince { case knowledge.DeprecatedNeverUse: @@ -3430,7 +3449,12 @@ func CheckMapBytesKey(pass *analysis.Pass) (interface{}, error) { if !ok || conv.Type() != types.Universe.Lookup("string").Type() { continue } - if s, ok := conv.X.Type().(*types.Slice); !ok || s.Elem() != types.Universe.Lookup("byte").Type() { + tset := typeutil.NewTypeSet(conv.X.Type()) + // If at least one of the types is []byte, then it's more efficient to inline the conversion + if !tset.Any(func(term *typeparams.Term) bool { + s, ok := term.Type().Underlying().(*types.Slice) + return ok && s.Elem().Underlying() == types.Universe.Lookup("byte").Type() + }) { continue } refs := conv.Referrers() @@ -3746,6 +3770,9 @@ func CheckToLowerToUpperComparison(pass *analysis.Pass) (interface{}, error) { func CheckUnreachableTypeCases(pass *analysis.Pass) (interface{}, error) { // Check if T subsumes V in a type switch. T subsumes V if T is an interface and T's method set is a subset of V's method set. 
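// For example, an earlier case for io.Reader subsumes a later case for *bytes.Buffer, so the later case can never match.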
subsumes := func(T, V types.Type) bool { + if typeparams.IsTypeParam(T) { + return false + } tIface, ok := T.Underlying().(*types.Interface) if !ok { return false @@ -3911,10 +3938,21 @@ func checkJSONTag(pass *analysis.Pass, field *ast.Field, tag string) { case "string": cs++ // only for string, floating point, integer and bool - T := typeutil.Dereference(pass.TypesInfo.TypeOf(field.Type).Underlying()).Underlying() - basic, ok := T.(*types.Basic) - if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { + tset := typeutil.NewTypeSet(pass.TypesInfo.TypeOf(field.Type)) + if len(tset.Terms) == 0 { + // TODO(dh): improve message, call out the use of type parameters report.Report(pass, field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + continue + } + for _, term := range tset.Terms { + T := typeutil.Dereference(term.Type().Underlying()) + for _, term2 := range typeutil.NewTypeSet(T).Terms { + basic, ok := term2.Type().Underlying().(*types.Basic) + if !ok || (basic.Info()&(types.IsBoolean|types.IsInteger|types.IsFloat|types.IsString)) == 0 { + // TODO(dh): improve message, show how we arrived at the type + report.Report(pass, field.Tag, "the JSON string option only applies to fields of type string, floating point, integer or bool, or pointers to those") + } + } } case "inline": ci++ @@ -4039,6 +4077,11 @@ func CheckMaybeNil(pass *analysis.Pass) (interface{}, error) { // if x == nil { println(x) } // _ = *x // + // but we can flag this, because the if's body doesn't use x: + // + // if x == nil { println("this is bad") } + // _ = *x + // // nor many other variations of conditional uses of or assignments to x. // // However, even this trivial implementation finds plenty of @@ -4118,7 +4161,12 @@ func CheckMaybeNil(pass *analysis.Pass) (interface{}, error) { ptr = instr.Addr case *ir.IndexAddr: ptr = instr.X - if _, ok := ptr.Type().Underlying().(*types.Slice); ok { + if typeutil.All(ptr.Type(), func(term *typeparams.Term) bool { + if _, ok := term.Type().Underlying().(*types.Slice); ok { + return true + } + return false + }) { // indexing a nil slice does not cause a nil pointer panic // // Note: This also works around the bad lowering of range loops over slices @@ -4277,7 +4325,7 @@ func findSliceLenChecks(pass *analysis.Pass) { if !ok { continue } - if _, ok := param.Type().Underlying().(*types.Slice); !ok { + if !typeutil.All(param.Type(), typeutil.IsSlice) { continue } @@ -4317,9 +4365,7 @@ func findIndirectSliceLenChecks(pass *analysis.Pass) { continue } - if callee.Pkg == fn.Pkg { - // TODO(dh): are we missing interesting wrappers - // because wrappers don't have Pkg set? 
+ if callee.Pkg == fn.Pkg || callee.Pkg == nil { doFunction(callee) } @@ -4331,12 +4377,12 @@ func findIndirectSliceLenChecks(pass *analysis.Pass) { argi-- } - // TODO(dh): support parameters that have flown through sigmas and phis + // TODO(dh): support parameters that have flown through length-preserving instructions param, ok := arg.(*ir.Parameter) if !ok { continue } - if _, ok := param.Type().Underlying().(*types.Slice); !ok { + if !typeutil.All(param.Type(), typeutil.IsSlice) { continue } @@ -4486,7 +4532,7 @@ func CheckTypedNilInterface(pass *analysis.Pass) (interface{}, error) { if !ok || !(binop.Op == token.EQL || binop.Op == token.NEQ) { continue } - if _, ok := binop.X.Type().Underlying().(*types.Interface); !ok { + if _, ok := binop.X.Type().Underlying().(*types.Interface); !ok || typeparams.IsTypeParam(binop.X.Type()) { // TODO support swapped X and Y continue } @@ -4531,6 +4577,14 @@ func CheckTypedNilInterface(pass *analysis.Pass) (interface{}, error) { default: panic("unreachable") } + + terms, err := typeparams.NormalTerms(x.X.Type()) + if len(terms) == 0 || err != nil { + // Type is a type parameter with no type terms (or we couldn't determine the terms). Such a type + // _can_ be nil when put in an interface value. + continue + } + if report.HasRange(x.X) { report.Report(pass, binop, fmt.Sprintf("this comparison is %s true", qualifier), report.Related(x.X, "the lhs of the comparison gets its value from here and has a concrete type")) @@ -4574,13 +4628,13 @@ func CheckTypedNilInterface(pass *analysis.Pass) (interface{}, error) { var builtinLessThanZeroQ = pattern.MustParse(` (Or (BinaryExpr - (BasicLit "INT" "0") + (IntegerLiteral "0") ">" (CallExpr builtin@(Builtin (Or "len" "cap")) _)) (BinaryExpr (CallExpr builtin@(Builtin (Or "len" "cap")) _) "<" - (BasicLit "INT" "0"))) + (IntegerLiteral "0"))) `) func CheckBuiltinZeroComparison(pass *analysis.Pass) (interface{}, error) { @@ -4598,7 +4652,7 @@ func CheckBuiltinZeroComparison(pass *analysis.Pass) (interface{}, error) { return nil, nil } -var integerDivisionQ = pattern.MustParse(`(BinaryExpr (BasicLit "INT" _) "/" (BasicLit "INT" _))`) +var integerDivisionQ = pattern.MustParse(`(BinaryExpr (IntegerLiteral _) "/" (IntegerLiteral _))`) func CheckIntegerDivisionEqualsZero(pass *analysis.Pass) (interface{}, error) { fn := func(node ast.Node) { @@ -4772,12 +4826,12 @@ func CheckNegativeZeroFloat(pass *analysis.Pass) (interface{}, error) { report.Render(pass, node), conv.Name(), report.Render(pass, m.State["lit"])), - report.Fixes(edit.Fix("use math.Copysign to create negative zero", edit.ReplaceWithString(pass.Fset, node, replacement)))) + report.Fixes(edit.Fix("use math.Copysign to create negative zero", edit.ReplaceWithString(node, replacement)))) } else { const replacement = `math.Copysign(0, -1)` report.Report(pass, node, "in Go, the floating-point literal '-0.0' is the same as '0.0', it does not produce a negative zero", - report.Fixes(edit.Fix("use math.Copysign to create negative zero", edit.ReplaceWithString(pass.Fset, node, replacement)))) + report.Fixes(edit.Fix("use math.Copysign to create negative zero", edit.ReplaceWithString(node, replacement)))) } } code.Preorder(pass, fn, (*ast.UnaryExpr)(nil), (*ast.CallExpr)(nil)) @@ -4810,3 +4864,352 @@ func CheckIneffectiveURLQueryModification(pass *analysis.Pass) (interface{}, err code.Preorder(pass, fn, (*ast.CallExpr)(nil)) return nil, nil } + +func CheckBadRemoveAll(pass *analysis.Pass) (interface{}, error) { + for _, fn := range 
pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + call, ok := instr.(ir.CallInstruction) + if !ok { + continue + } + if !irutil.IsCallTo(call.Common(), "os.RemoveAll") { + continue + } + + kind := "" + ex := "" + callName := "" + arg := irutil.Flatten(call.Common().Args[0]) + switch arg := arg.(type) { + case *ir.Call: + callName = irutil.CallName(&arg.Call) + if callName != "os.TempDir" { + continue + } + kind = "temporary" + ex = os.TempDir() + case *ir.Extract: + if arg.Index != 0 { + continue + } + first, ok := arg.Tuple.(*ir.Call) + if !ok { + continue + } + callName = irutil.CallName(&first.Call) + switch callName { + case "os.UserCacheDir": + kind = "cache" + ex, _ = os.UserCacheDir() + case "os.UserConfigDir": + kind = "config" + ex, _ = os.UserConfigDir() + case "os.UserHomeDir": + kind = "home" + ex, _ = os.UserHomeDir() + default: + continue + } + default: + continue + } + + if ex == "" { + report.Report(pass, call, fmt.Sprintf("this call to os.RemoveAll deletes the user's entire %s directory, not a subdirectory therein", kind), + report.Related(arg, fmt.Sprintf("this call to %s returns the user's %s directory", callName, kind))) + } else { + report.Report(pass, call, fmt.Sprintf("this call to os.RemoveAll deletes the user's entire %s directory, not a subdirectory therein", kind), + report.Related(arg, fmt.Sprintf("this call to %s returns the user's %s directory, for example %s", callName, kind, ex))) + } + } + } + } + return nil, nil +} + +var moduloOneQ = pattern.MustParse(`(BinaryExpr _ "%" (IntegerLiteral "1"))`) + +func CheckModuloOne(pass *analysis.Pass) (interface{}, error) { + fn := func(node ast.Node) { + _, ok := code.Match(pass, moduloOneQ, node) + if !ok { + return + } + report.Report(pass, node, "x % 1 is always zero") + } + code.Preorder(pass, fn, (*ast.BinaryExpr)(nil)) + return nil, nil +} + +var typeAssertionShadowingElseQ = pattern.MustParse(`(IfStmt (AssignStmt [obj@(Ident _) ok@(Ident _)] ":=" assert@(TypeAssertExpr obj _)) ok _ elseBranch)`) + +func CheckTypeAssertionShadowingElse(pass *analysis.Pass) (interface{}, error) { + // TODO(dh): without the IR-based verification, this check is able + // to find more bugs, but also more prone to false positives. It + // would be a good candidate for the 'codereview' category of + // checks. + + irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg + fn := func(node ast.Node) { + m, ok := code.Match(pass, typeAssertionShadowingElseQ, node) + if !ok { + return + } + shadow := pass.TypesInfo.ObjectOf(m.State["obj"].(*ast.Ident)) + shadowed := m.State["assert"].(*ast.TypeAssertExpr).X + + path, exact := astutil.PathEnclosingInterval(code.File(pass, shadow), shadow.Pos(), shadow.Pos()) + if !exact { + // TODO(dh): when can this happen? + return + } + irfn := ir.EnclosingFunction(irpkg, path) + + shadoweeIR, isAddr := irfn.ValueForExpr(m.State["obj"].(*ast.Ident)) + if shadoweeIR == nil || isAddr { + // TODO(dh): is this possible? 
+ return + } + + var branch ast.Node + switch br := m.State["elseBranch"].(type) { + case ast.Node: + branch = br + case []ast.Stmt: + branch = &ast.BlockStmt{List: br} + case nil: + return + default: + panic(fmt.Sprintf("unexpected type %T", br)) + } + + ast.Inspect(branch, func(node ast.Node) bool { + ident, ok := node.(*ast.Ident) + if !ok { + return true + } + if pass.TypesInfo.ObjectOf(ident) != shadow { + return true + } + + v, isAddr := irfn.ValueForExpr(ident) + if v == nil || isAddr { + return true + } + if irutil.Flatten(v) != shadoweeIR { + // Same types.Object, but different IR value. This + // either means that the variable has been + // assigned to since the type assertion, or that + // the variable has escaped to the heap. Either + // way, we shouldn't flag reads of it. + return true + } + + report.Report(pass, ident, + fmt.Sprintf("%s refers to the result of a failed type assertion and is a zero value, not the value that was being type-asserted", report.Render(pass, ident)), + report.Related(shadow, "this is the variable being read"), + report.Related(shadowed, "this is the variable being shadowed")) + return true + }) + } + code.Preorder(pass, fn, (*ast.IfStmt)(nil)) + return nil, nil +} + +var ineffectiveSortQ = pattern.MustParse(`(AssignStmt target@(Ident _) "=" (CallExpr typ@(Function (Or "sort.Float64Slice" "sort.IntSlice" "sort.StringSlice")) [target]))`) + +func CheckIneffectiveSort(pass *analysis.Pass) (interface{}, error) { + fn := func(node ast.Node) { + m, ok := code.Match(pass, ineffectiveSortQ, node) + if !ok { + return + } + + _, ok = pass.TypesInfo.TypeOf(m.State["target"].(ast.Expr)).(*types.Slice) + if !ok { + // Avoid flagging 'x = sort.StringSlice(x)' where TypeOf(x) == sort.StringSlice + return + } + + var alternative string + typeName := types.TypeString(m.State["typ"].(*types.TypeName).Type(), nil) + switch typeName { + case "sort.Float64Slice": + alternative = "Float64s" + case "sort.IntSlice": + alternative = "Ints" + case "sort.StringSlice": + alternative = "Strings" + default: + panic(fmt.Sprintf("unreachable: %q", typeName)) + } + + r := &ast.CallExpr{ + Fun: &ast.SelectorExpr{ + X: &ast.Ident{Name: "sort"}, + Sel: &ast.Ident{Name: alternative}, + }, + Args: []ast.Expr{m.State["target"].(ast.Expr)}, + } + + report.Report(pass, node, + fmt.Sprintf("%s is a type, not a function, and %s doesn't sort your values; consider using sort.%s instead", + typeName, + report.Render(pass, node.(*ast.AssignStmt).Rhs[0]), + alternative), + report.Fixes(edit.Fix(fmt.Sprintf("replace with call to sort.%s", alternative), edit.ReplaceWithNode(pass.Fset, node, r)))) + } + code.Preorder(pass, fn, (*ast.AssignStmt)(nil)) + return nil, nil +} + +var ineffectiveRandIntQ = pattern.MustParse(` + (CallExpr + (Function + name@(Or + "math/rand.Int31n" + "math/rand.Int63n" + "math/rand.Intn" + "(*math/rand.Rand).Int31n" + "(*math/rand.Rand).Int63n" + "(*math/rand.Rand).Intn")) + [(IntegerLiteral "1")])`) + +func CheckIneffectiveRandInt(pass *analysis.Pass) (interface{}, error) { + fn := func(node ast.Node) { + m, ok := code.Match(pass, ineffectiveRandIntQ, node) + if !ok { + return + } + + report.Report(pass, node, + fmt.Sprintf("%s(n) generates a random value 0 <= x < n; that is, the generated values don't include n; %s therefore always returns 0", + m.State["name"], report.Render(pass, node))) + } + + code.Preorder(pass, fn, (*ast.CallExpr)(nil)) + return nil, nil +} + +var allocationNilCheckQ = pattern.MustParse(`(IfStmt _ cond@(BinaryExpr lhs op@(Or "==" "!=") (Builtin 
"nil")) _ _)`) + +func CheckAllocationNilCheck(pass *analysis.Pass) (interface{}, error) { + irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg + fn := func(node ast.Node) { + m, ok := code.Match(pass, allocationNilCheckQ, node) + if !ok { + return + } + cond := m.State["cond"].(ast.Node) + if _, ok := code.Match(pass, checkAddressIsNilQ, cond); ok { + // Don't duplicate diagnostics reported by SA4022 + return + } + lhs := m.State["lhs"].(ast.Expr) + path, exact := astutil.PathEnclosingInterval(code.File(pass, lhs), lhs.Pos(), lhs.Pos()) + if !exact { + // TODO(dh): when can this happen? + return + } + irfn := ir.EnclosingFunction(irpkg, path) + v, isAddr := irfn.ValueForExpr(lhs) + if isAddr { + return + } + + seen := map[ir.Value]struct{}{} + var values []ir.Value + var neverNil func(v ir.Value, track bool) bool + neverNil = func(v ir.Value, track bool) bool { + if _, ok := seen[v]; ok { + return true + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ir.MakeClosure, *ir.Function: + if track { + values = append(values, v) + } + return true + case *ir.MakeChan, *ir.MakeMap, *ir.MakeSlice, *ir.Alloc: + if track { + values = append(values, v) + } + return true + case *ir.Slice: + if track { + values = append(values, v) + } + return neverNil(v.X, false) + case *ir.FieldAddr: + if track { + values = append(values, v) + } + return neverNil(v.X, false) + case *ir.Sigma: + return neverNil(v.X, true) + case *ir.Phi: + for _, e := range v.Edges { + if !neverNil(e, true) { + return false + } + } + return true + default: + return false + } + } + + if !neverNil(v, true) { + return + } + + var qualifier string + if op := m.State["op"].(token.Token); op == token.EQL { + qualifier = "never" + } else { + qualifier = "always" + } + fallback := fmt.Sprintf("this nil check is %s true", qualifier) + + sort.Slice(values, func(i, j int) bool { return values[i].Pos() < values[j].Pos() }) + + if ident, ok := m.State["lhs"].(*ast.Ident); ok { + if _, ok := pass.TypesInfo.ObjectOf(ident).(*types.Var); ok { + var opts []report.Option + if v.Parent() == irfn { + if len(values) == 1 { + opts = append(opts, report.Related(values[0], fmt.Sprintf("this is the value of %s", ident.Name))) + } else { + for _, vv := range values { + opts = append(opts, report.Related(vv, fmt.Sprintf("this is one of the value of %s", ident.Name))) + } + } + } + + switch v.(type) { + case *ir.MakeClosure, *ir.Function: + report.Report(pass, cond, "the checked variable contains a function and is never nil; did you mean to call it?", opts...) + default: + report.Report(pass, cond, fallback, opts...) 
+ } + } else { + if _, ok := v.(*ir.Function); ok { + report.Report(pass, cond, "functions are never nil; did you mean to call it?") + } else { + report.Report(pass, cond, fallback) + } + } + } else { + if _, ok := v.(*ir.Function); ok { + report.Report(pass, cond, "functions are never nil; did you mean to call it?") + } else { + report.Report(pass, cond, fallback) + } + } + + } + code.Preorder(pass, fn, (*ast.IfStmt)(nil)) + return nil, nil +} diff --git a/vendor/honnef.co/go/tools/staticcheck/rules.go b/vendor/honnef.co/go/tools/staticcheck/rules.go index d4e4c1968..debdf5d23 100644 --- a/vendor/honnef.co/go/tools/staticcheck/rules.go +++ b/vendor/honnef.co/go/tools/staticcheck/rules.go @@ -255,6 +255,9 @@ func validatePort(s string) bool { func ValidHostPort(v Value) bool { if k := extractConstExpectKind(v.Value, constant.String); k != nil { s := constant.StringVal(k.Value) + if s == "" { + return true + } _, port, err := net.SplitHostPort(s) if err != nil { return false diff --git a/vendor/honnef.co/go/tools/stylecheck/doc.go b/vendor/honnef.co/go/tools/stylecheck/doc.go index 2b5b5ee26..7ac707ee2 100644 --- a/vendor/honnef.co/go/tools/stylecheck/doc.go +++ b/vendor/honnef.co/go/tools/stylecheck/doc.go @@ -1,8 +1,15 @@ +// Package stylecheck contains analyzes that enforce style rules. +// Most of the recommendations made are universally agreed upon by the wider Go community. +// Some analyzes, however, implement stricter rules that not everyone will agree with. +// In the context of Staticcheck, these analyzes are not enabled by default. +// +// For the most part it is recommended to follow the advice given by the analyzers that are enabled by default, +// but you may want to disable additional analyzes on a case by case basis. package stylecheck import "honnef.co/go/tools/analysis/lint" -var Docs = lint.Markdownify(map[string]*lint.Documentation{ +var Docs = lint.Markdownify(map[string]*lint.RawDocumentation{ "ST1000": { Title: `Incorrect or missing package comment`, Text: `Packages must have a package comment that is formatted according to @@ -10,6 +17,7 @@ the guidelines laid out in https://github.com/golang/go/wiki/CodeReviewComments#package-comments.`, Since: "2019.1", NonDefault: true, + MergeIf: lint.MergeIfAny, }, "ST1001": { @@ -21,7 +29,7 @@ imports. Quoting Go Code Review Comments: -> The import . form can be useful in tests that, due to circular +> The \'import .\' form can be useful in tests that, due to circular > dependencies, cannot be made part of the package being tested: > > package foo_test @@ -32,14 +40,15 @@ Quoting Go Code Review Comments: > ) > > In this case, the test file cannot be in package foo because it -> uses bar/testutil, which imports foo. So we use the 'import .' +> uses \'bar/testutil\', which imports \'foo\'. So we use the \'import .\' > form to let the file pretend to be part of package foo even though -> it is not. Except for this one case, do not use import . in your +> it is not. Except for this one case, do not use \'import .\' in your > programs. 
It makes the programs much harder to read because it is -> unclear whether a name like Quux is a top-level identifier in the +> unclear whether a name like \'Quux\' is a top-level identifier in the > current package or in an imported package.`, Since: "2019.1", Options: []string{"dot_import_whitelist"}, + MergeIf: lint.MergeIfAny, }, "ST1003": { @@ -55,6 +64,7 @@ See the following links for details: Since: "2019.1", NonDefault: true, Options: []string{"initialisms"}, + MergeIf: lint.MergeIfAny, }, "ST1005": { @@ -67,10 +77,11 @@ Quoting Go Code Review Comments: > Error strings should not be capitalized (unless beginning with > proper nouns or acronyms) or end with punctuation, since they are > usually printed following other context. That is, use -> fmt.Errorf("something bad") not fmt.Errorf("Something bad"), so -> that log.Printf("Reading %s: %v", filename, err) formats without a +> \'fmt.Errorf("something bad")\' not \'fmt.Errorf("Something bad")\', so +> that \'log.Printf("Reading %s: %v", filename, err)\' formats without a > spurious capital letter mid-message.`, - Since: "2019.1", + Since: "2019.1", + MergeIf: lint.MergeIfAny, }, "ST1006": { @@ -88,30 +99,34 @@ Quoting Go Code Review Comments: > almost every line of every method of the type; familiarity admits > brevity. Be consistent, too: if you call the receiver "c" in one > method, don't call it "cl" in another.`, - Since: "2019.1", + Since: "2019.1", + MergeIf: lint.MergeIfAny, }, "ST1008": { - Title: `A function's error value should be its last return value`, - Text: `A function's error value should be its last return value.`, - Since: `2019.1`, + Title: `A function's error value should be its last return value`, + Text: `A function's error value should be its last return value.`, + Since: `2019.1`, + MergeIf: lint.MergeIfAny, }, "ST1011": { - Title: "Poorly chosen name for variable of type `time.Duration`", + Title: `Poorly chosen name for variable of type \'time.Duration\'`, Text: `\'time.Duration\' values represent an amount of time, which is represented as a count of nanoseconds. An expression like \'5 * time.Microsecond\' yields the value \'5000\'. It is therefore not appropriate to suffix a -variable of type \'time.Duration\' with any time unit, such as \'Msec or -Milli\'.`, - Since: `2019.1`, +variable of type \'time.Duration\' with any time unit, such as \'Msec\' or +\'Milli\'.`, + Since: `2019.1`, + MergeIf: lint.MergeIfAny, }, "ST1012": { Title: `Poorly chosen name for error variable`, Text: `Error variables that are part of an API should be called \'errFoo\' or \'ErrFoo\'.`, - Since: "2019.1", + Since: "2019.1", + MergeIf: lint.MergeIfAny, }, "ST1013": { @@ -124,32 +139,37 @@ instead of hard-coding magic numbers, to vastly improve the readability of your code.`, Since: "2019.1", Options: []string{"http_status_code_whitelist"}, + MergeIf: lint.MergeIfAny, }, "ST1015": { - Title: `A switch's default case should be the first or last case`, - Since: "2019.1", + Title: `A switch's default case should be the first or last case`, + Since: "2019.1", + MergeIf: lint.MergeIfAny, }, "ST1016": { Title: `Use consistent method receiver names`, Since: "2019.1", NonDefault: true, + MergeIf: lint.MergeIfAny, }, "ST1017": { Title: `Don't use Yoda conditions`, - Text: `Yoda conditions are conditions of the kind \'if 42 == x\', where the + Text: `Yoda conditions are conditions of the kind \"if 42 == x\", where the literal is on the left side of the comparison. 
These are a common idiom in languages in which assignment is an expression, to avoid bugs -of the kind \'if (x = 42)\'. In Go, which doesn't allow for this kind of -bug, we prefer the more idiomatic \'if x == 42\'.`, - Since: "2019.2", +of the kind \"if (x = 42)\". In Go, which doesn't allow for this kind of +bug, we prefer the more idiomatic \"if x == 42\".`, + Since: "2019.2", + MergeIf: lint.MergeIfAny, }, "ST1018": { - Title: `Avoid zero-width and control characters in string literals`, - Since: "2019.2", + Title: `Avoid zero-width and control characters in string literals`, + Since: "2019.2", + MergeIf: lint.MergeIfAny, }, "ST1019": { @@ -158,12 +178,12 @@ bug, we prefer the more idiomatic \'if x == 42\'.`, different import aliases are being used. That is, the following bit of code is valid: - import ( - "fmt" - fumpt "fmt" - format "fmt" - _ "fmt" - ) + import ( + "fmt" + fumpt "fmt" + format "fmt" + _ "fmt" + ) However, this is very rarely done on purpose. Usually, it is a sign of code that got refactored, accidentally adding duplicate @@ -175,7 +195,8 @@ intentionally (see for example https://github.com/golang/go/commit/3409ce39bfd7584523b7a8c150a310cea92d879d) – if you want to allow this pattern in your code base, you're advised to disable this check.`, - Since: "2020.1", + Since: "2020.1", + MergeIf: lint.MergeIfAny, }, "ST1020": { @@ -186,13 +207,14 @@ should be a one-sentence summary that starts with the name being declared. If every doc comment begins with the name of the item it describes, -you can use the doc subcommand of the go tool and run the output +you can use the \'doc\' subcommand of the \'go\' tool and run the output through grep. See https://golang.org/doc/effective_go.html#commentary for more information on how to write good documentation.`, Since: "2020.1", NonDefault: true, + MergeIf: lint.MergeIfAny, }, "ST1021": { @@ -210,6 +232,7 @@ See https://golang.org/doc/effective_go.html#commentary for more information on how to write good documentation.`, Since: "2020.1", NonDefault: true, + MergeIf: lint.MergeIfAny, }, "ST1022": { @@ -220,18 +243,20 @@ should be a one-sentence summary that starts with the name being declared. If every doc comment begins with the name of the item it describes, -you can use the doc subcommand of the go tool and run the output +you can use the \'doc\' subcommand of the \'go\' tool and run the output through grep. 
See https://golang.org/doc/effective_go.html#commentary for more information on how to write good documentation.`, Since: "2020.1", NonDefault: true, + MergeIf: lint.MergeIfAny, }, "ST1023": { Title: "Redundant type in variable declaration", Since: "2021.1", NonDefault: true, + MergeIf: lint.MergeIfAll, }, }) diff --git a/vendor/honnef.co/go/tools/stylecheck/lint.go b/vendor/honnef.co/go/tools/stylecheck/lint.go index 8a70fad45..c80dd37bb 100644 --- a/vendor/honnef.co/go/tools/stylecheck/lint.go +++ b/vendor/honnef.co/go/tools/stylecheck/lint.go @@ -24,6 +24,7 @@ import ( "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/pattern" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" @@ -185,6 +186,8 @@ func CheckBlankImports(pass *analysis.Pass) (interface{}, error) { return nil, nil } +var checkIncDecQ = pattern.MustParse(`(AssignStmt x tok@(Or "+=" "-=") (BasicLit "INT" "1"))`) + func CheckIncDec(pass *analysis.Pass) (interface{}, error) { // TODO(dh): this can be noisy for function bodies that look like this: // x += 3 @@ -193,24 +196,19 @@ func CheckIncDec(pass *analysis.Pass) (interface{}, error) { // ... // x += 1 fn := func(node ast.Node) { - assign := node.(*ast.AssignStmt) - if assign.Tok != token.ADD_ASSIGN && assign.Tok != token.SUB_ASSIGN { - return - } - if (len(assign.Lhs) != 1 || len(assign.Rhs) != 1) || - !astutil.IsIntLiteral(assign.Rhs[0], "1") { + m, ok := code.Match(pass, checkIncDecQ, node) + if !ok { return } - suffix := "" - switch assign.Tok { + switch m.State["tok"].(token.Token) { case token.ADD_ASSIGN: suffix = "++" case token.SUB_ASSIGN: suffix = "--" } - report.Report(pass, assign, fmt.Sprintf("should replace %s with %s%s", report.Render(pass, assign), report.Render(pass, assign.Lhs[0]), suffix)) + report.Report(pass, node, fmt.Sprintf("should replace %s with %s%s", report.Render(pass, node), report.Render(pass, m.State["x"].(ast.Node)), suffix)) } code.Preorder(pass, fn, (*ast.AssignStmt)(nil)) return nil, nil @@ -230,6 +228,11 @@ fnLoop: // errors in other positions, that's fine. continue } + + if rets.Len() >= 2 && rets.At(rets.Len()-1).Type() == types.Universe.Lookup("bool").Type() && rets.At(rets.Len()-2).Type() == types.Universe.Lookup("error").Type() { + // Accept (..., error, bool) and assume it's a comma-ok function. It's not clear whether the bool should come last or not for these kinds of functions. 
+ continue + } for i := rets.Len() - 2; i >= 0; i-- { if rets.At(i).Type() == types.Universe.Lookup("error").Type() { report.Report(pass, rets.At(i), "error should be returned as the last argument", report.ShortRange()) @@ -399,7 +402,7 @@ func CheckErrorStrings(pass *analysis.Pass) (interface{}, error) { } switch s[len(s)-1] { case '.', ':', '!', '\n': - report.Report(pass, call, "error strings should not end with punctuation or a newline") + report.Report(pass, call, "error strings should not end with punctuation or newlines") } idx := strings.IndexByte(s, ' ') if idx == -1 { @@ -534,7 +537,7 @@ func CheckErrorVarNames(pass *analysis.Pass) (interface{}, error) { return nil, nil } -var httpStatusCodes = map[int]string{ +var httpStatusCodes = map[int64]string{ 100: "StatusContinue", 101: "StatusSwitchingProtocols", 102: "StatusProcessing", @@ -620,25 +623,26 @@ func CheckHTTPStatusCodes(pass *analysis.Pass) (interface{}, error) { if arg >= len(call.Args) { return } - lit, ok := call.Args[arg].(*ast.BasicLit) + tv, ok := code.IntegerLiteral(pass, call.Args[arg]) if !ok { return } - if whitelist[lit.Value] { + n, ok := constant.Int64Val(tv.Value) + if !ok { return } - - n, err := strconv.Atoi(lit.Value) - if err != nil { + if whitelist[strconv.FormatInt(n, 10)] { return } + s, ok := httpStatusCodes[n] if !ok { return } + lit := call.Args[arg] report.Report(pass, lit, fmt.Sprintf("should use constant http.%s instead of numeric literal %d", s, n), report.FilterGenerated(), - report.Fixes(edit.Fix(fmt.Sprintf("use http.%s instead of %d", s, n), edit.ReplaceWithString(pass.Fset, lit, "http."+s)))) + report.Fixes(edit.Fix(fmt.Sprintf("use http.%s instead of %d", s, n), edit.ReplaceWithString(lit, "http."+s)))) } code.Preorder(pass, fn, (*ast.CallExpr)(nil)) return nil, nil @@ -660,7 +664,7 @@ func CheckDefaultCaseOrder(pass *analysis.Pass) (interface{}, error) { } var ( - checkYodaConditionsQ = pattern.MustParse(`(BinaryExpr left@(BasicLit _ _) tok@(Or "==" "!=") right@(Not (BasicLit _ _)))`) + checkYodaConditionsQ = pattern.MustParse(`(BinaryExpr left@(TrulyConstantExpression _) tok@(Or "==" "!=") right@(Not (TrulyConstantExpression _)))`) checkYodaConditionsR = pattern.MustParse(`(BinaryExpr right tok left)`) ) @@ -690,14 +694,21 @@ func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) { var invalids []invalid hasFormat := false hasControl := false + prev := rune(-1) + const zwj = '\u200d' for off, r := range lit.Value { if unicode.Is(unicode.Cf, r) { - invalids = append(invalids, invalid{r, off}) - hasFormat = true + // Don't flag joined emojis. These are multiple emojis joined with ZWJ, which some platform render as single composite emojis. + // For the purpose of this check, we consider all symbols, including all symbol modifiers, emoji. 
+ if r != zwj || (r == zwj && !unicode.Is(unicode.S, prev)) { + invalids = append(invalids, invalid{r, off}) + hasFormat = true + } } else if unicode.Is(unicode.Cc, r) && r != '\n' && r != '\t' && r != '\r' { invalids = append(invalids, invalid{r, off}) hasControl = true } + prev = r } switch len(invalids) { @@ -727,7 +738,7 @@ func CheckInvisibleCharacters(pass *analysis.Pass) (interface{}, error) { }}, } delete := analysis.SuggestedFix{ - Message: fmt.Sprintf("delete %s character %U", kind, r), + Message: fmt.Sprintf("delete %s character %U", kind, r.r), TextEdits: []analysis.TextEdit{{ Pos: lit.Pos() + token.Pos(r.off), End: lit.Pos() + token.Pos(r.off) + token.Pos(utf8.RuneLen(r.r)), @@ -794,18 +805,24 @@ func CheckExportedFunctionDocs(pass *analysis.Pass) (interface{}, error) { kind := "function" if decl.Recv != nil { kind = "method" - switch T := decl.Recv.List[0].Type.(type) { - case *ast.StarExpr: - if !ast.IsExported(T.X.(*ast.Ident).Name) { - return - } + var ident *ast.Ident + T := decl.Recv.List[0].Type + if T_, ok := T.(*ast.StarExpr); ok { + T = T_.X + } + switch T := T.(type) { + case *ast.IndexExpr: + ident = T.X.(*ast.Ident) + case *typeparams.IndexListExpr: + ident = T.X.(*ast.Ident) case *ast.Ident: - if !ast.IsExported(T.Name) { - return - } + ident = T default: lint.ExhaustiveTypeSwitch(T) } + if !ast.IsExported(ident.Name) { + return + } } prefix := decl.Name.Name + " " if !strings.HasPrefix(text, prefix) { @@ -859,6 +876,11 @@ func CheckExportedTypeDocs(pass *analysis.Pass) (interface{}, error) { } } + // Check comment before we strip articles in case the type's name is an article. + if strings.HasPrefix(text, node.Name.Name+" ") { + return false + } + s := text articles := [...]string{"A", "An", "The"} for _, a := range articles { diff --git a/vendor/honnef.co/go/tools/unused/edge.go b/vendor/honnef.co/go/tools/unused/edge.go index 5fcf1465a..6d32946d2 100644 --- a/vendor/honnef.co/go/tools/unused/edge.go +++ b/vendor/honnef.co/go/tools/unused/edge.go @@ -1,6 +1,6 @@ package unused -//go:generate stringer -type edgeKind +//go:generate go run golang.org/x/tools/cmd/stringer@master -type edgeKind type edgeKind uint64 func (e edgeKind) is(o edgeKind) bool { @@ -52,4 +52,8 @@ const ( edgeUsedConstant edgeVarDecl edgeIgnored + edgeSamePointer + edgeTypeParam + edgeTypeArg + edgeUnionTerm ) diff --git a/vendor/honnef.co/go/tools/unused/edgekind_string.go b/vendor/honnef.co/go/tools/unused/edgekind_string.go index 7629636cf..ae27b2507 100644 --- a/vendor/honnef.co/go/tools/unused/edgekind_string.go +++ b/vendor/honnef.co/go/tools/unused/edgekind_string.go @@ -51,54 +51,64 @@ func _() { _ = x[edgeUnsafeConversion-1099511627776] _ = x[edgeUsedConstant-2199023255552] _ = x[edgeVarDecl-4398046511104] + _ = x[edgeIgnored-8796093022208] + _ = x[edgeSamePointer-17592186044416] + _ = x[edgeTypeParam-35184372088832] + _ = x[edgeTypeArg-70368744177664] + _ = x[edgeUnionTerm-140737488355328] } -const _edgeKind_name = 
"edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecl" +const _edgeKind_name = "edgeAliasedgeBlankFieldedgeAnonymousStructedgeCgoExportededgeConstGroupedgeElementTypeedgeEmbeddedInterfaceedgeExportedConstantedgeExportedFieldedgeExportedFunctionedgeExportedMethodedgeExportedTypeedgeExportedVariableedgeExtendsExportedFieldsedgeExtendsExportedMethodSetedgeFieldAccessedgeFunctionArgumentedgeFunctionResultedgeFunctionSignatureedgeImplementsedgeInstructionOperandedgeInterfaceCalledgeInterfaceMethodedgeKeyTypeedgeLinknameedgeMainFunctionedgeNamedTypeedgeNetRPCRegisteredgeNoCopySentineledgeProvidesMethodedgeReceiveredgeRuntimeFunctionedgeSignatureedgeStructConversionedgeTestSinkedgeTupleElementedgeTypeedgeTypeNameedgeUnderlyingTypeedgePointerTypeedgeUnsafeConversionedgeUsedConstantedgeVarDecledgeIgnorededgeSamePointeredgeTypeParamedgeTypeArgedgeUnionTerm" var _edgeKind_map = map[edgeKind]string{ - 1: _edgeKind_name[0:9], - 2: _edgeKind_name[9:23], - 4: _edgeKind_name[23:42], - 8: _edgeKind_name[42:57], - 16: _edgeKind_name[57:71], - 32: _edgeKind_name[71:86], - 64: _edgeKind_name[86:107], - 128: _edgeKind_name[107:127], - 256: _edgeKind_name[127:144], - 512: _edgeKind_name[144:164], - 1024: _edgeKind_name[164:182], - 2048: _edgeKind_name[182:198], - 4096: _edgeKind_name[198:218], - 8192: _edgeKind_name[218:243], - 16384: _edgeKind_name[243:271], - 32768: _edgeKind_name[271:286], - 65536: _edgeKind_name[286:306], - 131072: _edgeKind_name[306:324], - 262144: _edgeKind_name[324:345], - 524288: _edgeKind_name[345:359], - 1048576: _edgeKind_name[359:381], - 2097152: _edgeKind_name[381:398], - 4194304: _edgeKind_name[398:417], - 8388608: _edgeKind_name[417:428], - 16777216: _edgeKind_name[428:440], - 33554432: _edgeKind_name[440:456], - 67108864: _edgeKind_name[456:469], - 134217728: _edgeKind_name[469:487], - 268435456: _edgeKind_name[487:505], - 536870912: _edgeKind_name[505:523], - 1073741824: _edgeKind_name[523:535], - 2147483648: _edgeKind_name[535:554], - 4294967296: _edgeKind_name[554:567], - 8589934592: _edgeKind_name[567:587], - 17179869184: _edgeKind_name[587:599], - 34359738368: _edgeKind_name[599:615], - 68719476736: _edgeKind_name[615:623], - 137438953472: _edgeKind_name[623:635], - 274877906944: _edgeKind_name[635:653], - 549755813888: _edgeKind_name[653:668], - 1099511627776: _edgeKind_name[668:688], - 2199023255552: _edgeKind_name[688:704], - 4398046511104: _edgeKind_name[704:715], + 1: _edgeKind_name[0:9], + 2: _edgeKind_name[9:23], + 4: _edgeKind_name[23:42], + 8: _edgeKind_name[42:57], + 16: _edgeKind_name[57:71], + 32: _edgeKind_name[71:86], + 64: _edgeKind_name[86:107], + 128: _edgeKind_name[107:127], + 256: _edgeKind_name[127:144], + 512: _edgeKind_name[144:164], + 1024: _edgeKind_name[164:182], + 2048: _edgeKind_name[182:198], + 4096: _edgeKind_name[198:218], + 8192: _edgeKind_name[218:243], + 16384: _edgeKind_name[243:271], + 
32768: _edgeKind_name[271:286], + 65536: _edgeKind_name[286:306], + 131072: _edgeKind_name[306:324], + 262144: _edgeKind_name[324:345], + 524288: _edgeKind_name[345:359], + 1048576: _edgeKind_name[359:381], + 2097152: _edgeKind_name[381:398], + 4194304: _edgeKind_name[398:417], + 8388608: _edgeKind_name[417:428], + 16777216: _edgeKind_name[428:440], + 33554432: _edgeKind_name[440:456], + 67108864: _edgeKind_name[456:469], + 134217728: _edgeKind_name[469:487], + 268435456: _edgeKind_name[487:505], + 536870912: _edgeKind_name[505:523], + 1073741824: _edgeKind_name[523:535], + 2147483648: _edgeKind_name[535:554], + 4294967296: _edgeKind_name[554:567], + 8589934592: _edgeKind_name[567:587], + 17179869184: _edgeKind_name[587:599], + 34359738368: _edgeKind_name[599:615], + 68719476736: _edgeKind_name[615:623], + 137438953472: _edgeKind_name[623:635], + 274877906944: _edgeKind_name[635:653], + 549755813888: _edgeKind_name[653:668], + 1099511627776: _edgeKind_name[668:688], + 2199023255552: _edgeKind_name[688:704], + 4398046511104: _edgeKind_name[704:715], + 8796093022208: _edgeKind_name[715:726], + 17592186044416: _edgeKind_name[726:741], + 35184372088832: _edgeKind_name[741:754], + 70368744177664: _edgeKind_name[754:765], + 140737488355328: _edgeKind_name[765:778], } func (i edgeKind) String() string { diff --git a/vendor/honnef.co/go/tools/unused/typemap/identical.go b/vendor/honnef.co/go/tools/unused/typemap/identical.go deleted file mode 100644 index 248f3e17b..000000000 --- a/vendor/honnef.co/go/tools/unused/typemap/identical.go +++ /dev/null @@ -1,149 +0,0 @@ -package typemap - -import ( - "go/types" -) - -// Unlike types.Identical, receivers of Signature types are not ignored. -// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). -// Unlike types.Identical, structs are compared via pointer equality. -func identical0(x, y types.Type) bool { - if x == y { - return true - } - - switch x := x.(type) { - case *types.Basic: - // Basic types are singletons except for the rune and byte - // aliases, thus we cannot solely rely on the x == y check - // above. See also comment in TypeName.IsAlias. - if y, ok := y.(*types.Basic); ok { - return x.Kind() == y.Kind() - } - - case *types.Array: - // Two array types are identical if they have identical element types - // and the same array length. - if y, ok := y.(*types.Array); ok { - // If one or both array lengths are unknown (< 0) due to some error, - // assume they are the same to avoid spurious follow-on errors. - return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && identical0(x.Elem(), y.Elem()) - } - - case *types.Slice: - // Two slice types are identical if they have identical element types. - if y, ok := y.(*types.Slice); ok { - return identical0(x.Elem(), y.Elem()) - } - - case *types.Struct: - if y, ok := y.(*types.Struct); ok { - return x == y - } - - case *types.Pointer: - // Two pointer types are identical if they have identical base types. - if y, ok := y.(*types.Pointer); ok { - return identical0(x.Elem(), y.Elem()) - } - - case *types.Tuple: - // Two tuples types are identical if they have the same number of elements - // and corresponding elements have identical types. 
- if y, ok := y.(*types.Tuple); ok { - if x.Len() == y.Len() { - if x != nil { - for i := 0; i < x.Len(); i++ { - v := x.At(i) - w := y.At(i) - if !identical0(v.Type(), w.Type()) { - return false - } - } - } - return true - } - } - - case *types.Signature: - // Two function types are identical if they have the same number of parameters - // and result values, corresponding parameter and result types are identical, - // and either both functions are variadic or neither is. Parameter and result - // names are not required to match. - if y, ok := y.(*types.Signature); ok { - - return x.Variadic() == y.Variadic() && - identical0(x.Params(), y.Params()) && - identical0(x.Results(), y.Results()) && - (x.Recv() != nil && y.Recv() != nil && identical0(x.Recv().Type(), y.Recv().Type()) || x.Recv() == nil && y.Recv() == nil) - } - - case *types.Interface: - // The issue with interfaces, typeutil.Map and types.Identical - // - // types.Identical, when comparing two interfaces, only looks at the set - // of all methods, not differentiating between implicit (embedded) and - // explicit methods. - // - // When we see the following two types, in source order - // - // type I1 interface { foo() } - // type I2 interface { I1 } - // - // then we will first correctly process I1 and its underlying type. When - // we get to I2, we will see that its underlying type is identical to - // that of I1 and not process it again. This, however, means that we will - // not record the fact that I2 embeds I1. If only I2 is reachable via the - // graph root, then I1 will not be considered used. - // - // We choose to be lazy and compare interfaces by their - // pointers. This will obviously miss identical interfaces, - // but this only has a runtime cost, it doesn't affect - // correctness. - if y, ok := y.(*types.Interface); ok { - if x.NumEmbeddeds() == 0 && - y.NumEmbeddeds() == 0 && - x.NumMethods() == 0 && - y.NumMethods() == 0 { - // all truly empty interfaces are the same - return true - } - return x == y - } - - case *types.Map: - // Two map types are identical if they have identical key and value types. - if y, ok := y.(*types.Map); ok { - return identical0(x.Key(), y.Key()) && identical0(x.Elem(), y.Elem()) - } - - case *types.Chan: - // Two channel types are identical if they have identical value types - // and the same direction. - if y, ok := y.(*types.Chan); ok { - return x.Dir() == y.Dir() && identical0(x.Elem(), y.Elem()) - } - - case *types.Named: - // Two named types are identical if their type names originate - // in the same type declaration. - if y, ok := y.(*types.Named); ok { - return x.Obj() == y.Obj() - } - - case nil: - - default: - panic("unreachable") - } - - return false -} - -// Identical reports whether x and y are identical types. -// Unlike types.Identical, receivers of Signature types are not ignored. -// Unlike types.Identical, interfaces are compared via pointer equality (except for the empty interface, which gets deduplicated). -// Unlike types.Identical, structs are compared via pointer equality. -func Identical(x, y types.Type) (ret bool) { - return identical0(x, y) -} diff --git a/vendor/honnef.co/go/tools/unused/typemap/map.go b/vendor/honnef.co/go/tools/unused/typemap/map.go deleted file mode 100644 index d9961cab1..000000000 --- a/vendor/honnef.co/go/tools/unused/typemap/map.go +++ /dev/null @@ -1,318 +0,0 @@ -// Copyright 2014 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -// Package typemap defines Map, a mapping from types.Type to interface{} values. -package typemap - -import ( - "bytes" - "fmt" - "go/types" - "reflect" -) - -// Map is a hash-table-based mapping from types (types.Type) to -// arbitrary interface{} values. The concrete types that implement -// the Type interface are pointers. Since they are not canonicalized, -// == cannot be used to check for equivalence, and thus we cannot -// simply use a Go map. -// -// Just as with map[K]V, a nil *Map is a valid empty map. -// -// Not thread-safe. -// -// This fork handles Signatures correctly, respecting method -// receivers. Furthermore, it doesn't deduplicate interfaces or -// structs. Interfaces aren't deduplicated as not to conflate implicit -// and explicit methods. Structs aren't deduplicated because we track -// fields of each type separately. -// -type Map struct { - hasher Hasher // shared by many Maps - table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused - length int // number of map entries -} - -// entry is an entry (key/value association) in a hash bucket. -type entry struct { - key types.Type - value interface{} -} - -// SetHasher sets the hasher used by Map. -// -// All Hashers are functionally equivalent but contain internal state -// used to cache the results of hashing previously seen types. -// -// A single Hasher created by MakeHasher() may be shared among many -// Maps. This is recommended if the instances have many keys in -// common, as it will amortize the cost of hash computation. -// -// A Hasher may grow without bound as new types are seen. Even when a -// type is deleted from the map, the Hasher never shrinks, since other -// types in the map may reference the deleted type indirectly. -// -// Hashers are not thread-safe, and read-only operations such as -// Map.Lookup require updates to the hasher, so a full Mutex lock (not a -// read-lock) is require around all Map operations if a shared -// hasher is accessed from multiple threads. -// -// If SetHasher is not called, the Map will create a private hasher at -// the first call to Insert. -// -func (m *Map) SetHasher(hasher Hasher) { - m.hasher = hasher -} - -// Delete removes the entry with the given key, if any. -// It returns true if the entry was found. -// -func (m *Map) Delete(key types.Type) bool { - if m != nil && m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - for i, e := range bucket { - if e.key != nil && Identical(key, e.key) { - // We can't compact the bucket as it - // would disturb iterators. - bucket[i] = entry{} - m.length-- - return true - } - } - } - return false -} - -// At returns the map entry for the given key. -// The result is nil if the entry is not present. -// -func (m *Map) At(key types.Type) interface{} { - if m != nil && m.table != nil { - for _, e := range m.table[m.hasher.Hash(key)] { - if e.key != nil && Identical(key, e.key) { - return e.value - } - } - } - return nil -} - -// Set sets the map entry for key to val, -// and returns the previous entry, if any. 
-func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { - if m.table != nil { - hash := m.hasher.Hash(key) - bucket := m.table[hash] - var hole *entry - for i, e := range bucket { - if e.key == nil { - hole = &bucket[i] - } else if Identical(key, e.key) { - prev = e.value - bucket[i].value = value - return - } - } - - if hole != nil { - *hole = entry{key, value} // overwrite deleted entry - } else { - m.table[hash] = append(bucket, entry{key, value}) - } - } else { - if m.hasher.memo == nil { - m.hasher = MakeHasher() - } - hash := m.hasher.Hash(key) - m.table = map[uint32][]entry{hash: {entry{key, value}}} - } - - m.length++ - return -} - -// Len returns the number of map entries. -func (m *Map) Len() int { - if m != nil { - return m.length - } - return 0 -} - -// Iterate calls function f on each entry in the map in unspecified order. -// -// If f should mutate the map, Iterate provides the same guarantees as -// Go maps: if f deletes a map entry that Iterate has not yet reached, -// f will not be invoked for it, but if f inserts a map entry that -// Iterate has not yet reached, whether or not f will be invoked for -// it is unspecified. -// -func (m *Map) Iterate(f func(key types.Type, value interface{})) { - if m != nil { - for _, bucket := range m.table { - for _, e := range bucket { - if e.key != nil { - f(e.key, e.value) - } - } - } - } -} - -// Keys returns a new slice containing the set of map keys. -// The order is unspecified. -func (m *Map) Keys() []types.Type { - keys := make([]types.Type, 0, m.Len()) - m.Iterate(func(key types.Type, _ interface{}) { - keys = append(keys, key) - }) - return keys -} - -func (m *Map) toString(values bool) string { - if m == nil { - return "{}" - } - var buf bytes.Buffer - fmt.Fprint(&buf, "{") - sep := "" - m.Iterate(func(key types.Type, value interface{}) { - fmt.Fprint(&buf, sep) - sep = ", " - fmt.Fprint(&buf, key) - if values { - fmt.Fprintf(&buf, ": %q", value) - } - }) - fmt.Fprint(&buf, "}") - return buf.String() -} - -// String returns a string representation of the map's entries. -// Values are printed using fmt.Sprintf("%v", v). -// Order is unspecified. -// -func (m *Map) String() string { - return m.toString(true) -} - -// KeysString returns a string representation of the map's key set. -// Order is unspecified. -// -func (m *Map) KeysString() string { - return m.toString(false) -} - -//////////////////////////////////////////////////////////////////////// -// Hasher - -// A Hasher maps each type to its hash value. -// For efficiency, a hasher uses memoization; thus its memory -// footprint grows monotonically over time. -// Hashers are not thread-safe. -// Hashers have reference semantics. -// Call MakeHasher to create a Hasher. -type Hasher struct { - memo map[types.Type]uint32 -} - -// MakeHasher returns a new Hasher instance. -func MakeHasher() Hasher { - return Hasher{make(map[types.Type]uint32)} -} - -// Hash computes a hash value for the given type t such that -// Identical(t, t') => Hash(t) == Hash(t'). -func (h Hasher) Hash(t types.Type) uint32 { - hash, ok := h.memo[t] - if !ok { - hash = h.hashFor(t) - h.memo[t] = hash - } - return hash -} - -// hashString computes the Fowler–Noll–Vo hash of s. -func hashString(s string) uint32 { - var h uint32 - for i := 0; i < len(s); i++ { - h ^= uint32(s[i]) - h *= 16777619 - } - return h -} - -// hashFor computes the hash of t. -func (h Hasher) hashFor(t types.Type) uint32 { - // See Identical for rationale. 
- switch t := t.(type) { - case *types.Basic: - return uint32(t.Kind()) - - case *types.Array: - return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) - - case *types.Slice: - return 9049 + 2*h.Hash(t.Elem()) - - case *types.Struct: - var hash uint32 = 9059 - for i, n := 0, t.NumFields(); i < n; i++ { - f := t.Field(i) - if f.Anonymous() { - hash += 8861 - } - hash += hashString(t.Tag(i)) - hash += hashString(f.Name()) // (ignore f.Pkg) - hash += h.Hash(f.Type()) - } - return hash - - case *types.Pointer: - return 9067 + 2*h.Hash(t.Elem()) - - case *types.Signature: - var hash uint32 = 9091 - if t.Variadic() { - hash *= 8863 - } - return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) - - case *types.Interface: - var hash uint32 = 9103 - for i, n := 0, t.NumMethods(); i < n; i++ { - // See go/types.identicalMethods for rationale. - // Method order is not significant. - // Ignore m.Pkg(). - m := t.Method(i) - hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) - } - return hash - - case *types.Map: - return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) - - case *types.Chan: - return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) - - case *types.Named: - // Not safe with a copying GC; objects may move. - return uint32(reflect.ValueOf(t.Obj()).Pointer()) - - case *types.Tuple: - return h.hashTuple(t) - } - panic(t) -} - -func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { - // See go/types.identicalTypes for rationale. - n := tuple.Len() - var hash uint32 = 9137 + 2*uint32(n) - for i := 0; i < n; i++ { - hash += 3 * h.Hash(tuple.At(i).Type()) - } - return hash -} diff --git a/vendor/honnef.co/go/tools/unused/unused.go b/vendor/honnef.co/go/tools/unused/unused.go index ab7c9f7ea..3d69dbb68 100644 --- a/vendor/honnef.co/go/tools/unused/unused.go +++ b/vendor/honnef.co/go/tools/unused/unused.go @@ -1,5 +1,8 @@ +// Package unused contains code for finding unused code. package unused +// TODO(dh): don't add instantiated types/methods to the graph. add the origin types/methods. + import ( "fmt" "go/ast" @@ -17,8 +20,8 @@ import ( "honnef.co/go/tools/go/ir" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" - "honnef.co/go/tools/unused/typemap" + "golang.org/x/exp/typeparams" "golang.org/x/tools/go/analysis" ) @@ -61,6 +64,9 @@ var Debug io.Writer interfaces. if a method that implements an interface is defined on a pointer receiver, and the pointer type is never used, but the named type is, then we still want to mark the method as used. + - (2.5) all their type parameters. Unused type parameters are probably useless, but they're a brand new feature and we + don't want to introduce false positives because we couldn't anticipate some novel use-case. + - (2.6) all their type arguments - variables and constants use: - their types @@ -77,6 +83,7 @@ var Debug io.Writer - (4.7) fields they access - (4.8) types of all instructions - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) + - (4.10) all their type parameters. See 2.5 for reasoning. - conversions use: - (5.1) when converting between two equivalent structs, the fields in @@ -127,7 +134,6 @@ var Debug io.Writer - (9.7) variable _reads_ use variables, writes do not, except in tests - (9.8) runtime functions that may be called from user code via the compiler - - const groups: (10.1) if one constant out of a block of constants is used, mark all of them used. a lot of the time, unused constants exist for the sake @@ -143,6 +149,9 @@ var Debug io.Writer positives. 
Thus, we only accurately track fields of named struct types, and assume that unnamed struct types use all their fields. +- type parameters use: + - (12.1) their constraint type + */ func assert(b bool) { @@ -448,7 +457,11 @@ func typString(obj types.Object) string { case *types.Const: return "const" case *types.TypeName: - return "type" + if _, ok := obj.Type().(*typeparams.TypeParam); ok { + return "type param" + } else { + return "type" + } default: return "identifier" } @@ -548,9 +561,9 @@ func run(pass *analysis.Pass) (interface{}, error) { for _, v := range g.Nodes { debugNode(v) } - g.TypeNodes.Iterate(func(key types.Type, value interface{}) { - debugNode(value.(*node)) - }) + for _, node := range g.TypeNodes { + debugNode(node) + } debugf("}\n") } @@ -560,10 +573,9 @@ func run(pass *analysis.Pass) (interface{}, error) { func results(g *graph) (used, unused []types.Object) { g.color(g.Root) - g.TypeNodes.Iterate(func(_ types.Type, value interface{}) { - node := value.(*node) + for _, node := range g.TypeNodes { if node.seen { - return + continue } switch obj := node.obj.(type) { case *types.Struct: @@ -580,7 +592,7 @@ func results(g *graph) (used, unused []types.Object) { } } } - }) + } // OPT(dh): can we find meaningful initial capacities for the used and unused slices? @@ -615,10 +627,14 @@ func results(g *graph) (used, unused []types.Object) { } type graph struct { - Root *node - seenTypes typemap.Map + Root *node + + // Mapping of types T to canonical *T + pointers map[types.Type]*types.Pointer + + seenTypes map[types.Type]struct{} - TypeNodes typemap.Map + TypeNodes map[types.Type]*node Nodes map[interface{}]*node // context @@ -629,13 +645,27 @@ type graph struct { func newGraph() *graph { g := &graph{ - Nodes: map[interface{}]*node{}, - seenFns: map[*ir.Function]struct{}{}, + Nodes: map[interface{}]*node{}, + seenFns: map[*ir.Function]struct{}{}, + seenTypes: map[types.Type]struct{}{}, + TypeNodes: map[types.Type]*node{}, + pointers: map[types.Type]*types.Pointer{}, } g.Root = g.newNode(nil) return g } +func (g *graph) newPointer(typ types.Type) *types.Pointer { + if p, ok := g.pointers[typ]; ok { + return p + } else { + p := types.NewPointer(typ) + g.pointers[typ] = p + g.see(p) + return p + } +} + func (g *graph) color(root *node) { if root.seen { return @@ -681,11 +711,11 @@ func (g *graph) nodeMaybe(obj types.Object) (*node, bool) { func (g *graph) node(obj interface{}) (n *node, new bool) { switch obj := obj.(type) { case types.Type: - if v := g.TypeNodes.At(obj); v != nil { - return v.(*node), false + if v := g.TypeNodes[obj]; v != nil { + return v, false } n = g.newNode(obj) - g.TypeNodes.Set(obj, n) + g.TypeNodes[obj] = n return n, true case types.Object: // OPT(dh): the types.Object and default cases are identical @@ -798,8 +828,30 @@ func (g *graph) see(obj interface{}) *node { } assert(obj != nil) + + if fn, ok := obj.(*types.Func); ok { + obj = typeparams.OriginMethod(fn) + } + if t, ok := obj.(*types.Named); ok { + obj = typeparams.NamedTypeOrigin(t) + } + // add new node to graph node, _ := g.node(obj) + + if p, ok := obj.(*types.Pointer); ok { + if pt, ok := g.pointers[p.Elem()]; ok { + // We've used graph.newPointer before we saw this pointer; add an edge that marks the two pointers as being + // identical + if p != pt { + g.use(p, pt, edgeSamePointer) + g.use(pt, p, edgeSamePointer) + } + } else { + g.pointers[p.Elem()] = p + } + } + return node } @@ -814,6 +866,21 @@ func (g *graph) use(used, by interface{}, kind edgeKind) { return } } + + if fn, ok := 
used.(*types.Func); ok { + used = typeparams.OriginMethod(fn) + } + if fn, ok := by.(*types.Func); ok { + by = typeparams.OriginMethod(fn) + } + + if t, ok := used.(*types.Named); ok { + used = typeparams.NamedTypeOrigin(t) + } + if t, ok := by.(*types.Named); ok { + by = typeparams.NamedTypeOrigin(t) + } + usedNode, new := g.node(used) assert(!new) if by == nil { @@ -1125,7 +1192,7 @@ func (g *graph) entry(pkg *pkg) { var ifaces []*types.Interface var notIfaces []types.Type - g.seenTypes.Iterate(func(t types.Type, _ interface{}) { + for t := range g.seenTypes { switch t := t.(type) { case *types.Interface: // OPT(dh): (8.1) we only need interfaces that have unexported methods @@ -1135,7 +1202,7 @@ func (g *graph) entry(pkg *pkg) { notIfaces = append(notIfaces, t) } } - }) + } // (8.0) handle interfaces for _, t := range notIfaces { @@ -1235,7 +1302,7 @@ func (g *graph) entry(pkg *pkg) { } func (g *graph) useMethod(t types.Type, sel *types.Selection, by interface{}, kind edgeKind) { - obj := sel.Obj() + obj := sel.Obj().(*types.Func) path := sel.Index() assert(obj != nil) if len(path) > 1 { @@ -1262,6 +1329,7 @@ func owningObject(fn *ir.Function) types.Object { } func (g *graph) function(fn *ir.Function) { + assert(fn != nil) if fn.Package() != nil && fn.Package() != g.pkg.IR { return } @@ -1285,7 +1353,7 @@ func (g *graph) function(fn *ir.Function) { } func (g *graph) typ(t types.Type, parent types.Type) { - if g.seenTypes.At(t) != nil { + if _, ok := g.seenTypes[t]; ok { return } @@ -1295,7 +1363,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { } } - g.seenTypes.Set(t, struct{}{}) + g.seenTypes[t] = struct{}{} if isIrrelevant(t) { return } @@ -1323,7 +1391,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { if _, ok := T.Underlying().(*types.Pointer); !ok { // An embedded field is addressable, so check // the pointer type to get the full method set - T = types.NewPointer(T) + T = g.newPointer(T) } ms := g.pkg.IR.Prog.MethodSets.MethodSet(T) for j := 0; j < ms.Len(); j++ { @@ -1369,13 +1437,29 @@ func (g *graph) typ(t types.Type, parent types.Type) { // Nothing to do case *types.Named: // (9.3) types use their underlying and element types - g.seeAndUse(t.Underlying(), t, edgeUnderlyingType) + origin := typeparams.NamedTypeOrigin(t) + g.seeAndUse(origin.Underlying(), t, edgeUnderlyingType) g.seeAndUse(t.Obj(), t, edgeTypeName) g.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type if _, ok := t.Underlying().(*types.Interface); !ok && t.NumMethods() > 0 { - g.seeAndUse(types.NewPointer(t), t, edgePointerType) + g.seeAndUse(g.newPointer(origin), t, edgePointerType) + } + + // (2.5) named types use their type parameters + + for i := 0; i < typeparams.ForNamed(t).Len(); i++ { + tparam := typeparams.ForNamed(t).At(i) + g.seeAndUse(tparam, t, edgeTypeParam) + g.typ(tparam, nil) + } + + // (2.6) named types use their type arguments + for i := 0; i < typeparams.NamedTypeArgs(t).Len(); i++ { + targ := typeparams.NamedTypeArgs(t).At(i) + g.seeAndUse(targ, t, edgeTypeArg) + g.typ(t, nil) } for i := 0; i < t.NumMethods(); i++ { @@ -1389,7 +1473,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { g.function(g.pkg.IR.Prog.FuncValue(t.Method(i))) } - g.typ(t.Underlying(), t) + g.typ(origin.Underlying(), t) case *types.Slice: // (9.3) types use their underlying and element types g.seeAndUse(t.Elem(), t, edgeElementType) @@ -1414,6 +1498,7 @@ func (g *graph) typ(t types.Type, parent types.Type) { for i := 0; i < t.NumEmbeddeds(); i++ { tt := 
t.EmbeddedType(i) // (8.4) All embedded interfaces are marked as used + g.typ(tt, nil) g.seeAndUse(tt, t, edgeEmbeddedInterface) } case *types.Array: @@ -1434,6 +1519,22 @@ func (g *graph) typ(t types.Type, parent types.Type) { g.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) g.typ(t.At(i).Type(), nil) } + case *typeutil.Iterator: + // (9.3) types use their underlying and element types + g.seeAndUse(t.Elem(), t, edgeElementType) + g.typ(t.Elem(), nil) + case *typeparams.TypeParam: + // (9.3) types use their underlying and element types + + g.seeAndUse(t.Obj(), t, edgeTypeName) + g.seeAndUse(t, t.Obj(), edgeNamedType) + g.seeAndUse(t.Constraint(), t, edgeElementType) + g.typ(t.Constraint(), t) + case *typeparams.Union: + for i := 0; i < t.Len(); i++ { + g.seeAndUse(t.Term(i).Type(), t, edgeUnionTerm) + g.typ(t.Term(i).Type(), nil) + } default: panic(fmt.Sprintf("unreachable: %T", t)) } @@ -1465,6 +1566,20 @@ func (g *graph) signature(sig *types.Signature, fn types.Object) { g.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) g.typ(param.Type(), nil) } + for i := 0; i < typeparams.RecvTypeParams(sig).Len(); i++ { + // We track the type parameter's constraint, not the type parameter itself. + // We never want to flag an unused type parameter. + param := typeparams.RecvTypeParams(sig).At(i).Constraint() + g.seeAndUse(param, user, edgeFunctionArgument|edgeType) + g.typ(param, nil) + } + for i := 0; i < typeparams.ForSignature(sig).Len(); i++ { + // We track the type parameter's constraint, not the type parameter itself. + // We never want to flag an unused type parameter. + param := typeparams.ForSignature(sig).At(i).Constraint() + g.seeAndUse(param, user, edgeFunctionArgument|edgeType) + g.typ(param, nil) + } } func (g *graph) instructions(fn *ir.Function) { @@ -1515,19 +1630,27 @@ func (g *graph) instructions(fn *ir.Function) { } switch instr := instr.(type) { case *ir.Field: + // Can't access fields via generics, for now. + st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access g.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.FieldAddr: - st := typeutil.Dereference(instr.X.Type()).Underlying().(*types.Struct) + // User code can't access fields on type parameters, but composite literals are still possible, which + // compile to FieldAddr + Store. + + st := typeutil.CoreType(typeutil.Dereference(instr.X.Type())).(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access g.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.Store: // nothing to do, handled generically by operands - case *ir.Call: + case ir.CallInstruction: c := instr.Common() + for _, targ := range c.TypeArgs { + g.seeAndUse(targ, fnObj, edgeTypeArg) + } if !c.IsInvoke() { // handled generically as an instruction operand } else { @@ -1539,8 +1662,8 @@ func (g *graph) instructions(fn *ir.Function) { case *ir.ChangeType: // conversion type handled generically - s1, ok1 := typeutil.Dereference(instr.Type()).Underlying().(*types.Struct) - s2, ok2 := typeutil.Dereference(instr.X.Type()).Underlying().(*types.Struct) + s1, ok1 := typeutil.CoreType(typeutil.Dereference(instr.Type())).(*types.Struct) + s2, ok2 := typeutil.CoreType(typeutil.Dereference(instr.X.Type())).(*types.Struct) if ok1 && ok2 { // Converting between two structs. 
The fields are // relevant for the conversion, but only if the @@ -1647,14 +1770,16 @@ func (g *graph) instructions(fn *ir.Function) { // nothing to do case *ir.Load: // nothing to do - case *ir.Go: - // nothing to do - case *ir.Defer: - // nothing to do case *ir.Parameter: // nothing to do case *ir.Const: // nothing to do + case *ir.ArrayConst: + // nothing to do + case *ir.AggregateConst: + // nothing to do + case *ir.GenericConst: + // nothing to do case *ir.Recv: // nothing to do case *ir.TypeSwitch: diff --git a/vendor/modules.txt b/vendor/modules.txt index 3b3bd54f3..a92074855 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -29,19 +29,22 @@ code.cloudfoundry.org/tlsconfig # code.cloudfoundry.org/workpool v0.0.0-20200131000409-2ac56b354115 ## explicit code.cloudfoundry.org/workpool -# github.com/Antonboom/errname v0.1.5 -## explicit; go 1.17 +# github.com/Antonboom/errname v0.1.6 +## explicit; go 1.18 github.com/Antonboom/errname/pkg/analyzer -# github.com/Antonboom/nilnil v0.1.0 -## explicit; go 1.17 +# github.com/Antonboom/nilnil v0.1.1 +## explicit; go 1.18 github.com/Antonboom/nilnil/pkg/analyzer -# github.com/BurntSushi/toml v1.0.0 +# github.com/BurntSushi/toml v1.1.0 ## explicit; go 1.16 github.com/BurntSushi/toml github.com/BurntSushi/toml/internal # github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 ## explicit; go 1.13 github.com/Djarvur/go-err113 +# github.com/GaijinEntertainment/go-exhaustruct/v2 v2.1.0 +## explicit; go 1.18 +github.com/GaijinEntertainment/go-exhaustruct/v2/pkg/analyzer # github.com/Masterminds/semver v1.5.0 ## explicit github.com/Masterminds/semver @@ -119,7 +122,7 @@ github.com/beorn7/perks/quantile # github.com/bkielbasa/cyclop v1.2.0 ## explicit; go 1.15 github.com/bkielbasa/cyclop/pkg/analyzer -# github.com/blizzy78/varnamelen v0.6.1 +# github.com/blizzy78/varnamelen v0.8.0 ## explicit; go 1.16 github.com/blizzy78/varnamelen # github.com/bmatcuk/doublestar v1.3.4 @@ -128,11 +131,11 @@ github.com/bmatcuk/doublestar # github.com/bombsimon/wsl/v3 v3.3.0 ## explicit; go 1.12 github.com/bombsimon/wsl/v3 -# github.com/breml/bidichk v0.2.2 -## explicit; go 1.16 +# github.com/breml/bidichk v0.2.3 +## explicit; go 1.17 github.com/breml/bidichk/pkg/bidichk -# github.com/breml/errchkjson v0.2.3 -## explicit; go 1.16 +# github.com/breml/errchkjson v0.3.0 +## explicit; go 1.17 github.com/breml/errchkjson # github.com/butuzov/ireturn v0.1.1 ## explicit; go 1.15 @@ -148,7 +151,7 @@ github.com/charithe/durationcheck # github.com/charlievieth/fs v0.0.2 ## explicit; go 1.15 github.com/charlievieth/fs -# github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af +# github.com/chavacava/garif v0.0.0-20220316182200-5cad0b5181d4 ## explicit; go 1.16 github.com/chavacava/garif # github.com/cheggaaa/pb/v3 v3.0.8 @@ -243,13 +246,16 @@ github.com/fatih/color # github.com/fatih/structtag v1.2.0 ## explicit; go 1.12 github.com/fatih/structtag +# github.com/firefart/nonamedreturns v1.0.1 +## explicit; go 1.18 +github.com/firefart/nonamedreturns/analyzer # github.com/fsnotify/fsnotify v1.5.4 ## explicit; go 1.16 github.com/fsnotify/fsnotify -# github.com/fzipp/gocyclo v0.4.0 +# 
github.com/fzipp/gocyclo v0.5.1
 ## explicit; go 1.18
 github.com/fzipp/gocyclo
-# github.com/go-critic/go-critic v0.6.2
+# github.com/go-critic/go-critic v0.6.3
 ## explicit; go 1.16
 github.com/go-critic/go-critic/checkers
 github.com/go-critic/go-critic/checkers/internal/astwalk
@@ -323,14 +329,14 @@ github.com/golangci/dupl/printer
 github.com/golangci/dupl/suffixtree
 github.com/golangci/dupl/syntax
 github.com/golangci/dupl/syntax/golang
-# github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613
-## explicit
+# github.com/golangci/go-misc v0.0.0-20220329215616-d24fe342adfe
+## explicit; go 1.17
 github.com/golangci/go-misc/deadcode
 # github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a
 ## explicit
 github.com/golangci/gofmt/gofmt
 github.com/golangci/gofmt/goimports
-# github.com/golangci/golangci-lint v1.45.2
+# github.com/golangci/golangci-lint v1.46.2
 ## explicit; go 1.18
 github.com/golangci/golangci-lint/cmd/golangci-lint
 github.com/golangci/golangci-lint/internal/cache
@@ -462,8 +468,8 @@ github.com/kisielk/errcheck/errcheck
 ## explicit
 github.com/kisielk/gotool
 github.com/kisielk/gotool/internal/load
-# github.com/kulti/thelper v0.5.1
-## explicit; go 1.14
+# github.com/kulti/thelper v0.6.2
+## explicit; go 1.18
 github.com/kulti/thelper/pkg/analyzer
 # github.com/kunwardeep/paralleltest v1.0.3
 ## explicit; go 1.14
@@ -471,7 +477,7 @@ github.com/kunwardeep/paralleltest/pkg/paralleltest
 # github.com/kyoh86/exportloopref v0.1.8
 ## explicit; go 1.14
 github.com/kyoh86/exportloopref
-# github.com/ldez/gomoddirectives v0.2.2
+# github.com/ldez/gomoddirectives v0.2.3
 ## explicit; go 1.16
 github.com/ldez/gomoddirectives
 # github.com/ldez/tagliatelle v0.3.1
@@ -485,7 +491,10 @@ github.com/leonklingele/grouper/pkg/analyzer/globals
 github.com/leonklingele/grouper/pkg/analyzer/imports
 github.com/leonklingele/grouper/pkg/analyzer/types
 github.com/leonklingele/grouper/pkg/analyzer/vars
-# github.com/magiconair/properties v1.8.5
+# github.com/lufeee/execinquery v1.2.1
+## explicit; go 1.17
+github.com/lufeee/execinquery
+# github.com/magiconair/properties v1.8.6
 ## explicit; go 1.13
 github.com/magiconair/properties
 # github.com/maratori/testpackage v1.0.1
@@ -515,7 +524,7 @@ github.com/maxbrunsfeld/counterfeiter/v6/generator
 # github.com/mbilski/exhaustivestruct v1.2.0
 ## explicit; go 1.15
 github.com/mbilski/exhaustivestruct/pkg/analyzer
-# github.com/mgechev/revive v1.1.4
+# github.com/mgechev/revive v1.2.1
 ## explicit; go 1.12
 github.com/mgechev/revive/config
 github.com/mgechev/revive/formatter
@@ -524,7 +533,7 @@ github.com/mgechev/revive/rule
 # github.com/mitchellh/go-homedir v1.1.0
 ## explicit
 github.com/mitchellh/go-homedir
-# github.com/mitchellh/mapstructure v1.4.3
+# github.com/mitchellh/mapstructure v1.5.0
 ## explicit; go 1.14
 github.com/mitchellh/mapstructure
 # github.com/moricho/tparallel v0.2.1
@@ -549,7 +558,7 @@ github.com/nbutton23/zxcvbn-go/utils/math
 # github.com/nishanths/exhaustive v0.7.11
 ## explicit; go 1.14
 github.com/nishanths/exhaustive
-# github.com/nishanths/predeclared v0.2.1
+# github.com/nishanths/predeclared v0.2.2
 ## explicit; go 1.14
 github.com/nishanths/predeclared/passes/predeclared
 # github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d
@@ -611,9 +620,15 @@ github.com/onsi/gomega/matchers/support/goraph/edge
 github.com/onsi/gomega/matchers/support/goraph/node
 github.com/onsi/gomega/matchers/support/goraph/util
 github.com/onsi/gomega/types
-# github.com/pelletier/go-toml v1.9.4
+# github.com/pelletier/go-toml v1.9.5
 ## explicit; go 1.12
 github.com/pelletier/go-toml
+# github.com/pelletier/go-toml/v2 v2.0.0
+## explicit; go 1.16
+github.com/pelletier/go-toml/v2
+github.com/pelletier/go-toml/v2/internal/ast
+github.com/pelletier/go-toml/v2/internal/danger
+github.com/pelletier/go-toml/v2/internal/tracker
 # github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d
 ## explicit
 github.com/phayes/checkstyle
@@ -626,32 +641,31 @@ github.com/pkg/errors
 # github.com/pmezard/go-difflib v1.0.0
 ## explicit
 github.com/pmezard/go-difflib/difflib
-# github.com/polyfloyd/go-errorlint v0.0.0-20211125173453-6d6d39c5bb8b
+# github.com/polyfloyd/go-errorlint v1.0.0
 ## explicit; go 1.13
 github.com/polyfloyd/go-errorlint/errorlint
-# github.com/prometheus/client_golang v1.7.1
-## explicit; go 1.11
+# github.com/prometheus/client_golang v1.12.1
+## explicit; go 1.13
 github.com/prometheus/client_golang/prometheus
 github.com/prometheus/client_golang/prometheus/internal
 github.com/prometheus/client_golang/prometheus/testutil/promlint
 # github.com/prometheus/client_model v0.2.0
 ## explicit; go 1.9
 github.com/prometheus/client_model/go
-# github.com/prometheus/common v0.10.0
-## explicit; go 1.11
+# github.com/prometheus/common v0.32.1
+## explicit; go 1.13
 github.com/prometheus/common/expfmt
 github.com/prometheus/common/internal/bitbucket.org/ww/goautoneg
 github.com/prometheus/common/model
-# github.com/prometheus/procfs v0.6.0
+# github.com/prometheus/procfs v0.7.3
 ## explicit; go 1.13
 github.com/prometheus/procfs
 github.com/prometheus/procfs/internal/fs
 github.com/prometheus/procfs/internal/util
-# github.com/quasilyte/go-ruleguard v0.3.15
-## explicit; go 1.15
+# github.com/quasilyte/go-ruleguard v0.3.16-0.20220213074421-6aa060fab41a
+## explicit; go 1.17
 github.com/quasilyte/go-ruleguard/internal/goenv
 github.com/quasilyte/go-ruleguard/internal/golist
-github.com/quasilyte/go-ruleguard/internal/stdinfo
 github.com/quasilyte/go-ruleguard/internal/xsrcimporter
 github.com/quasilyte/go-ruleguard/internal/xtypes
 github.com/quasilyte/go-ruleguard/ruleguard
@@ -662,7 +676,7 @@ github.com/quasilyte/go-ruleguard/ruleguard/profiling
 github.com/quasilyte/go-ruleguard/ruleguard/quasigo
 github.com/quasilyte/go-ruleguard/ruleguard/textmatch
 github.com/quasilyte/go-ruleguard/ruleguard/typematch
-# github.com/quasilyte/gogrep v0.0.0-20220103110004-ffaa07af02e3
+# github.com/quasilyte/gogrep v0.0.0-20220120141003-628d8b3623b5
 ## explicit; go 1.16
 github.com/quasilyte/gogrep
 github.com/quasilyte/gogrep/internal/stdinfo
@@ -670,6 +684,9 @@ github.com/quasilyte/gogrep/nodetag
 # github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95
 ## explicit; go 1.14
 github.com/quasilyte/regex/syntax
+# github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567
+## explicit; go 1.17
+github.com/quasilyte/stdinfo
 # github.com/rivo/uniseg v0.2.0
 ## explicit; go 1.12
 github.com/rivo/uniseg
@@ -682,7 +699,7 @@ github.com/ryanrolds/sqlclosecheck/pkg/analyzer
 # github.com/sanposhiho/wastedassign/v2 v2.0.6
 ## explicit; go 1.14
 github.com/sanposhiho/wastedassign/v2
-# github.com/securego/gosec/v2 v2.10.0
+# github.com/securego/gosec/v2 v2.11.0
 ## explicit; go 1.16
 github.com/securego/gosec/v2
 github.com/securego/gosec/v2/cwe
@@ -696,8 +713,8 @@ github.com/sirupsen/logrus
 # github.com/sivchari/containedctx v1.0.2
 ## explicit; go 1.17
 github.com/sivchari/containedctx
-# github.com/sivchari/tenv v1.4.7
-## explicit; go 1.17
+# github.com/sivchari/tenv v1.5.0
+## explicit; go 1.18
 github.com/sivchari/tenv
 # github.com/sonatard/noctx v0.0.1
 ## explicit; go 1.13
@@ -707,7 +724,7 @@ github.com/sonatard/noctx/reqwithoutctx
 # github.com/sourcegraph/go-diff v0.6.1
 ## explicit; go 1.14
 github.com/sourcegraph/go-diff/diff
-# github.com/spf13/afero v1.6.0
+# github.com/spf13/afero v1.8.2
 ## explicit; go 1.13
 github.com/spf13/afero
 github.com/spf13/afero/mem
@@ -723,17 +740,23 @@ github.com/spf13/jwalterweatherman
 # github.com/spf13/pflag v1.0.5
 ## explicit; go 1.12
 github.com/spf13/pflag
-# github.com/spf13/viper v1.10.1
+# github.com/spf13/viper v1.11.0
 ## explicit; go 1.17
 github.com/spf13/viper
 github.com/spf13/viper/internal/encoding
+github.com/spf13/viper/internal/encoding/dotenv
 github.com/spf13/viper/internal/encoding/hcl
+github.com/spf13/viper/internal/encoding/ini
+github.com/spf13/viper/internal/encoding/javaproperties
 github.com/spf13/viper/internal/encoding/json
 github.com/spf13/viper/internal/encoding/toml
 github.com/spf13/viper/internal/encoding/yaml
 # github.com/ssgreg/nlreturn/v2 v2.2.1
 ## explicit; go 1.13
 github.com/ssgreg/nlreturn/v2/pkg/nlreturn
+# github.com/stbenjam/no-sprintf-host-port v0.1.1
+## explicit; go 1.16
+github.com/stbenjam/no-sprintf-host-port/pkg/analyzer
 # github.com/stretchr/objx v0.1.1
 ## explicit
 github.com/stretchr/objx
@@ -756,7 +779,7 @@ github.com/tetafro/godot
 # github.com/timakin/bodyclose v0.0.0-20210704033933-f49887972144
 ## explicit; go 1.12
 github.com/timakin/bodyclose/passes/bodyclose
-# github.com/tomarrell/wrapcheck/v2 v2.5.0
+# github.com/tomarrell/wrapcheck/v2 v2.6.1
 ## explicit; go 1.16
 github.com/tomarrell/wrapcheck/v2/wrapcheck
 # github.com/tommy-muehle/go-mnd/v2 v2.5.0
@@ -781,8 +804,8 @@ github.com/vito/go-interact/interact
 github.com/yagipy/maintidx
 github.com/yagipy/maintidx/pkg/cyc
 github.com/yagipy/maintidx/pkg/halstvol
-# github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1
-## explicit; go 1.14
+# github.com/yeya24/promlinter v0.2.0
+## explicit; go 1.16
 github.com/yeya24/promlinter
 # gitlab.com/bosi/decorder v0.2.1
 ## explicit; go 1.17
@@ -817,6 +840,9 @@ golang.org/x/crypto/internal/poly1305
 golang.org/x/crypto/internal/subtle
 golang.org/x/crypto/ssh
 golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
+# golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e
+## explicit; go 1.18
+golang.org/x/exp/typeparams
 # golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3
 ## explicit; go 1.17
 golang.org/x/mod/internal/lazyregexp
@@ -863,6 +889,7 @@ golang.org/x/sys/windows
 golang.org/x/term
 # golang.org/x/text v0.3.7
 ## explicit; go 1.17
+golang.org/x/text/cases
 golang.org/x/text/encoding
 golang.org/x/text/encoding/charmap
 golang.org/x/text/encoding/htmlindex
@@ -873,6 +900,7 @@ golang.org/x/text/encoding/korean
 golang.org/x/text/encoding/simplifiedchinese
 golang.org/x/text/encoding/traditionalchinese
 golang.org/x/text/encoding/unicode
+golang.org/x/text/internal
 golang.org/x/text/internal/language
 golang.org/x/text/internal/language/compact
 golang.org/x/text/internal/tag
@@ -884,7 +912,7 @@ golang.org/x/text/transform
 golang.org/x/text/unicode/bidi
 golang.org/x/text/unicode/norm
 golang.org/x/text/width
-# golang.org/x/tools v0.1.10
+# golang.org/x/tools v0.1.11-0.20220316014157-77aa08bb151a
 ## explicit; go 1.17
 golang.org/x/tools/cmd/goimports
 golang.org/x/tools/go/analysis
@@ -1108,7 +1136,7 @@ google.golang.org/protobuf/types/known/emptypb
 google.golang.org/protobuf/types/known/fieldmaskpb
 google.golang.org/protobuf/types/known/timestamppb
 google.golang.org/protobuf/types/known/wrapperspb
-# gopkg.in/ini.v1 v1.66.2
+# gopkg.in/ini.v1 v1.66.4
 ## explicit
 gopkg.in/ini.v1
 # gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7
@@ -1120,8 +1148,8 @@ gopkg.in/yaml.v2
 # gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
 ## explicit
 gopkg.in/yaml.v3
-# honnef.co/go/tools v0.2.2
-## explicit; go 1.14
+# honnef.co/go/tools v0.3.1
+## explicit; go 1.17
 honnef.co/go/tools/analysis/code
 honnef.co/go/tools/analysis/edit
 honnef.co/go/tools/analysis/facts
@@ -1146,9 +1174,8 @@ honnef.co/go/tools/staticcheck/fakereflect
 honnef.co/go/tools/staticcheck/fakexml
 honnef.co/go/tools/stylecheck
 honnef.co/go/tools/unused
-honnef.co/go/tools/unused/typemap
-# mvdan.cc/gofumpt v0.3.0
-## explicit; go 1.16
+# mvdan.cc/gofumpt v0.3.1
+## explicit; go 1.17
 mvdan.cc/gofumpt/format
 mvdan.cc/gofumpt/internal/version
 # mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed
diff --git a/vendor/mvdan.cc/gofumpt/format/format.go b/vendor/mvdan.cc/gofumpt/format/format.go
index 6f797cac4..727acce75 100644
--- a/vendor/mvdan.cc/gofumpt/format/format.go
+++ b/vendor/mvdan.cc/gofumpt/format/format.go
@@ -46,8 +46,6 @@ type Options struct {
 
 	// ModulePath corresponds to the Go module path which contains the source
 	// code being formatted. When inside a Go module, ModulePath should be:
-	// rules which require new language features. When inside a Go module,
-	// LangVersion should generally be specified as the result of:
 	//
 	// go mod edit -json | jq -r '.Module.Path'
 	//
@@ -914,7 +912,6 @@ func (f *fumpter) joinStdImports(d *ast.GenDecl) {
 		periodIndex := strings.IndexByte(path, '.')
 		slashIndex := strings.IndexByte(path, '/')
 		switch {
-		// Imports with a period in the first path element are third party.
 		// Note that this includes "foo.com" and excludes "foo/bar.com/baz".
 		case periodIndex > 0 && (slashIndex == -1 || periodIndex < slashIndex),