diff --git a/internal/gcimporter/iexport_go118_test.go b/internal/gcimporter/iexport_go118_test.go
index c748fb36165..8b291972032 100644
--- a/internal/gcimporter/iexport_go118_test.go
+++ b/internal/gcimporter/iexport_go118_test.go
@@ -106,10 +106,6 @@ func TestImportTypeparamTests(t *testing.T) {
 		t.Fatal(err)
 	}
 
-	if isUnifiedBuilder() {
-		t.Skip("unified export data format is currently unsupported")
-	}
-
 	for _, entry := range list {
 		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".go") {
 			// For now, only consider standalone go files.
diff --git a/internal/gcimporter/iexport_test.go b/internal/gcimporter/iexport_test.go
index b03581ccb52..501d53476df 100644
--- a/internal/gcimporter/iexport_test.go
+++ b/internal/gcimporter/iexport_test.go
@@ -7,57 +7,28 @@
 package gcimporter_test
 
 import (
-	"bufio"
 	"bytes"
 	"fmt"
 	"go/ast"
-	"go/build"
 	"go/constant"
 	"go/importer"
 	"go/parser"
 	"go/token"
 	"go/types"
-	"io"
 	"math/big"
 	"os"
 	"path/filepath"
 	"reflect"
-	"runtime"
-	"sort"
 	"strings"
 	"testing"
 
-	"golang.org/x/tools/go/ast/inspector"
-	"golang.org/x/tools/go/buildutil"
 	"golang.org/x/tools/go/gcexportdata"
-	"golang.org/x/tools/go/loader"
+	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/aliases"
 	"golang.org/x/tools/internal/gcimporter"
 	"golang.org/x/tools/internal/testenv"
-	"golang.org/x/tools/internal/typeparams/genericfeatures"
 )
 
-func readExportFile(filename string) ([]byte, error) {
-	f, err := os.Open(filename)
-	if err != nil {
-		return nil, err
-	}
-	defer f.Close()
-
-	buf := bufio.NewReader(f)
-	if _, _, err := gcimporter.FindExportData(buf); err != nil {
-		return nil, err
-	}
-
-	if ch, err := buf.ReadByte(); err != nil {
-		return nil, err
-	} else if ch != 'i' {
-		return nil, fmt.Errorf("unexpected byte: %v", ch)
-	}
-
-	return io.ReadAll(buf)
-}
-
 func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, error) {
 	var buf bytes.Buffer
 	const bundle, shallow = false, false
@@ -67,19 +38,8 @@ func iexport(fset *token.FileSet, version int, pkg *types.Package) ([]byte, erro
 	return buf.Bytes(), nil
 }
 
-// isUnifiedBuilder reports whether we are executing on a go builder that uses
-// unified export data.
-func isUnifiedBuilder() bool {
-	return os.Getenv("GO_BUILDER_NAME") == "linux-amd64-unified"
-}
-
-const minStdlibPackages = 248
-
 func TestIExportData_stdlib(t *testing.T) {
-	if runtime.Compiler == "gccgo" {
-		t.Skip("gccgo standard library is inaccessible")
-	}
-	testenv.NeedsGoBuild(t)
+	testenv.NeedsGoPackages(t)
 	if isRace {
 		t.Skipf("stdlib tests take too long in race mode and flake on builders")
 	}
@@ -87,85 +47,86 @@ func TestIExportData_stdlib(t *testing.T) {
 		t.Skip("skipping RAM hungry test in -short mode")
 	}
 
-	// Load, parse and type-check the program.
-	ctxt := build.Default // copy
-	ctxt.GOPATH = ""      // disable GOPATH
-	conf := loader.Config{
-		Build:       &ctxt,
-		AllowErrors: true,
-		TypeChecker: types.Config{
-			Sizes: types.SizesFor(ctxt.Compiler, ctxt.GOARCH),
-			Error: func(err error) { t.Log(err) },
-		},
-	}
-	for _, path := range buildutil.AllPackages(conf.Build) {
-		conf.Import(path)
+	var errorsDir string // GOROOT/src/errors directory
+	{
+		cfg := packages.Config{
+			Mode: packages.NeedName | packages.NeedFiles,
+		}
+		pkgs, err := packages.Load(&cfg, "errors")
+		if err != nil {
+			t.Fatal(err)
+		}
+		errorsDir = filepath.Dir(pkgs[0].GoFiles[0])
 	}
 
-	// Create a package containing type and value errors to ensure
-	// they are properly encoded/decoded.
-	f, err := conf.ParseFile("haserrors/haserrors.go", `package haserrors
+	// Load types from syntax for all std packages.
+	//
+	// Append a file to package errors containing type and
+	// value errors to ensure they are properly encoded/decoded.
+	const bad = `package errors
 const UnknownValue = "" + 0
 type UnknownType undefined
-`)
+`
+	cfg := packages.Config{
+		Mode:    packages.LoadAllSyntax | packages.NeedDeps,
+		Overlay: map[string][]byte{filepath.Join(errorsDir, "bad.go"): []byte(bad)},
+	}
+	pkgs, err := packages.Load(&cfg, "std") // ~800ms
 	if err != nil {
 		t.Fatal(err)
 	}
-	conf.CreateFromFiles("haserrors", f)
+	fset := pkgs[0].Fset
 
-	prog, err := conf.Load()
-	if err != nil {
-		t.Fatalf("Load failed: %v", err)
-	}
+	version := gcimporter.IExportVersion
 
-	var sorted []*types.Package
-	isUnified := isUnifiedBuilder()
-	for pkg, info := range prog.AllPackages {
-		// Temporarily skip packages that use generics on the unified builder, to
-		// fix TryBots.
-		//
-		// TODO(#48595): fix this test with GOEXPERIMENT=unified.
-		inspect := inspector.New(info.Files)
-		features := genericfeatures.ForPackage(inspect, &info.Info)
-		if isUnified && features != 0 {
-			t.Logf("skipping package %q which uses generics", pkg.Path())
-			continue
+	// Export and reimport each package, and check that they match.
+	var allPkgs []*types.Package
+	var errorsPkg *types.Package // reimported errors package
+	packages.Visit(pkgs, nil, func(ppkg *packages.Package) { // ~300ms
+		pkg := ppkg.Types
+		path := pkg.Path()
+		if path == "unsafe" ||
+			strings.HasPrefix(path, "cmd/") ||
+			strings.HasPrefix(path, "vendor/") {
+			return
 		}
-		if info.Files != nil { // non-empty directory
-			sorted = append(sorted, pkg)
+		allPkgs = append(allPkgs, pkg)
+
+		// Export and reimport the package, and compare.
+		exportdata, err := iexport(fset, version, pkg)
+		if err != nil {
+			t.Error(err)
+			return
+		}
+		pkg2 := testPkgData(t, fset, version, pkg, exportdata)
+		if path == "errors" {
+			errorsPkg = pkg2
 		}
-	}
-	sort.Slice(sorted, func(i, j int) bool {
-		return sorted[i].Path() < sorted[j].Path()
 	})
 
-	version := gcimporter.IExportVersion
-	numPkgs := len(sorted)
-	if want := minStdlibPackages; numPkgs < want {
-		t.Errorf("Loaded only %d packages, want at least %d", numPkgs, want)
+	// Assert that we saw a plausible sized library.
+	const minStdlibPackages = 284
+	if n := len(allPkgs); n < minStdlibPackages {
+		t.Errorf("Loaded only %d packages, want at least %d", n, minStdlibPackages)
 	}
 
-	// TODO(adonovan): opt: parallelize this slow loop.
-	for _, pkg := range sorted {
-		if exportdata, err := iexport(conf.Fset, version, pkg); err != nil {
-			t.Error(err)
-		} else {
-			testPkgData(t, conf.Fset, version, pkg, exportdata)
+	// Check that reimported errors package has bad decls.
+	if errorsPkg == nil {
+		t.Fatalf("'errors' package not found")
+	}
+	for _, name := range []string{"UnknownType", "UnknownValue"} {
+		obj := errorsPkg.Scope().Lookup(name)
+		if obj == nil {
+			t.Errorf("errors.%s not found", name)
 		}
-
-		if pkg.Name() == "main" || pkg.Name() == "haserrors" {
-			// skip; no export data
-		} else if bp, err := ctxt.Import(pkg.Path(), "", build.FindOnly); err != nil {
-			t.Log("warning:", err)
-		} else if exportdata, err := readExportFile(bp.PkgObj); err != nil {
-			t.Log("warning:", err)
-		} else {
-			testPkgData(t, conf.Fset, version, pkg, exportdata)
+		if typ := obj.Type().Underlying(); typ.String() != "invalid type" {
+			t.Errorf("errors.%s has underlying type %s, want invalid type", name, typ)
 		}
 	}
 
+	// (Sole) test of bundle functionality (250ms).
 	var bundle bytes.Buffer
-	if err := gcimporter.IExportBundle(&bundle, conf.Fset, sorted); err != nil {
+	if err := gcimporter.IExportBundle(&bundle, fset, allPkgs); err != nil {
 		t.Fatal(err)
 	}
 	fset2 := token.NewFileSet()
@@ -174,13 +135,13 @@ type UnknownType undefined
 	if err != nil {
 		t.Fatal(err)
 	}
-
-	for i, pkg := range sorted {
-		testPkg(t, conf.Fset, version, pkg, fset2, pkgs2[i])
+	for i, pkg := range allPkgs {
+		testPkg(t, fset, version, pkg, fset2, pkgs2[i])
 	}
 }
 
-func testPkgData(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, exportdata []byte) {
+// testPkgData imports a package from export data and compares it with pkg.
+func testPkgData(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, exportdata []byte) *types.Package {
 	imports := make(map[string]*types.Package)
 	fset2 := token.NewFileSet()
 	_, pkg2, err := gcimporter.IImportData(fset2, imports, exportdata, pkg.Path())
@@ -189,6 +150,7 @@ func testPkgData(t *testing.T, fset *token.FileSet, version int, pkg *types.Pack
 	}
 
 	testPkg(t, fset, version, pkg, fset2, pkg2)
+	return pkg2
 }
 
 func testPkg(t *testing.T, fset *token.FileSet, version int, pkg *types.Package, fset2 *token.FileSet, pkg2 *types.Package) {
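
For context on the new approach: golang.org/x/tools/go/packages lets a test inject a synthetic file into a real package via Config.Overlay, which maps absolute file paths to file contents. The rewritten test uses that to append a deliberately broken bad.go to GOROOT/src/errors without writing to disk, then checks that the invalid declarations survive an export/import round trip. The following standalone sketch shows the same overlay-and-reload pattern; it is illustrative only and not part of the patch (package main, the single bad constant, and the final print are assumptions), and it requires a Go toolchain plus golang.org/x/tools as a dependency.

package main

import (
	"fmt"
	"log"
	"path/filepath"

	"golang.org/x/tools/go/packages"
)

func main() {
	// Step 1: find the on-disk directory of the real "errors" package.
	cfg := &packages.Config{Mode: packages.NeedName | packages.NeedFiles}
	pkgs, err := packages.Load(cfg, "errors")
	if err != nil || len(pkgs) == 0 || len(pkgs[0].GoFiles) == 0 {
		log.Fatalf("locating errors package: %v", err)
	}
	errorsDir := filepath.Dir(pkgs[0].GoFiles[0])

	// Step 2: overlay a file with an intentional type error into that
	// directory and reload the package with full syntax and type info.
	const bad = "package errors\n\nconst UnknownValue = \"\" + 0\n"
	cfg = &packages.Config{
		Mode:    packages.LoadAllSyntax,
		Overlay: map[string][]byte{filepath.Join(errorsDir, "bad.go"): []byte(bad)},
	}
	pkgs, err = packages.Load(cfg, "errors")
	if err != nil {
		log.Fatal(err)
	}

	// The broken declaration is present in the package scope with an
	// invalid type; the test round-trips exactly this kind of object.
	if obj := pkgs[0].Types.Scope().Lookup("UnknownValue"); obj != nil {
		fmt.Println(obj.Name(), "->", obj.Type().Underlying())
	}
}

Because the overlaid path lives under GOROOT, no temporary module or file system write is needed; go/packages reports and type-checks the package as if bad.go had always been part of it.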