From fc29c17471e34068632d2e2be52d34ec73981f28 Mon Sep 17 00:00:00 2001 From: elliebababa Date: Thu, 25 Feb 2021 13:36:05 -0800 Subject: [PATCH 1/3] cliccl/load: add `load show` args to display a subset of backup metadata Previously, `cockroach load show ` was used for debugging purposes and displayed all the metadata of a single backup manifest file. With this PR, we extend `load show` with more options to display subsets of that data. We also add support for "nodelocal://0/" access. Release justification: Non-production code changes Release note (cli change): Add `load show` args to display a subset of backup metadata. Users can display subsets of the metadata of a single manifest by using `load show files/descriptors/metadata/spans `. --- pkg/blobs/client.go | 6 +- pkg/blobs/testutils.go | 2 +- pkg/ccl/cliccl/BUILD.bazel | 6 ++ pkg/ccl/cliccl/load.go | 183 +++++++++++++++++++++++++++++++------ 4 files changed, 163 insertions(+), 34 deletions(-) diff --git a/pkg/blobs/client.go b/pkg/blobs/client.go index 641431e69f46..d5e4cd43bbd0 100644 --- a/pkg/blobs/client.go +++ b/pkg/blobs/client.go @@ -128,8 +128,8 @@ type localClient struct { localStorage *LocalStorage } -// newLocalClient instantiates a local blob service client. -func newLocalClient(externalIODir string) (BlobClient, error) { +// NewLocalClient instantiates a local blob service client. +func NewLocalClient(externalIODir string) (BlobClient, error) { storage, err := NewLocalStorage(externalIODir) if err != nil { return nil, errors.Wrap(err, "creating local client") @@ -168,7 +168,7 @@ func NewBlobClientFactory( ) BlobClientFactory { return func(ctx context.Context, dialing roachpb.NodeID) (BlobClient, error) { if dialing == 0 || localNodeID == dialing { - return newLocalClient(externalIODir) + return NewLocalClient(externalIODir) } conn, err := dialer.Dial(ctx, dialing, rpc.DefaultClass) if err != nil { diff --git a/pkg/blobs/testutils.go b/pkg/blobs/testutils.go index 41573ff006da..0c1522606044 100644 --- a/pkg/blobs/testutils.go +++ b/pkg/blobs/testutils.go @@ -20,7 +20,7 @@ import ( // in tests that use nodelocal storage. 
func TestBlobServiceClient(externalIODir string) BlobClientFactory { return func(ctx context.Context, dialing roachpb.NodeID) (BlobClient, error) { - return newLocalClient(externalIODir) + return NewLocalClient(externalIODir) } } diff --git a/pkg/ccl/cliccl/BUILD.bazel b/pkg/ccl/cliccl/BUILD.bazel index f29606009627..235ce71b3bda 100644 --- a/pkg/ccl/cliccl/BUILD.bazel +++ b/pkg/ccl/cliccl/BUILD.bazel @@ -6,6 +6,7 @@ go_library( "cliccl.go", "debug.go", "demo.go", + "inspect.go", "load.go", "mtproxy.go", "start.go", @@ -23,8 +24,13 @@ go_library( "//pkg/ccl/storageccl/engineccl/enginepbccl:enginepbccl_go_proto", "//pkg/ccl/workloadccl/cliccl", "//pkg/cli", + "//pkg/cli/cliflags", + "//pkg/roachpb", + "//pkg/rpc/nodedialer", "//pkg/security", + "//pkg/server", "//pkg/settings/cluster", + "//pkg/sql/catalog/catalogkv", "//pkg/sql/catalog/descpb", "//pkg/storage/cloud", "//pkg/storage/cloudimpl", diff --git a/pkg/ccl/cliccl/load.go b/pkg/ccl/cliccl/load.go index 52a1e2809b82..dccfbfb3cb15 100644 --- a/pkg/ccl/cliccl/load.go +++ b/pkg/ccl/cliccl/load.go @@ -11,6 +11,7 @@ package cliccl import ( "context" "fmt" + "path/filepath" "strings" "time" @@ -18,22 +19,37 @@ import ( "github.com/cockroachdb/cockroach/pkg/blobs" "github.com/cockroachdb/cockroach/pkg/ccl/backupccl" "github.com/cockroachdb/cockroach/pkg/cli" + "github.com/cockroachdb/cockroach/pkg/cli/cliflags" + "github.com/cockroachdb/cockroach/pkg/roachpb" "github.com/cockroachdb/cockroach/pkg/security" + "github.com/cockroachdb/cockroach/pkg/server" "github.com/cockroachdb/cockroach/pkg/settings/cluster" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/catalogkv" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" "github.com/cockroachdb/cockroach/pkg/storage/cloud" "github.com/cockroachdb/cockroach/pkg/storage/cloudimpl" + "github.com/cockroachdb/cockroach/pkg/util/hlc" "github.com/cockroachdb/cockroach/pkg/util/humanizeutil" "github.com/cockroachdb/cockroach/pkg/util/timeutil" "github.com/cockroachdb/errors" "github.com/spf13/cobra" ) +const ( + descriptors = "descriptors" + files = "files" + spans = "spans" + metadata = "metadata" +) + +var externalIODir string + func init() { loadShowCmd := &cobra.Command{ - Use: "show ", + Use: "show [descriptors|files|spans|metadata] ", Short: "show backups", - Long: "Shows information about a SQL backup.", + Long: "Shows subset(s) of meta information about a SQL backup.", + Args: cobra.MinimumNArgs(1), RunE: cli.MaybeDecorateGRPCError(runLoadShow), } @@ -45,35 +61,117 @@ func init() { return cmd.Usage() }, } + + f := loadCmds.Flags() + f.StringVarP( + &externalIODir, + cliflags.ExternalIODir.Name, + cliflags.ExternalIODir.Shorthand, + "", /*value*/ + cliflags.ExternalIODir.Usage()) + cli.AddCmd(loadCmds) loadCmds.AddCommand(loadShowCmd) } +func newBlobFactory(ctx context.Context, dialing roachpb.NodeID) (blobs.BlobClient, error) { + if dialing != 0 { + return nil, errors.Errorf(`only support nodelocal (0/self) under offline inspection`) + } + if externalIODir == "" { + externalIODir = filepath.Join(server.DefaultStorePath, "extern") + } + return blobs.NewLocalClient(externalIODir) +} + +func parseShowArgs(args []string) (options map[string]bool, path string, err error) { + options = make(map[string]bool) + for _, arg := range args { + switch strings.ToLower(arg) { + case descriptors: + options[descriptors] = true + case files: + options[files] = true + case spans: + options[spans] = true + case metadata: + options[metadata] = true + default: + if path != "" { + return nil, "", 
errors.New("more than one path is specified") + } + path = arg + } + } + + if len(options) == 0 { + options[descriptors] = true + options[files] = true + options[spans] = true + options[metadata] = true + } + + if len(args) == len(options) { + return nil, "", errors.New("backup_path argument is required") + } + return options, path, nil +} + func runLoadShow(cmd *cobra.Command, args []string) error { - if len(args) != 1 { - return errors.New("basepath argument is required") + + var options map[string]bool + var path string + var err error + if options, path, err = parseShowArgs(args); err != nil { + return err + } + + var showHeaders bool + if len(options) > 1 { + showHeaders = true } ctx := context.Background() - basepath := args[0] - if !strings.Contains(basepath, "://") { - basepath = cloudimpl.MakeLocalStorageURI(basepath) + if !strings.Contains(path, "://") { + path = cloudimpl.MakeLocalStorageURI(path) } externalStorageFromURI := func(ctx context.Context, uri string, user security.SQLUsername) (cloud.ExternalStorage, error) { return cloudimpl.ExternalStorageFromURI(ctx, uri, base.ExternalIODirConfig{}, - cluster.NoSettings, blobs.TestEmptyBlobClientFactory, user, nil, nil) + cluster.NoSettings, newBlobFactory, user, nil /*Internal Executor*/, nil /*kvDB*/) } + // This reads the raw backup descriptor (with table descriptors possibly not // upgraded from the old FK representation, or even older formats). If more // fields are added to the output, the table descriptors may need to be // upgraded. - desc, err := backupccl.ReadBackupManifestFromURI(ctx, basepath, security.RootUserName(), + desc, err := backupccl.ReadBackupManifestFromURI(ctx, path, security.RootUserName(), externalStorageFromURI, nil) if err != nil { return err } + + if _, ok := options[metadata]; ok { + showMeta(desc) + } + + if _, ok := options[spans]; ok { + showSpans(desc, showHeaders) + } + + if _, ok := options[files]; ok { + showFiles(desc, showHeaders) + } + + if _, ok := options[descriptors]; ok { + showDescriptor(desc) + } + + return nil +} + +func showMeta(desc backupccl.BackupManifest) { start := timeutil.Unix(0, desc.StartTime.WallTime).Format(time.RFC3339Nano) end := timeutil.Unix(0, desc.EndTime.WallTime).Format(time.RFC3339Nano) fmt.Printf("StartTime: %s (%s)\n", start, desc.StartTime) @@ -85,35 +183,60 @@ func runLoadShow(cmd *cobra.Command, args []string) error { fmt.Printf("ClusterID: %s\n", desc.ClusterID) fmt.Printf("NodeID: %s\n", desc.NodeID) fmt.Printf("BuildInfo: %s\n", desc.BuildInfo.Short()) - fmt.Printf("Spans:\n") +} + +func showSpans(desc backupccl.BackupManifest, showHeaders bool) { + tabfmt := "" + if showHeaders { + fmt.Printf("Spans:\n") + tabfmt = "\t" + } for _, s := range desc.Spans { - fmt.Printf(" %s\n", s) + fmt.Printf("%s%s\n", tabfmt, s) + } +} + +func showFiles(desc backupccl.BackupManifest, showHeaders bool) { + tabfmt := "" + if showHeaders { + fmt.Printf("Files:\n") + tabfmt = "\t" } - fmt.Printf("Files:\n") for _, f := range desc.Files { - fmt.Printf(" %s:\n", f.Path) - fmt.Printf(" Span: %s\n", f.Span) - fmt.Printf(" Sha512: %0128x\n", f.Sha512) - fmt.Printf(" DataSize: %d (%s)\n", f.EntryCounts.DataSize, humanizeutil.IBytes(f.EntryCounts.DataSize)) - fmt.Printf(" Rows: %d\n", f.EntryCounts.Rows) - fmt.Printf(" IndexEntries: %d\n", f.EntryCounts.IndexEntries) + fmt.Printf("%s%s:\n", tabfmt, f.Path) + fmt.Printf(" Span: %s\n", f.Span) + fmt.Printf(" Sha512: %0128x\n", f.Sha512) + fmt.Printf(" DataSize: %d (%s)\n", f.EntryCounts.DataSize, 
humanizeutil.IBytes(f.EntryCounts.DataSize)) + fmt.Printf(" Rows: %d\n", f.EntryCounts.Rows) + fmt.Printf(" IndexEntries: %d\n", f.EntryCounts.IndexEntries) } +} + +func showDescriptor(desc backupccl.BackupManifest) { // Note that these descriptors could be from any past version of the cluster, // in case more fields need to be added to the output. - fmt.Printf("Descriptors:\n") + dbIDToName := make(map[descpb.ID]string) for i := range desc.Descriptors { d := &desc.Descriptors[i] - table, database, _, _ := descpb.FromDescriptor(d) - var typeName string - if table != nil { - typeName = "table" - } else if database != nil { - typeName = "database" - } else { - continue + id := descpb.GetDescriptorID(d) + if d.GetDatabase() != nil { + dbIDToName[id] = descpb.GetDescriptorName(d) + } + } + + fmt.Printf("Databases:\n") + for i := range dbIDToName { + fmt.Printf(" %s\n", + dbIDToName[i]) + } + + fmt.Printf("Tables:\n") + for i := range desc.Descriptors { + d := &desc.Descriptors[i] + if descpb.TableFromDescriptor(d, hlc.Timestamp{}) != nil { + desc := catalogkv.UnwrapDescriptorRaw(nil, d) + fmt.Printf(" %s (%s) \n", + descpb.GetDescriptorName(d), dbIDToName[desc.GetParentID()]) } - fmt.Printf(" %d: %s (%s)\n", - descpb.GetDescriptorID(d), descpb.GetDescriptorName(d), typeName) } - return nil } From 4f2c4448f56724e74dc0f79d18d9c568b58ccba3 Mon Sep 17 00:00:00 2001 From: elliebababa Date: Tue, 2 Mar 2021 10:17:56 -0800 Subject: [PATCH 2/3] cli: export TestCLI for cliccl Previously, the clitest utilities were private to the cli package, while cliccl lacked sufficient tests for the CCL CLI tools. With this commit, the clitest utilities are exported so that cliccl can reuse them for unit tests. The `cliTest` and `cliTestParams` structs are renamed to `TestCLI` and `TestCLIParams`. Release justification: Low risk, high benefit changes to existing functionality. 
Release note: None --- pkg/ccl/cliccl/BUILD.bazel | 19 +- pkg/ccl/cliccl/load.go | 52 ++-- pkg/ccl/cliccl/load_test.go | 109 +++++++++ pkg/cli/BUILD.bazel | 2 + pkg/cli/cli_debug_test.go | 6 +- pkg/cli/cli_test.go | 425 ++++----------------------------- pkg/cli/demo_locality_test.go | 4 +- pkg/cli/doctor_test.go | 8 +- pkg/cli/import_test.go | 6 +- pkg/cli/nodelocal_test.go | 12 +- pkg/cli/sql_test.go | 4 +- pkg/cli/sql_util_test.go | 22 +- pkg/cli/statement_diag_test.go | 4 +- pkg/cli/testutils.go | 355 +++++++++++++++++++++++++++ pkg/cli/userfiletable_test.go | 20 +- pkg/cli/zip_test.go | 22 +- 16 files changed, 618 insertions(+), 452 deletions(-) create mode 100644 pkg/ccl/cliccl/load_test.go diff --git a/pkg/ccl/cliccl/BUILD.bazel b/pkg/ccl/cliccl/BUILD.bazel index 235ce71b3bda..f350bfead8e0 100644 --- a/pkg/ccl/cliccl/BUILD.bazel +++ b/pkg/ccl/cliccl/BUILD.bazel @@ -6,7 +6,6 @@ go_library( "cliccl.go", "debug.go", "demo.go", - "inspect.go", "load.go", "mtproxy.go", "start.go", @@ -25,13 +24,14 @@ go_library( "//pkg/ccl/workloadccl/cliccl", "//pkg/cli", "//pkg/cli/cliflags", + "//pkg/keys", "//pkg/roachpb", - "//pkg/rpc/nodedialer", "//pkg/security", "//pkg/server", "//pkg/settings/cluster", - "//pkg/sql/catalog/catalogkv", "//pkg/sql/catalog/descpb", + "//pkg/sql/catalog/tabledesc", + "//pkg/sql/sessiondata", "//pkg/storage/cloud", "//pkg/storage/cloudimpl", "//pkg/storage/enginepb", @@ -54,11 +54,22 @@ go_library( go_test( name = "cliccl_test", size = "small", - srcs = ["main_test.go"], + srcs = [ + "load_test.go", + "main_test.go", + ], + embed = [":cliccl"], deps = [ + "//pkg/base", "//pkg/build", "//pkg/ccl/utilccl", + "//pkg/cli", "//pkg/server", + "//pkg/testutils", "//pkg/testutils/serverutils", + "//pkg/testutils/sqlutils", + "//pkg/util/leaktest", + "//pkg/util/log", + "@com_github_stretchr_testify//require", ], ) diff --git a/pkg/ccl/cliccl/load.go b/pkg/ccl/cliccl/load.go index dccfbfb3cb15..7e3b13512617 100644 --- a/pkg/ccl/cliccl/load.go +++ b/pkg/ccl/cliccl/load.go @@ -20,12 +20,14 @@ import ( "github.com/cockroachdb/cockroach/pkg/ccl/backupccl" "github.com/cockroachdb/cockroach/pkg/cli" "github.com/cockroachdb/cockroach/pkg/cli/cliflags" + "github.com/cockroachdb/cockroach/pkg/keys" "github.com/cockroachdb/cockroach/pkg/roachpb" "github.com/cockroachdb/cockroach/pkg/security" "github.com/cockroachdb/cockroach/pkg/server" "github.com/cockroachdb/cockroach/pkg/settings/cluster" - "github.com/cockroachdb/cockroach/pkg/sql/catalog/catalogkv" "github.com/cockroachdb/cockroach/pkg/sql/catalog/descpb" + "github.com/cockroachdb/cockroach/pkg/sql/catalog/tabledesc" + "github.com/cockroachdb/cockroach/pkg/sql/sessiondata" "github.com/cockroachdb/cockroach/pkg/storage/cloud" "github.com/cockroachdb/cockroach/pkg/storage/cloudimpl" "github.com/cockroachdb/cockroach/pkg/util/hlc" @@ -62,8 +64,8 @@ func init() { }, } - f := loadCmds.Flags() - f.StringVarP( + loadFlags := loadCmds.Flags() + loadFlags.StringVarP( &externalIODir, cliflags.ExternalIODir.Name, cliflags.ExternalIODir.Shorthand, @@ -72,6 +74,7 @@ func init() { cli.AddCmd(loadCmds) loadCmds.AddCommand(loadShowCmd) + loadShowCmd.Flags().AddFlagSet(loadFlags) } func newBlobFactory(ctx context.Context, dialing roachpb.NodeID) (blobs.BlobClient, error) { @@ -165,7 +168,9 @@ func runLoadShow(cmd *cobra.Command, args []string) error { } if _, ok := options[descriptors]; ok { - showDescriptor(desc) + if err := showDescriptor(desc); err != nil { + return err + } } return nil @@ -204,39 +209,56 @@ func showFiles(desc 
backupccl.BackupManifest, showHeaders bool) { } for _, f := range desc.Files { fmt.Printf("%s%s:\n", tabfmt, f.Path) - fmt.Printf(" Span: %s\n", f.Span) - fmt.Printf(" Sha512: %0128x\n", f.Sha512) - fmt.Printf(" DataSize: %d (%s)\n", f.EntryCounts.DataSize, humanizeutil.IBytes(f.EntryCounts.DataSize)) - fmt.Printf(" Rows: %d\n", f.EntryCounts.Rows) - fmt.Printf(" IndexEntries: %d\n", f.EntryCounts.IndexEntries) + fmt.Printf("%s Span: %s\n", tabfmt, f.Span) + fmt.Printf("%s Sha512: %0128x\n", tabfmt, f.Sha512) + fmt.Printf("%s DataSize: %d (%s)\n", tabfmt, f.EntryCounts.DataSize, humanizeutil.IBytes(f.EntryCounts.DataSize)) + fmt.Printf("%s Rows: %d\n", tabfmt, f.EntryCounts.Rows) + fmt.Printf("%s IndexEntries: %d\n", tabfmt, f.EntryCounts.IndexEntries) } } -func showDescriptor(desc backupccl.BackupManifest) { +func showDescriptor(desc backupccl.BackupManifest) error { // Note that these descriptors could be from any past version of the cluster, // in case more fields need to be added to the output. + dbIDs := make([]descpb.ID, 0) dbIDToName := make(map[descpb.ID]string) + schemaIDs := make([]descpb.ID, 0) + schemaIDs = append(schemaIDs, keys.PublicSchemaID) + schemaIDToName := make(map[descpb.ID]string) + schemaIDToName[keys.PublicSchemaID] = sessiondata.PublicSchemaName for i := range desc.Descriptors { d := &desc.Descriptors[i] id := descpb.GetDescriptorID(d) if d.GetDatabase() != nil { dbIDToName[id] = descpb.GetDescriptorName(d) + dbIDs = append(dbIDs, id) + } else if d.GetSchema() != nil { + schemaIDToName[id] = descpb.GetDescriptorName(d) + schemaIDs = append(schemaIDs, id) } } fmt.Printf("Databases:\n") - for i := range dbIDToName { + for _, id := range dbIDs { fmt.Printf(" %s\n", - dbIDToName[i]) + dbIDToName[id]) + } + + fmt.Printf("Schemas:\n") + for _, id := range schemaIDs { + fmt.Printf(" %s\n", + schemaIDToName[id]) } fmt.Printf("Tables:\n") for i := range desc.Descriptors { d := &desc.Descriptors[i] if descpb.TableFromDescriptor(d, hlc.Timestamp{}) != nil { - desc := catalogkv.UnwrapDescriptorRaw(nil, d) - fmt.Printf(" %s (%s) \n", - descpb.GetDescriptorName(d), dbIDToName[desc.GetParentID()]) + tbDesc := tabledesc.NewImmutable(*descpb.TableFromDescriptor(d, hlc.Timestamp{})) + dbName := dbIDToName[tbDesc.GetParentID()] + schemaName := schemaIDToName[tbDesc.GetParentSchemaID()] + fmt.Printf(" %s.%s.%s\n", dbName, schemaName, descpb.GetDescriptorName(d)) } } + return nil } diff --git a/pkg/ccl/cliccl/load_test.go b/pkg/ccl/cliccl/load_test.go new file mode 100644 index 000000000000..96293494ef77 --- /dev/null +++ b/pkg/ccl/cliccl/load_test.go @@ -0,0 +1,109 @@ +// Copyright 2021 The Cockroach Authors. +// +// Licensed as a CockroachDB Enterprise file under the Cockroach Community +// License (the "License"); you may not use this file except in compliance with +// the License. 
You may obtain a copy of the License at +// +// https://github.com/cockroachdb/cockroach/blob/master/licenses/CCL.txt + +package cliccl + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/cockroachdb/cockroach/pkg/base" + "github.com/cockroachdb/cockroach/pkg/cli" + "github.com/cockroachdb/cockroach/pkg/testutils" + "github.com/cockroachdb/cockroach/pkg/testutils/serverutils" + "github.com/cockroachdb/cockroach/pkg/testutils/sqlutils" + "github.com/cockroachdb/cockroach/pkg/util/leaktest" + "github.com/cockroachdb/cockroach/pkg/util/log" + "github.com/stretchr/testify/require" +) + +func TestLoadShow(t *testing.T) { + defer leaktest.AfterTest(t)() + defer log.Scope(t).Close(t) + + c := cli.NewCLITest(cli.TestCLIParams{T: t, NoServer: true}) + defer c.Cleanup() + + ctx := context.Background() + dir, cleanFn := testutils.TempDir(t) + defer cleanFn() + srv, db, _ := serverutils.StartServer(t, base.TestServerArgs{ExternalIODir: dir, Insecure: true}) + defer srv.Stopper().Stop(ctx) + + sqlDB := sqlutils.MakeSQLRunner(db) + sqlDB.Exec(t, `CREATE DATABASE testDB`) + sqlDB.Exec(t, `CREATE SCHEMA testDB.testschema`) + sqlDB.Exec(t, `CREATE TABLE testDB.testschema.fooTable (a INT)`) + sqlDB.Exec(t, `INSERT INTO testDB.testschema.fooTable VALUES (123)`) + const backupPath = "nodelocal://0/fooFolder" + sqlDB.Exec(t, `BACKUP testDB.testSchema.fooTable TO $1`, backupPath) + + // Test load show with metadata option + expectedMetadataOutputSubstr := []string{"StartTime:", "EndTime:", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0", "FormatVersion: 1", "ClusterID:", "NodeID: 0", "BuildInfo:"} + t.Run("show-metadata", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show %s metadata --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + for _, substr := range expectedMetadataOutputSubstr { + require.True(t, strings.Contains(out, substr)) + } + }) + + // Test load show with spans option + expectedSpansOutput := "/Table/54/{1-2}\n" + t.Run("show-spans", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show %s spans --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + checkExpectedOutput(t, expectedSpansOutput, out) + }) + + // Test load show with files option + expectedFilesOutputSubstr := []string{".sst", "Span: /Table/54/{1-2}", "Sha512:", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0"} + t.Run("show-files", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show %s files --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + for _, substr := range expectedFilesOutputSubstr { + require.Contains(t, out, substr) + } + }) + + // Test load show with descriptors option + expectedDescOutput := + `Databases: + testdb +Schemas: + public + testschema +Tables: + testdb.testschema.footable +` + t.Run("show-descriptors", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show %s descriptors --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + checkExpectedOutput(t, expectedDescOutput, out) + }) + + // Test load show without options should output all information + t.Run("show-without-options", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show %s --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + expectedOutputSubstr := append(expectedMetadataOutputSubstr, "Spans:\n\t"+expectedSpansOutput) + expectedOutputSubstr = append(expectedOutputSubstr, "Files:\n\t") + expectedOutputSubstr = append(expectedOutputSubstr, 
expectedFilesOutputSubstr...) + for _, substr := range expectedOutputSubstr { + require.Contains(t, out, substr) + } + }) +} + +func checkExpectedOutput(t *testing.T, expected string, out string) { + endOfCmd := strings.Index(out, "\n") + out = out[endOfCmd+1:] + require.Equal(t, expected, out) +} diff --git a/pkg/cli/BUILD.bazel b/pkg/cli/BUILD.bazel index 1804dbf82d35..fa070e84358a 100644 --- a/pkg/cli/BUILD.bazel +++ b/pkg/cli/BUILD.bazel @@ -109,6 +109,7 @@ go_library( "//pkg/roachpb", "//pkg/rpc", "//pkg/security", + "//pkg/security/securitytest", "//pkg/server", "//pkg/server/debug", "//pkg/server/dumpstore", @@ -141,6 +142,7 @@ go_library( "//pkg/storage/cloud", "//pkg/storage/cloudimpl", "//pkg/storage/enginepb", + "//pkg/testutils/serverutils", "//pkg/ts/tspb", "//pkg/util", "//pkg/util/cgroups", diff --git a/pkg/cli/cli_debug_test.go b/pkg/cli/cli_debug_test.go index 9d170f488c7d..35ec8a91eaf6 100644 --- a/pkg/cli/cli_debug_test.go +++ b/pkg/cli/cli_debug_test.go @@ -21,7 +21,7 @@ import ( ) func Example_debug_decode_key_value() { - cliTest{}.RunWithArgs([]string{"debug", "decode-value", "016b12bd8980c0b6c2e211ba5182000172647363", "884d186d03089b09120bbd8980c0b6c2e211ba51821a0bbd8980c0b9e7c5c610e99622060801100118012206080410041802220608021002180428053004"}) + TestCLI{}.RunWithArgs([]string{"debug", "decode-value", "016b12bd8980c0b6c2e211ba5182000172647363", "884d186d03089b09120bbd8980c0b6c2e211ba51821a0bbd8980c0b9e7c5c610e99622060801100118012206080410041802220608021002180428053004"}) // Output: // debug decode-value 016b12bd8980c0b6c2e211ba5182000172647363 884d186d03089b09120bbd8980c0b6c2e211ba51821a0bbd8980c0b9e7c5c610e99622060801100118012206080410041802220608021002180428053004 @@ -43,7 +43,7 @@ func TestDebugKeysHex(t *testing.T) { createStore(t, storePath) { - out, err := cliTest{}.RunWithCapture("debug keys " + storePath + + out, err := TestCLI{}.RunWithCapture("debug keys " + storePath + " --from hex:016b12bd898090d79e52e79b0144000174786e2d733fb094e9aa4d67974c71486b9823b900" + " --to hex:016b12bd898090d79e52e79b0144000174786e2d733fb094e9aa4d67974c71486b9823b900") if err != nil { @@ -56,7 +56,7 @@ func TestDebugKeysHex(t *testing.T) { } // Test invalid key, verify we get a good suggestion back. 
- out, err := cliTest{}.RunWithCapture("debug keys " + storePath + + out, err := TestCLI{}.RunWithCapture("debug keys " + storePath + " --to hex:01") if err != nil { t.Fatal(err) diff --git a/pkg/cli/cli_test.go b/pkg/cli/cli_test.go index e1431bae0ea4..8c3992590521 100644 --- a/pkg/cli/cli_test.go +++ b/pkg/cli/cli_test.go @@ -17,7 +17,6 @@ import ( "fmt" "io" "io/ioutil" - "net" "os" "path/filepath" "reflect" @@ -29,15 +28,10 @@ import ( "github.com/cockroachdb/cockroach/pkg/base" "github.com/cockroachdb/cockroach/pkg/build" - "github.com/cockroachdb/cockroach/pkg/cli/cliflags" - "github.com/cockroachdb/cockroach/pkg/roachpb" "github.com/cockroachdb/cockroach/pkg/security" - "github.com/cockroachdb/cockroach/pkg/security/securitytest" - "github.com/cockroachdb/cockroach/pkg/server" "github.com/cockroachdb/cockroach/pkg/sql/lex" "github.com/cockroachdb/cockroach/pkg/sql/sem/tree" "github.com/cockroachdb/cockroach/pkg/testutils" - "github.com/cockroachdb/cockroach/pkg/testutils/serverutils" "github.com/cockroachdb/cockroach/pkg/testutils/skip" "github.com/cockroachdb/cockroach/pkg/util/leaktest" "github.com/cockroachdb/cockroach/pkg/util/log" @@ -48,340 +42,13 @@ import ( "github.com/stretchr/testify/assert" ) -type cliTest struct { - *server.TestServer - certsDir string - cleanupFunc func() error - prevStderr *os.File - - // t is the testing.T instance used for this test. - // Example_xxx tests may have this set to nil. - t *testing.T - // logScope binds the lifetime of the log files to this test, when t - // is not nil - logScope *log.TestLogScope - // if true, doesn't print args during RunWithArgs - omitArgs bool -} - -type cliTestParams struct { - t *testing.T - insecure bool - noServer bool - storeSpecs []base.StoreSpec - locality roachpb.Locality - noNodelocal bool -} - -// testTempFilePrefix is a sentinel marker to be used as the prefix of a -// test file name. It is used to extract the file name from a uniquely -// generated (temp directory) file path. -const testTempFilePrefix = "test-temp-prefix-" - -func (c *cliTest) fail(err interface{}) { - if c.t != nil { - defer c.logScope.Close(c.t) - c.t.Fatal(err) - } else { - panic(err) - } -} - -func createTestCerts(certsDir string) (cleanup func() error) { - // Copy these assets to disk from embedded strings, so this test can - // run from a standalone binary. - // Disable embedded certs, or the security library will try to load - // our real files as embedded assets. 
- security.ResetAssetLoader() - - assets := []string{ - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedCACert), - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedCAKey), - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedNodeCert), - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedNodeKey), - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedRootCert), - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedRootKey), - - filepath.Join(security.EmbeddedCertsDir, security.EmbeddedTenantClientCACert), - } - - for _, a := range assets { - _, err := securitytest.RestrictedCopy(a, certsDir, filepath.Base(a)) - if err != nil { - panic(err) - } - } - - return func() error { - security.SetAssetLoader(securitytest.EmbeddedAssets) - return os.RemoveAll(certsDir) - } -} - -func newCLITest(params cliTestParams) cliTest { - c := cliTest{t: params.t} - - certsDir, err := ioutil.TempDir("", "cli-test") - if err != nil { - c.fail(err) - } - c.certsDir = certsDir - - if c.t != nil { - c.logScope = log.Scope(c.t) - } - - c.cleanupFunc = func() error { return nil } - - if !params.noServer { - if !params.insecure { - c.cleanupFunc = createTestCerts(certsDir) - baseCfg.SSLCertsDir = certsDir - } - - args := base.TestServerArgs{ - Insecure: params.insecure, - SSLCertsDir: c.certsDir, - StoreSpecs: params.storeSpecs, - Locality: params.locality, - ExternalIODir: filepath.Join(certsDir, "extern"), - } - if params.noNodelocal { - args.ExternalIODir = "" - } - s, err := serverutils.StartServerRaw(args) - if err != nil { - c.fail(err) - } - c.TestServer = s.(*server.TestServer) - - log.Infof(context.Background(), "server started at %s", c.ServingRPCAddr()) - log.Infof(context.Background(), "SQL listener at %s", c.ServingSQLAddr()) - } - - baseCfg.User = security.NodeUserName() - - // Ensure that CLI error messages and anything meant for the - // original stderr is redirected to stdout, where it can be - // captured. - c.prevStderr = stderr - stderr = os.Stdout - - return c -} - -// setCLIDefaultsForTests invokes initCLIDefaults but pretends the -// output is not a terminal, even if it happens to be. This ensures -// e.g. that tests ran with -v have the same output as those without. -func setCLIDefaultsForTests() { - initCLIDefaults() - cliCtx.terminalOutput = false - sqlCtx.showTimes = false - // Even though we pretend there is no terminal, most tests want - // pretty tables. - cliCtx.tableDisplayFormat = tableDisplayTable -} - -// stopServer stops the test server. -func (c *cliTest) stopServer() { - if c.TestServer != nil { - log.Infof(context.Background(), "stopping server at %s / %s", - c.ServingRPCAddr(), c.ServingSQLAddr()) - c.Stopper().Stop(context.Background()) - } -} - -// restartServer stops and restarts the test server. The ServingRPCAddr() may -// have changed after this method returns. -func (c *cliTest) restartServer(params cliTestParams) { - c.stopServer() - log.Info(context.Background(), "restarting server") - s, err := serverutils.StartServerRaw(base.TestServerArgs{ - Insecure: params.insecure, - SSLCertsDir: c.certsDir, - StoreSpecs: params.storeSpecs, - }) - if err != nil { - c.fail(err) - } - c.TestServer = s.(*server.TestServer) - log.Infof(context.Background(), "restarted server at %s / %s", - c.ServingRPCAddr(), c.ServingSQLAddr()) -} - -// cleanup cleans up after the test, stopping the server if necessary. -// The log files are removed if the test has succeeded. 
-func (c *cliTest) cleanup() { - defer func() { - if c.t != nil { - c.logScope.Close(c.t) - } - }() - - // Restore stderr. - stderr = c.prevStderr - - log.Info(context.Background(), "stopping server and cleaning up CLI test") - - c.stopServer() - - if err := c.cleanupFunc(); err != nil { - panic(err) - } -} - -func (c cliTest) Run(line string) { - a := strings.Fields(line) - c.RunWithArgs(a) -} - -// RunWithCapture runs c and returns a string containing the output of c -// and any error that may have occurred capturing the output. We do not propagate -// errors in executing c, because those will be caught when the test verifies -// the output of c. -func (c cliTest) RunWithCapture(line string) (out string, err error) { - return captureOutput(func() { - c.Run(line) - }) -} - -func (c cliTest) RunWithCaptureArgs(args []string) (string, error) { - return captureOutput(func() { - c.RunWithArgs(args) - }) -} - -// captureOutput runs f and returns a string containing the output and any -// error that may have occurred capturing the output. -func captureOutput(f func()) (out string, err error) { - // Heavily inspired by Go's testing/example.go:runExample(). - - // Funnel stdout into a pipe. - stdoutSave, stderrRedirSave := os.Stdout, stderr - r, w, err := os.Pipe() - if err != nil { - return "", err - } - os.Stdout = w - stderr = w - - // Send all bytes from piped stdout through the output channel. - type captureResult struct { - out string - err error - } - outC := make(chan captureResult) - go func() { - var buf bytes.Buffer - _, err := io.Copy(&buf, r) - r.Close() - outC <- captureResult{buf.String(), err} - }() - - // Clean up and record output in separate function to handle panics. - defer func() { - // Close pipe and restore normal stdout. - w.Close() - os.Stdout = stdoutSave - stderr = stderrRedirSave - outResult := <-outC - out, err = outResult.out, outResult.err - if x := recover(); x != nil { - err = errors.Errorf("panic: %v", x) - } - }() - - // Run the command. The output will be returned in the defer block. - f() - return -} - -func isSQLCommand(args []string) (bool, error) { - cmd, _, err := cockroachCmd.Find(args) - if err != nil { - return false, err - } - // We use --echo-sql as a marker of SQL-only commands. - if f := flagSetForCmd(cmd).Lookup(cliflags.EchoSQL.Name); f != nil { - return true, nil - } - return false, nil -} - -func (c cliTest) RunWithArgs(origArgs []string) { - TestingReset() - - if err := func() error { - args := append([]string(nil), origArgs[:1]...) - if c.TestServer != nil { - addr := c.ServingRPCAddr() - if isSQL, err := isSQLCommand(origArgs); err != nil { - return err - } else if isSQL { - addr = c.ServingSQLAddr() - } - h, p, err := net.SplitHostPort(addr) - if err != nil { - return err - } - args = append(args, fmt.Sprintf("--host=%s", net.JoinHostPort(h, p))) - if c.Cfg.Insecure { - args = append(args, "--insecure=true") - } else { - args = append(args, "--insecure=false") - args = append(args, fmt.Sprintf("--certs-dir=%s", c.certsDir)) - } - } - args = append(args, origArgs[1:]...) - - // `nodelocal upload` CLI tests create test files in unique temp - // directories. Given that the expected output for such tests is defined as - // a static comment, it is not possible to match against the full file path. - // So, we trim the file path upto the sentinel prefix marker, and use only - // the file name for comparing against the expected output. 
- if len(origArgs) >= 3 && strings.Contains(origArgs[2], testTempFilePrefix) { - splitFilePath := strings.Split(origArgs[2], testTempFilePrefix) - origArgs[2] = splitFilePath[1] - } - - if !c.omitArgs { - fmt.Fprintf(os.Stderr, "%s\n", args) - fmt.Println(strings.Join(origArgs, " ")) - } - - return Run(args) - }(); err != nil { - cliOutputError(os.Stdout, err, true /*showSeverity*/, false /*verbose*/) - } -} - -func (c cliTest) RunWithCAArgs(origArgs []string) { - TestingReset() - - if err := func() error { - args := append([]string(nil), origArgs[:1]...) - if c.TestServer != nil { - args = append(args, fmt.Sprintf("--ca-key=%s", filepath.Join(c.certsDir, security.EmbeddedCAKey))) - args = append(args, fmt.Sprintf("--certs-dir=%s", c.certsDir)) - } - args = append(args, origArgs[1:]...) - - fmt.Fprintf(os.Stderr, "%s\n", args) - fmt.Println(strings.Join(origArgs, " ")) - - return Run(args) - }(); err != nil { - fmt.Println(err) - } -} - func TestQuit(t *testing.T) { defer leaktest.AfterTest(t)() skip.UnderShort(t) - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() c.Run("quit") // Wait until this async command cleanups the server. @@ -389,8 +56,8 @@ func TestQuit(t *testing.T) { } func Example_logging() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{`sql`, `--logtostderr=false`, `-e`, `select 1 as "1"`}) c.RunWithArgs([]string{`sql`, `--logtostderr=true`, `-e`, `select 1 as "1"`}) @@ -409,8 +76,8 @@ func Example_logging() { } func Example_demo() { - c := newCLITest(cliTestParams{noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{NoServer: true}) + defer c.Cleanup() defer func(b bool) { testingForceRandomizeDemoPorts = b }(testingForceRandomizeDemoPorts) testingForceRandomizeDemoPorts = true @@ -497,8 +164,8 @@ func Example_demo() { } func Example_sql() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{`sql`, `-e`, `show application_name`}) c.RunWithArgs([]string{`sql`, `-e`, `create database t; create table t.f (x int, y int); insert into t.f values (42, 69)`}) @@ -595,8 +262,8 @@ func Example_sql() { } func Example_sql_watch() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{`sql`, `-e`, `create table d(x int); insert into d values(3)`}) c.RunWithArgs([]string{`sql`, `--watch`, `.1s`, `-e`, `update d set x=x-1 returning 1/x as dec`}) @@ -614,8 +281,8 @@ func Example_sql_watch() { } func Example_sql_format() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-e", "create database t; create table t.times (bare timestamp, withtz timestamptz)"}) c.RunWithArgs([]string{"sql", "-e", "insert into t.times values ('2016-01-25 10:10:10', '2016-01-25 10:10:10-05:00')"}) @@ -632,8 +299,8 @@ func Example_sql_format() { } func Example_sql_column_labels() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() testData := []string{ `f"oo`, @@ -788,8 +455,8 @@ thenshort`, } func Example_sql_empty_table() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-e", "create database t;" + "create table t.norows(x int);" + @@ -890,8 +557,8 
@@ func Example_sql_empty_table() { } func Example_csv_tsv_quoting() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() testData := []string{ `ab`, @@ -1045,8 +712,8 @@ def`, } func Example_sql_table() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() testData := []struct { str, desc string @@ -1348,8 +1015,8 @@ func TestRenderHTML(t *testing.T) { } func Example_misc_table() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-e", "create database t; create table t.t (s string, d string);"}) c.RunWithArgs([]string{"sql", "--format=table", "-e", "select ' hai' as x"}) @@ -1379,8 +1046,8 @@ func Example_misc_table() { } func Example_cert() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithCAArgs([]string{"cert", "create-client", "foo"}) c.RunWithCAArgs([]string{"cert", "create-client", "Ομηρος"}) @@ -1501,8 +1168,8 @@ Use "cockroach [command] --help" for more information about a command. } func Example_node() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() // Refresh time series data, which is required to retrieve stats. if err := c.WriteSummaries(); err != nil { @@ -1536,8 +1203,8 @@ func Example_node() { func TestCLITimeout(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() // Wrap the meat of the test in a retry loop. Setting a timeout like this is // racy as the operation may have succeeded by the time the scheduler gives @@ -1568,8 +1235,8 @@ func TestNodeStatus(t *testing.T) { skip.WithIssue(t, 38151) start := timeutil.Now() - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() // Refresh time series data, which is required to retrieve stats. if err := c.WriteSummaries(); err != nil { @@ -1625,7 +1292,7 @@ func TestNodeStatus(t *testing.T) { checkNodeStatus(t, c, out, start) } -func checkNodeStatus(t *testing.T, c cliTest, output string, start time.Time) { +func checkNodeStatus(t *testing.T, c TestCLI, output string, start time.Time) { buf := bytes.NewBufferString(output) s := bufio.NewScanner(buf) @@ -1900,8 +1567,8 @@ func TestGenAutocomplete(t *testing.T) { func TestJunkPositionalArguments(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t, noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t, NoServer: true}) + defer c.Cleanup() for i, test := range []string{ "start", @@ -1925,8 +1592,8 @@ func TestJunkPositionalArguments(t *testing.T) { func TestWorkload(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{NoServer: true}) + defer c.Cleanup() out, err := c.RunWithCapture("workload init --help") if err != nil { @@ -1943,10 +1610,10 @@ func Example_in_memory() { if err != nil { panic(err) } - c := newCLITest(cliTestParams{ - storeSpecs: []base.StoreSpec{spec}, + c := NewCLITest(TestCLIParams{ + StoreSpecs: []base.StoreSpec{spec}, }) - defer c.cleanup() + defer c.Cleanup() // Test some sql to ensure that the in memory store is working. 
c.RunWithArgs([]string{"sql", "-e", "create database t; create table t.f (x int, y int); insert into t.f values (42, 69)"}) @@ -1962,8 +1629,8 @@ func Example_in_memory() { } func Example_pretty_print_numerical_strings() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() // All strings in pretty-print output should be aligned to left regardless of their contents c.RunWithArgs([]string{"sql", "-e", "create database t; create table t.t (s string, d string);"}) @@ -1995,8 +1662,8 @@ func Example_pretty_print_numerical_strings() { } func Example_sqlfmt() { - c := newCLITest(cliTestParams{noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{NoServer: true}) + defer c.Cleanup() c.RunWithArgs([]string{"sqlfmt", "-e", ";"}) c.RunWithArgs([]string{"sqlfmt", "-e", "delete from t"}) @@ -2048,8 +1715,8 @@ func Example_sqlfmt() { // The input file contains a mix of client-side and // server-side commands to ensure that both are supported with -f. func Example_read_from_file() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-e", "select 1", "-f", "testdata/inputfile.sql"}) c.RunWithArgs([]string{"sql", "-f", "testdata/inputfile.sql"}) @@ -2075,8 +1742,8 @@ func Example_read_from_file() { // Example_includes tests the \i command. func Example_includes() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-f", "testdata/i_twolevels1.sql"}) c.RunWithArgs([]string{"sql", "-f", "testdata/i_multiline.sql"}) diff --git a/pkg/cli/demo_locality_test.go b/pkg/cli/demo_locality_test.go index b20d15aa8901..d26c7c2873a6 100644 --- a/pkg/cli/demo_locality_test.go +++ b/pkg/cli/demo_locality_test.go @@ -18,8 +18,8 @@ import ( ) func Example_demo_locality() { - c := newCLITest(cliTestParams{noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{NoServer: true}) + defer c.Cleanup() defer func(b bool) { testingForceRandomizeDemoPorts = b }(testingForceRandomizeDemoPorts) testingForceRandomizeDemoPorts = true diff --git a/pkg/cli/doctor_test.go b/pkg/cli/doctor_test.go index ee49f8e38201..e4d4d0c90802 100644 --- a/pkg/cli/doctor_test.go +++ b/pkg/cli/doctor_test.go @@ -21,8 +21,8 @@ import ( // This test doctoring a secure cluster. func TestDoctorCluster(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() // Introduce a corruption in the descriptor table by adding a table and // removing its parent. @@ -50,8 +50,8 @@ func TestDoctorCluster(t *testing.T) { // This test the operation of zip over secure clusters. func TestDoctorZipDir(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t, noServer: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t, NoServer: true}) + defer c.Cleanup() out, err := c.RunWithCapture("debug doctor zipdir testdata/doctor/debugzip") if err != nil { diff --git a/pkg/cli/import_test.go b/pkg/cli/import_test.go index 715fc40ced0c..ad9eb78aa5e7 100644 --- a/pkg/cli/import_test.go +++ b/pkg/cli/import_test.go @@ -34,7 +34,7 @@ import ( // method ensures that the upload and deletion semantics to userfile are working // as expected. 
func runImportCLICommand( - ctx context.Context, t *testing.T, cliCmd string, dumpFilePath string, c cliTest, + ctx context.Context, t *testing.T, cliCmd string, dumpFilePath string, c TestCLI, ) string { knobs, unsetImportCLIKnobs := setImportCLITestingKnobs() defer unsetImportCLIKnobs() @@ -80,8 +80,8 @@ func runImportCLICommand( func TestImportCLI(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() ctx := context.Background() diff --git a/pkg/cli/nodelocal_test.go b/pkg/cli/nodelocal_test.go index 33d0b7c58cc7..4ced6af86710 100644 --- a/pkg/cli/nodelocal_test.go +++ b/pkg/cli/nodelocal_test.go @@ -23,8 +23,8 @@ import ( ) func Example_nodelocal() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() file, cleanUp := createTestFile("test.csv", "content") defer cleanUp() @@ -49,8 +49,8 @@ func Example_nodelocal() { } func Example_nodelocal_disabled() { - c := newCLITest(cliTestParams{noNodelocal: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{NoNodelocal: true}) + defer c.Cleanup() file, cleanUp := createTestFile("test.csv", "non-empty-file") defer cleanUp() @@ -71,8 +71,8 @@ func Example_nodelocal_disabled() { func TestNodeLocalFileUpload(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() dir, cleanFn := testutils.TempDir(t) defer cleanFn() diff --git a/pkg/cli/sql_test.go b/pkg/cli/sql_test.go index 53e3885c4c52..6aad84167c3a 100644 --- a/pkg/cli/sql_test.go +++ b/pkg/cli/sql_test.go @@ -25,8 +25,8 @@ import ( // Example_sql_lex tests the usage of the lexer in the sql subcommand. func Example_sql_lex() { - c := newCLITest(cliTestParams{insecure: true}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{Insecure: true}) + defer c.Cleanup() conn := makeSQLConn(fmt.Sprintf("postgres://%s@%s/?sslmode=disable", security.RootUser, c.ServingSQLAddr())) diff --git a/pkg/cli/sql_util_test.go b/pkg/cli/sql_util_test.go index 94b45d84a0f1..9b3a8a760dc6 100644 --- a/pkg/cli/sql_util_test.go +++ b/pkg/cli/sql_util_test.go @@ -30,9 +30,9 @@ import ( func TestConnRecover(t *testing.T) { defer leaktest.AfterTest(t)() - p := cliTestParams{t: t} - c := newCLITest(p) - defer c.cleanup() + p := TestCLIParams{T: t} + c := NewCLITest(p) + defer c.Cleanup() url, cleanup := sqlutils.PGUrl(t, c.ServingSQLAddr(), t.Name(), url.User(security.RootUser)) defer cleanup() @@ -96,7 +96,7 @@ func TestConnRecover(t *testing.T) { // simulateServerRestart restarts the test server and reconfigures the connection // to use the new test server's port number. This is necessary because the port // number is selected randomly. 
-func simulateServerRestart(c *cliTest, p cliTestParams, conn *sqlConn) func() { +func simulateServerRestart(c *TestCLI, p TestCLIParams, conn *sqlConn) func() { c.restartServer(p) url2, cleanup2 := sqlutils.PGUrl(c.t, c.ServingSQLAddr(), c.t.Name(), url.User(security.RootUser)) conn.url = url2.String() @@ -106,8 +106,8 @@ func simulateServerRestart(c *cliTest, p cliTestParams, conn *sqlConn) func() { func TestRunQuery(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() url, cleanup := sqlutils.PGUrl(t, c.ServingSQLAddr(), t.Name(), url.User(security.RootUser)) defer cleanup() @@ -226,8 +226,8 @@ SET func TestUtfName(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() url, cleanup := sqlutils.PGUrl(t, c.ServingSQLAddr(), t.Name(), url.User(security.RootUser)) defer cleanup() @@ -282,9 +282,9 @@ ALTER TABLE test_utf.żółw ADD CONSTRAINT żó UNIQUE (value)`)); err != nil { func TestTransactionRetry(t *testing.T) { defer leaktest.AfterTest(t)() - p := cliTestParams{t: t} - c := newCLITest(p) - defer c.cleanup() + p := TestCLIParams{T: t} + c := NewCLITest(p) + defer c.Cleanup() url, cleanup := sqlutils.PGUrl(t, c.ServingSQLAddr(), t.Name(), url.User(security.RootUser)) defer cleanup() diff --git a/pkg/cli/statement_diag_test.go b/pkg/cli/statement_diag_test.go index ce65bc8ced25..7cacca6049e1 100644 --- a/pkg/cli/statement_diag_test.go +++ b/pkg/cli/statement_diag_test.go @@ -20,8 +20,8 @@ import ( ) func Example_statement_diag() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() // First, set up some diagnostics state. commands := []string{ diff --git a/pkg/cli/testutils.go b/pkg/cli/testutils.go index 8bb80b5397cc..3a91e3c15321 100644 --- a/pkg/cli/testutils.go +++ b/pkg/cli/testutils.go @@ -10,9 +10,364 @@ package cli +import ( + "bytes" + "context" + "fmt" + "io" + "io/ioutil" + "net" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/cockroachdb/cockroach/pkg/base" + "github.com/cockroachdb/cockroach/pkg/cli/cliflags" + "github.com/cockroachdb/cockroach/pkg/roachpb" + "github.com/cockroachdb/cockroach/pkg/security" + "github.com/cockroachdb/cockroach/pkg/security/securitytest" + "github.com/cockroachdb/cockroach/pkg/server" + "github.com/cockroachdb/cockroach/pkg/testutils/serverutils" + "github.com/cockroachdb/cockroach/pkg/util/log" + "github.com/cockroachdb/errors" +) + // TestingReset resets global mutable state so that Run can be called multiple // times from the same test process. It is public for cliccl. func TestingReset() { // Reset the client contexts for each test. initCLIDefaults() } + +// TestCLI represents test for cli +type TestCLI struct { + *server.TestServer + certsDir string + cleanupFunc func() error + prevStderr *os.File + + // t is the testing.T instance used for this test. + // Example_xxx tests may have this set to nil. + t *testing.T + // logScope binds the lifetime of the log files to this test, when t + // is not nil. + logScope *log.TestLogScope + // if true, doesn't print args during RunWithArgs. + omitArgs bool +} + +// TestCLIParams contains parameters used by TestCLI. 
+type TestCLIParams struct { + T *testing.T + Insecure bool + NoServer bool + StoreSpecs []base.StoreSpec + Locality roachpb.Locality + NoNodelocal bool +} + +// testTempFilePrefix is a sentinel marker to be used as the prefix of a +// test file name. It is used to extract the file name from a uniquely +// generated (temp directory) file path. +const testTempFilePrefix = "test-temp-prefix-" + +func (c *TestCLI) fail(err interface{}) { + if c.t != nil { + defer c.logScope.Close(c.t) + c.t.Fatal(err) + } else { + panic(err) + } +} + +func createTestCerts(certsDir string) (cleanup func() error) { + // Copy these assets to disk from embedded strings, so this test can + // run from a standalone binary. + // Disable embedded certs, or the security library will try to load + // our real files as embedded assets. + security.ResetAssetLoader() + + assets := []string{ + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedCACert), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedCAKey), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedNodeCert), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedNodeKey), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedRootCert), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedRootKey), + filepath.Join(security.EmbeddedCertsDir, security.EmbeddedTenantClientCACert), + } + + for _, a := range assets { + _, err := securitytest.RestrictedCopy(a, certsDir, filepath.Base(a)) + if err != nil { + panic(err) + } + } + + return func() error { + security.SetAssetLoader(securitytest.EmbeddedAssets) + return os.RemoveAll(certsDir) + } +} + +// NewCLITest export for cclcli. +func NewCLITest(params TestCLIParams) TestCLI { + c := TestCLI{t: params.T} + + certsDir, err := ioutil.TempDir("", "cli-test") + if err != nil { + c.fail(err) + } + c.certsDir = certsDir + + if c.t != nil { + c.logScope = log.Scope(c.t) + } + + c.cleanupFunc = func() error { return nil } + + if !params.NoServer { + if !params.Insecure { + c.cleanupFunc = createTestCerts(certsDir) + } + + args := base.TestServerArgs{ + Insecure: params.Insecure, + SSLCertsDir: c.certsDir, + StoreSpecs: params.StoreSpecs, + Locality: params.Locality, + ExternalIODir: filepath.Join(certsDir, "extern"), + } + if params.NoNodelocal { + args.ExternalIODir = "" + } + s, err := serverutils.StartServerRaw(args) + if err != nil { + c.fail(err) + } + c.TestServer = s.(*server.TestServer) + + log.Infof(context.Background(), "server started at %s", c.ServingRPCAddr()) + log.Infof(context.Background(), "SQL listener at %s", c.ServingSQLAddr()) + } + + baseCfg.User = security.NodeUserName() + + // Ensure that CLI error messages and anything meant for the + // original stderr is redirected to stdout, where it can be + // captured. + c.prevStderr = stderr + stderr = os.Stdout + + return c +} + +// setCLIDefaultsForTests invokes initCLIDefaults but pretends the +// output is not a terminal, even if it happens to be. This ensures +// e.g. that tests ran with -v have the same output as those without. +func setCLIDefaultsForTests() { + initCLIDefaults() + cliCtx.terminalOutput = false + sqlCtx.showTimes = false + // Even though we pretend there is no terminal, most tests want + // pretty tables. + cliCtx.tableDisplayFormat = tableDisplayTable +} + +// stopServer stops the test server. 
+func (c *TestCLI) stopServer() { + if c.TestServer != nil { + log.Infof(context.Background(), "stopping server at %s / %s", + c.ServingRPCAddr(), c.ServingSQLAddr()) + c.Stopper().Stop(context.Background()) + } +} + +// restartServer stops and restarts the test server. The ServingRPCAddr() may +// have changed after this method returns. +func (c *TestCLI) restartServer(params TestCLIParams) { + c.stopServer() + log.Info(context.Background(), "restarting server") + s, err := serverutils.StartServerRaw(base.TestServerArgs{ + Insecure: params.Insecure, + SSLCertsDir: c.certsDir, + StoreSpecs: params.StoreSpecs, + }) + if err != nil { + c.fail(err) + } + c.TestServer = s.(*server.TestServer) + log.Infof(context.Background(), "restarted server at %s / %s", + c.ServingRPCAddr(), c.ServingSQLAddr()) +} + +// Cleanup cleans up after the test, stopping the server if necessary. +// The log files are removed if the test has succeeded. +func (c *TestCLI) Cleanup() { + defer func() { + if c.t != nil { + c.logScope.Close(c.t) + } + }() + + // Restore stderr. + stderr = c.prevStderr + + log.Info(context.Background(), "stopping server and cleaning up CLI test") + + c.stopServer() + + if err := c.cleanupFunc(); err != nil { + panic(err) + } +} + +// Run line of commands. +func (c TestCLI) Run(line string) { + a := strings.Fields(line) + c.RunWithArgs(a) +} + +// RunWithCapture runs c and returns a string containing the output of c +// and any error that may have occurred capturing the output. We do not propagate +// errors in executing c, because those will be caught when the test verifies +// the output of c. +func (c TestCLI) RunWithCapture(line string) (out string, err error) { + return captureOutput(func() { + c.Run(line) + }) +} + +// RunWithCaptureArgs args version of RunWithCapture. +func (c TestCLI) RunWithCaptureArgs(args []string) (string, error) { + return captureOutput(func() { + c.RunWithArgs(args) + }) +} + +// captureOutput runs f and returns a string containing the output and any +// error that may have occurred capturing the output. +func captureOutput(f func()) (out string, err error) { + // Heavily inspired by Go's testing/example.go:runExample(). + + // Funnel stdout into a pipe. + stdoutSave, stderrRedirSave := os.Stdout, stderr + r, w, err := os.Pipe() + if err != nil { + return "", err + } + os.Stdout = w + stderr = w + + // Send all bytes from piped stdout through the output channel. + type captureResult struct { + out string + err error + } + outC := make(chan captureResult) + go func() { + var buf bytes.Buffer + _, err := io.Copy(&buf, r) + r.Close() + outC <- captureResult{buf.String(), err} + }() + + // Clean up and record output in separate function to handle panics. + defer func() { + // Close pipe and restore normal stdout. + w.Close() + os.Stdout = stdoutSave + stderr = stderrRedirSave + outResult := <-outC + out, err = outResult.out, outResult.err + if x := recover(); x != nil { + err = errors.Errorf("panic: %v", x) + } + }() + + // Run the command. The output will be returned in the defer block. + f() + return +} + +func isSQLCommand(args []string) (bool, error) { + cmd, _, err := cockroachCmd.Find(args) + if err != nil { + return false, err + } + // We use --echo-sql as a marker of SQL-only commands. + if f := flagSetForCmd(cmd).Lookup(cliflags.EchoSQL.Name); f != nil { + return true, nil + } + return false, nil +} + +// RunWithArgs add args according to TestCLI cfg. 
+func (c TestCLI) RunWithArgs(origArgs []string) { + TestingReset() + + if err := func() error { + args := append([]string(nil), origArgs[:1]...) + if c.TestServer != nil { + addr := c.ServingRPCAddr() + if isSQL, err := isSQLCommand(origArgs); err != nil { + return err + } else if isSQL { + addr = c.ServingSQLAddr() + } + h, p, err := net.SplitHostPort(addr) + if err != nil { + return err + } + args = append(args, fmt.Sprintf("--host=%s", net.JoinHostPort(h, p))) + if c.Cfg.Insecure { + args = append(args, "--insecure=true") + } else { + args = append(args, "--insecure=false") + args = append(args, fmt.Sprintf("--certs-dir=%s", c.certsDir)) + } + } + args = append(args, origArgs[1:]...) + + // `nodelocal upload` CLI tests create test files in unique temp + // directories. Given that the expected output for such tests is defined as + // a static comment, it is not possible to match against the full file path. + // So, we trim the file path upto the sentinel prefix marker, and use only + // the file name for comparing against the expected output. + if len(origArgs) >= 3 && strings.Contains(origArgs[2], testTempFilePrefix) { + splitFilePath := strings.Split(origArgs[2], testTempFilePrefix) + origArgs[2] = splitFilePath[1] + } + + if !c.omitArgs { + fmt.Fprintf(os.Stderr, "%s\n", args) + fmt.Println(strings.Join(origArgs, " ")) + } + + return Run(args) + }(); err != nil { + cliOutputError(os.Stdout, err, true /*showSeverity*/, false /*verbose*/) + } +} + +// RunWithCAArgs adds ca args at run time. +func (c TestCLI) RunWithCAArgs(origArgs []string) { + TestingReset() + + if err := func() error { + args := append([]string(nil), origArgs[:1]...) + if c.TestServer != nil { + args = append(args, fmt.Sprintf("--ca-key=%s", filepath.Join(c.certsDir, security.EmbeddedCAKey))) + args = append(args, fmt.Sprintf("--certs-dir=%s", c.certsDir)) + } + args = append(args, origArgs[1:]...) 
+ + fmt.Fprintf(os.Stderr, "%s\n", args) + fmt.Println(strings.Join(origArgs, " ")) + + return Run(args) + }(); err != nil { + fmt.Println(err) + } +} diff --git a/pkg/cli/userfiletable_test.go b/pkg/cli/userfiletable_test.go index 3bb7969dad3d..b9ef8f036a87 100644 --- a/pkg/cli/userfiletable_test.go +++ b/pkg/cli/userfiletable_test.go @@ -30,8 +30,8 @@ import ( ) func Example_userfile_upload() { - c := newCLITest(cliTestParams{}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{}) + defer c.Cleanup() file, cleanUp := createTestFile("test.csv", "content") defer cleanUp() @@ -101,8 +101,8 @@ func checkUserFileContent( func TestUserFileUpload(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) - defer c.cleanup() + c := NewCLITest(TestCLIParams{T: t}) + defer c.Cleanup() dir, cleanFn := testutils.TempDir(t) defer cleanFn() @@ -173,7 +173,7 @@ func TestUserFileUpload(t *testing.T) { } } -func checkListedFiles(t *testing.T, c cliTest, uri string, args string, expectedFiles []string) { +func checkListedFiles(t *testing.T, c TestCLI, uri string, args string, expectedFiles []string) { cmd := []string{"userfile", "list", uri, args} cliOutput, err := c.RunWithCaptureArgs(cmd) require.NoError(t, err) @@ -188,7 +188,7 @@ func checkListedFiles(t *testing.T, c cliTest, uri string, args string, expected require.Equal(t, expectedFiles, listedFiles, "listed files from %v", cmd) } -func checkDeletedFiles(t *testing.T, c cliTest, uri, args string, expectedFiles []string) { +func checkDeletedFiles(t *testing.T, c TestCLI, uri, args string, expectedFiles []string) { cmd := []string{"userfile", "delete", uri, args} cliOutput, err := c.RunWithCaptureArgs(cmd) require.NoError(t, err) @@ -209,9 +209,9 @@ func checkDeletedFiles(t *testing.T, c cliTest, uri, args string, expectedFiles func TestUserfile(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) + c := NewCLITest(TestCLIParams{T: t}) c.omitArgs = true - defer c.cleanup() + defer c.Cleanup() dir, cleanFn := testutils.TempDir(t) defer cleanFn() @@ -392,9 +392,9 @@ func TestUserfile(t *testing.T) { func TestUsernameUserfileInteraction(t *testing.T) { defer leaktest.AfterTest(t)() - c := newCLITest(cliTestParams{t: t}) + c := NewCLITest(TestCLIParams{T: t}) c.omitArgs = true - defer c.cleanup() + defer c.Cleanup() dir, cleanFn := testutils.TempDir(t) defer cleanFn() diff --git a/pkg/cli/zip_test.go b/pkg/cli/zip_test.go index f9a6f4efcbe4..93797a01b4d0 100644 --- a/pkg/cli/zip_test.go +++ b/pkg/cli/zip_test.go @@ -115,12 +115,12 @@ func TestZip(t *testing.T) { dir, cleanupFn := testutils.TempDir(t) defer cleanupFn() - c := newCLITest(cliTestParams{ - storeSpecs: []base.StoreSpec{{ + c := NewCLITest(TestCLIParams{ + StoreSpecs: []base.StoreSpec{{ Path: dir, }}, }) - defer c.cleanup() + defer c.Cleanup() out, err := c.RunWithCapture("debug zip --cpu-profile-duration=1s " + os.DevNull) if err != nil { @@ -143,12 +143,12 @@ func TestZipSpecialNames(t *testing.T) { dir, cleanupFn := testutils.TempDir(t) defer cleanupFn() - c := newCLITest(cliTestParams{ - storeSpecs: []base.StoreSpec{{ + c := NewCLITest(TestCLIParams{ + StoreSpecs: []base.StoreSpec{{ Path: dir, }}, }) - defer c.cleanup() + defer c.Cleanup() c.RunWithArgs([]string{"sql", "-e", ` create database "a:b"; @@ -228,7 +228,7 @@ func TestUnavailableZip(t *testing.T) { defer close(ch) // Zip it. We fake a CLI test context for this. 
- c := cliTest{ + c := TestCLI{ t: t, TestServer: tc.Server(0).(*server.TestServer), } @@ -300,7 +300,7 @@ func TestPartialZip(t *testing.T) { tc.StopServer(1) // Zip it. We fake a CLI test context for this. - c := cliTest{ + c := TestCLI{ t: t, TestServer: tc.Server(0).(*server.TestServer), } @@ -456,12 +456,12 @@ func TestToHex(t *testing.T) { dir, cleanupFn := testutils.TempDir(t) defer cleanupFn() - c := newCLITest(cliTestParams{ - storeSpecs: []base.StoreSpec{{ + c := NewCLITest(TestCLIParams{ + StoreSpecs: []base.StoreSpec{{ Path: dir, }}, }) - defer c.cleanup() + defer c.Cleanup() // Create a job to have non-empty system.jobs table. c.RunWithArgs([]string{"sql", "-e", "CREATE STATISTICS foo FROM system.namespace"}) From aa7238acc195c8788c86bf0d0464b0f8e4adcba5 Mon Sep 17 00:00:00 2001 From: elliebababa Date: Mon, 8 Mar 2021 15:56:06 -0800 Subject: [PATCH 3/3] cliccl: update `load show` with `summary` subcommand Previously, `load show` provides different options to display subset of meta information of backup, but the option parsing could be error prone and prefer a better way to provide users with subset options(e.g.with --include). This commit updates `load show` with `summary` subcommand to display all metadata of an individual backup for debugging. Release justification: non-production code changes. Release note (cli change): Update `load show` with `summary` subcommand to display meta information of an individual backup. Users are now able to inspect backup metadata, files, spans and descriptors with cli command `cockroach load show summary ` without a running cluster. --- pkg/ccl/cliccl/load.go | 212 ++++++++++++++++-------------------- pkg/ccl/cliccl/load_test.go | 97 ++++++++--------- pkg/cli/testutils.go | 2 +- 3 files changed, 139 insertions(+), 172 deletions(-) diff --git a/pkg/ccl/cliccl/load.go b/pkg/ccl/cliccl/load.go index 7e3b13512617..3640d1e255dc 100644 --- a/pkg/ccl/cliccl/load.go +++ b/pkg/ccl/cliccl/load.go @@ -30,34 +30,37 @@ import ( "github.com/cockroachdb/cockroach/pkg/sql/sessiondata" "github.com/cockroachdb/cockroach/pkg/storage/cloud" "github.com/cockroachdb/cockroach/pkg/storage/cloudimpl" - "github.com/cockroachdb/cockroach/pkg/util/hlc" "github.com/cockroachdb/cockroach/pkg/util/humanizeutil" "github.com/cockroachdb/cockroach/pkg/util/timeutil" "github.com/cockroachdb/errors" "github.com/spf13/cobra" ) -const ( - descriptors = "descriptors" - files = "files" - spans = "spans" - metadata = "metadata" -) - var externalIODir string func init() { - loadShowCmd := &cobra.Command{ - Use: "show [descriptors|files|spans|metadata] ", + + loadShowSummaryCmd := &cobra.Command{ + Use: "summary ", + Short: "show backups summary", + Long: "Shows summary of meta information about a SQL backup.", + Args: cobra.ExactArgs(1), + RunE: cli.MaybeDecorateGRPCError(runLoadShowSummary), + } + + loadShowCmds := &cobra.Command{ + Use: "show [command]", Short: "show backups", - Long: "Shows subset(s) of meta information about a SQL backup.", + Long: "Shows information about a SQL backup.", Args: cobra.MinimumNArgs(1), - RunE: cli.MaybeDecorateGRPCError(runLoadShow), + RunE: func(cmd *cobra.Command, args []string) error { + return cmd.Usage() + }, } loadCmds := &cobra.Command{ Use: "load [command]", - Short: "loading commands", + Short: "load backup commands", Long: `Commands for bulk loading external files.`, RunE: func(cmd *cobra.Command, args []string) error { return cmd.Usage() @@ -73,13 +76,14 @@ func init() { cliflags.ExternalIODir.Usage()) cli.AddCmd(loadCmds) - 
loadCmds.AddCommand(loadShowCmd) - loadShowCmd.Flags().AddFlagSet(loadFlags) + loadCmds.AddCommand(loadShowCmds) + loadShowCmds.AddCommand(loadShowSummaryCmd) + loadShowSummaryCmd.Flags().AddFlagSet(loadFlags) } func newBlobFactory(ctx context.Context, dialing roachpb.NodeID) (blobs.BlobClient, error) { if dialing != 0 { - return nil, errors.Errorf(`only support nodelocal (0/self) under offline inspection`) + return nil, errors.Errorf("accessing node %d during nodelocal access is unsupported for CLI inspection; only local access is supported with nodelocal://self", dialing) } if externalIODir == "" { externalIODir = filepath.Join(server.DefaultStorePath, "extern") @@ -87,58 +91,14 @@ func newBlobFactory(ctx context.Context, dialing roachpb.NodeID) (blobs.BlobClie return blobs.NewLocalClient(externalIODir) } -func parseShowArgs(args []string) (options map[string]bool, path string, err error) { - options = make(map[string]bool) - for _, arg := range args { - switch strings.ToLower(arg) { - case descriptors: - options[descriptors] = true - case files: - options[files] = true - case spans: - options[spans] = true - case metadata: - options[metadata] = true - default: - if path != "" { - return nil, "", errors.New("more than one path is specifiied") - } - path = arg - } - } - - if len(options) == 0 { - options[descriptors] = true - options[files] = true - options[spans] = true - options[metadata] = true - } - - if len(args) == len(options) { - return nil, "", errors.New("backup_path argument is required") - } - return options, path, nil -} - -func runLoadShow(cmd *cobra.Command, args []string) error { - - var options map[string]bool - var path string - var err error - if options, path, err = parseShowArgs(args); err != nil { - return err - } - - var showHeaders bool - if len(options) > 1 { - showHeaders = true - } +func runLoadShowSummary(cmd *cobra.Command, args []string) error { - ctx := context.Background() + path := args[0] if !strings.Contains(path, "://") { path = cloudimpl.MakeLocalStorageURI(path) } + ctx := context.Background() externalStorageFromURI := func(ctx context.Context, uri string, user security.SQLUsername) (cloud.ExternalStorage, error) { return cloudimpl.ExternalStorageFromURI(ctx, uri, base.ExternalIODirConfig{}, @@ -154,25 +114,10 @@ func runLoadShow(cmd *cobra.Command, args []string) error { if err != nil { return err } - - if _, ok := options[metadata]; ok { - showMeta(desc) - } - - if _, ok := options[spans]; ok { - showSpans(desc, showHeaders) - } - - if _, ok := options[files]; ok { - showFiles(desc, showHeaders) - } - - if _, ok := options[descriptors]; ok { - if err := showDescriptor(desc); err != nil { - return err - } - } - + showMeta(desc) + showSpans(desc) + showFiles(desc) + showDescriptors(desc) return nil } @@ -190,75 +135,102 @@ func showMeta(desc backupccl.BackupManifest) { fmt.Printf("BuildInfo: %s\n", desc.BuildInfo.Short()) } -func showSpans(desc backupccl.BackupManifest, showHeaders bool) { - tabfmt := "" - if showHeaders { - fmt.Printf("Spans:\n") - tabfmt = "\t" +func showSpans(desc backupccl.BackupManifest) { + fmt.Printf("Spans:\n") + if len(desc.Spans) == 0 { + fmt.Printf(" (No spans included in the specified backup path.)\n") } for _, s := range desc.Spans { - fmt.Printf("%s%s\n", tabfmt, s) + fmt.Printf(" %s\n", s) } } -func showFiles(desc backupccl.BackupManifest, showHeaders bool) { - tabfmt := "" - if showHeaders { - fmt.Printf("Files:\n") - tabfmt = "\t" +func showFiles(desc backupccl.BackupManifest) { + fmt.Printf("Files:\n") + if 
len(desc.Files) == 0 { + fmt.Printf(" (No sst files included in the specified backup path.)\n") } for _, f := range desc.Files { - fmt.Printf("%s%s:\n", tabfmt, f.Path) - fmt.Printf("%s Span: %s\n", tabfmt, f.Span) - fmt.Printf("%s Sha512: %0128x\n", tabfmt, f.Sha512) - fmt.Printf("%s DataSize: %d (%s)\n", tabfmt, f.EntryCounts.DataSize, humanizeutil.IBytes(f.EntryCounts.DataSize)) - fmt.Printf("%s Rows: %d\n", tabfmt, f.EntryCounts.Rows) - fmt.Printf("%s IndexEntries: %d\n", tabfmt, f.EntryCounts.IndexEntries) + fmt.Printf(" %s:\n", f.Path) + fmt.Printf(" Span: %s\n", f.Span) + fmt.Printf(" DataSize: %d (%s)\n", f.EntryCounts.DataSize, humanizeutil.IBytes(f.EntryCounts.DataSize)) + fmt.Printf(" Rows: %d\n", f.EntryCounts.Rows) + fmt.Printf(" IndexEntries: %d\n", f.EntryCounts.IndexEntries) } } -func showDescriptor(desc backupccl.BackupManifest) error { +func showDescriptors(desc backupccl.BackupManifest) { // Note that these descriptors could be from any past version of the cluster, // in case more fields need to be added to the output. - dbIDs := make([]descpb.ID, 0) + dbIDs := make([]descpb.ID, 0, len(desc.Descriptors)) dbIDToName := make(map[descpb.ID]string) - schemaIDs := make([]descpb.ID, 0) + schemaIDs := make([]descpb.ID, 0, len(desc.Descriptors)) schemaIDs = append(schemaIDs, keys.PublicSchemaID) - schemaIDToName := make(map[descpb.ID]string) - schemaIDToName[keys.PublicSchemaID] = sessiondata.PublicSchemaName + schemaIDToFullyQualifiedName := make(map[descpb.ID]string) + schemaIDToFullyQualifiedName[keys.PublicSchemaID] = sessiondata.PublicSchemaName + typeIDs := make([]descpb.ID, 0, len(desc.Descriptors)) + typeIDToFullyQualifiedName := make(map[descpb.ID]string) + tableIDs := make([]descpb.ID, 0, len(desc.Descriptors)) + tableIDToFullyQualifiedName := make(map[descpb.ID]string) for i := range desc.Descriptors { d := &desc.Descriptors[i] id := descpb.GetDescriptorID(d) - if d.GetDatabase() != nil { + tableDesc, databaseDesc, typeDesc, schemaDesc := descpb.FromDescriptor(d) + if databaseDesc != nil { dbIDToName[id] = descpb.GetDescriptorName(d) dbIDs = append(dbIDs, id) - } else if d.GetSchema() != nil { - schemaIDToName[id] = descpb.GetDescriptorName(d) + } else if schemaDesc != nil { + dbName := dbIDToName[schemaDesc.GetParentID()] + schemaName := descpb.GetDescriptorName(d) + schemaIDToFullyQualifiedName[id] = dbName + "." + schemaName schemaIDs = append(schemaIDs, id) + } else if typeDesc != nil { + parentSchema := schemaIDToFullyQualifiedName[typeDesc.GetParentSchemaID()] + if parentSchema == sessiondata.PublicSchemaName { + parentSchema = dbIDToName[typeDesc.GetParentID()] + "." + parentSchema + } + typeName := descpb.GetDescriptorName(d) + typeIDToFullyQualifiedName[id] = parentSchema + "." + typeName + typeIDs = append(typeIDs, id) + } else if tableDesc != nil { + tbDesc := tabledesc.NewBuilder(tableDesc).BuildImmutable() + parentSchema := schemaIDToFullyQualifiedName[tbDesc.GetParentSchemaID()] + if parentSchema == sessiondata.PublicSchemaName { + parentSchema = dbIDToName[tableDesc.GetParentID()] + "." + parentSchema + } + tableName := descpb.GetDescriptorName(d) + tableIDToFullyQualifiedName[id] = parentSchema + "." 
+ tableName + tableIDs = append(tableIDs, id) } } fmt.Printf("Databases:\n") for _, id := range dbIDs { - fmt.Printf(" %s\n", - dbIDToName[id]) + fmt.Printf(" %d: %s\n", + id, dbIDToName[id]) } fmt.Printf("Schemas:\n") for _, id := range schemaIDs { - fmt.Printf(" %s\n", - schemaIDToName[id]) + fmt.Printf(" %d: %s\n", + id, schemaIDToFullyQualifiedName[id]) + } + + fmt.Printf("Types:\n") + if len(typeIDs) == 0 { + fmt.Printf(" (No user-defined types included in the specified backup path.)\n") + } + for _, id := range typeIDs { + fmt.Printf(" %d: %s\n", + id, typeIDToFullyQualifiedName[id]) } fmt.Printf("Tables:\n") - for i := range desc.Descriptors { - d := &desc.Descriptors[i] - if descpb.TableFromDescriptor(d, hlc.Timestamp{}) != nil { - tbDesc := tabledesc.NewImmutable(*descpb.TableFromDescriptor(d, hlc.Timestamp{})) - dbName := dbIDToName[tbDesc.GetParentID()] - schemaName := schemaIDToName[tbDesc.GetParentSchemaID()] - fmt.Printf(" %s.%s.%s\n", dbName, schemaName, descpb.GetDescriptorName(d)) - } + if len(tableIDs) == 0 { + fmt.Printf(" (No tables included in the specified backup path.)\n") + } + for _, id := range tableIDs { + fmt.Printf(" %d: %s\n", + id, tableIDToFullyQualifiedName[id]) } - return nil } diff --git a/pkg/ccl/cliccl/load_test.go b/pkg/ccl/cliccl/load_test.go index 96293494ef77..79de92b2f3cc 100644 --- a/pkg/ccl/cliccl/load_test.go +++ b/pkg/ccl/cliccl/load_test.go @@ -11,7 +11,6 @@ package cliccl import ( "context" "fmt" - "strings" "testing" "github.com/cockroachdb/cockroach/pkg/base" @@ -39,71 +38,67 @@ func TestLoadShow(t *testing.T) { sqlDB := sqlutils.MakeSQLRunner(db) sqlDB.Exec(t, `CREATE DATABASE testDB`) + sqlDB.Exec(t, `USE testDB`) + + const dbOnlyBackupPath = "nodelocal://0/dbOnlyFooFolder" + sqlDB.Exec(t, `BACKUP DATABASE testDB TO $1`, dbOnlyBackupPath) + sqlDB.Exec(t, `CREATE SCHEMA testDB.testschema`) + sqlDB.Exec(t, `CREATE TYPE fooType AS ENUM ()`) + sqlDB.Exec(t, `CREATE TYPE testDB.testschema.fooType AS ENUM ()`) + sqlDB.Exec(t, `CREATE TABLE fooTable (a INT)`) sqlDB.Exec(t, `CREATE TABLE testDB.testschema.fooTable (a INT)`) sqlDB.Exec(t, `INSERT INTO testDB.testschema.fooTable VALUES (123)`) const backupPath = "nodelocal://0/fooFolder" - sqlDB.Exec(t, `BACKUP testDB.testSchema.fooTable TO $1`, backupPath) - - // Test load show with metadata option - expectedMetadataOutputSubstr := []string{"StartTime:", "EndTime:", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0", "FormatVersion: 1", "ClusterID:", "NodeID: 0", "BuildInfo:"} - t.Run("show-metadata", func(t *testing.T) { - out, err := c.RunWithCapture(fmt.Sprintf("load show %s metadata --external-io-dir=%s", backupPath, dir)) - require.NoError(t, err) - for _, substr := range expectedMetadataOutputSubstr { - require.True(t, strings.Contains(out, substr)) - } - }) + sqlDB.Exec(t, `BACKUP DATABASE testDB TO $1`, backupPath) - // Test load show with spans option - expectedSpansOutput := "/Table/54/{1-2}\n" - t.Run("show-spans", func(t *testing.T) { - out, err := c.RunWithCapture(fmt.Sprintf("load show %s spans --external-io-dir=%s", backupPath, dir)) + t.Run("show-summary-without-types-or-tables", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show summary %s --external-io-dir=%s", dbOnlyBackupPath, dir)) require.NoError(t, err) - checkExpectedOutput(t, expectedSpansOutput, out) - }) - - // Test load show with files option - expectedFilesOutputSubstr := []string{".sst", "Span: /Table/54/{1-2}", "Sha512:", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0"} - 
t.Run("show-files", func(t *testing.T) { - out, err := c.RunWithCapture(fmt.Sprintf("load show %s files --external-io-dir=%s", backupPath, dir)) - require.NoError(t, err) - for _, substr := range expectedFilesOutputSubstr { + expectedMetadataOutputSubstr := []string{"StartTime:", "EndTime:", "DataSize: 0 (0 B)", "Rows: 0", "IndexEntries: 0", "FormatVersion: 1", "ClusterID:", "NodeID: 0", "BuildInfo:"} + expectedOutputSubstr := append(expectedMetadataOutputSubstr, ` +Spans: + (No spans included in the specified backup path.) +Files: + (No sst files included in the specified backup path.) +Databases: + 52: testdb +Schemas: + 29: public +Types: + (No user-defined types included in the specified backup path.) +Tables: + (No tables included in the specified backup path.)`) + for _, substr := range expectedOutputSubstr { require.Contains(t, out, substr) } }) - // Test load show with descriptors option - expectedDescOutput := - `Databases: - testdb + t.Run("show-summary-with-full-information", func(t *testing.T) { + out, err := c.RunWithCapture(fmt.Sprintf("load show summary %s --external-io-dir=%s", backupPath, dir)) + require.NoError(t, err) + expectedMetadataOutputSubstr := []string{"StartTime:", "EndTime:", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0", "FormatVersion: 1", "ClusterID:", "NodeID: 0", "BuildInfo:"} + expectedSpansOutput := "Spans:\n\t/Table/58/{1-2}\n\t/Table/59/{1-2}\n" + expectedFilesOutputSubstr := []string{"Files:\n\t", ".sst", "Span: /Table/59/{1-2}", "DataSize: 20 (20 B)", "Rows: 1", "IndexEntries: 0"} + expectedDescOutput := ` +Databases: + 52: testdb Schemas: - public - testschema + 29: public + 53: testdb.testschema +Types: + 54: testdb.public.footype + 55: testdb.public._footype + 56: testdb.testschema.footype + 57: testdb.testschema._footype Tables: - testdb.testschema.footable -` - t.Run("show-descriptors", func(t *testing.T) { - out, err := c.RunWithCapture(fmt.Sprintf("load show %s descriptors --external-io-dir=%s", backupPath, dir)) - require.NoError(t, err) - checkExpectedOutput(t, expectedDescOutput, out) - }) - - // Test load show without options should output all information - t.Run("show-without-options", func(t *testing.T) { - out, err := c.RunWithCapture(fmt.Sprintf("load show %s --external-io-dir=%s", backupPath, dir)) - require.NoError(t, err) - expectedOutputSubstr := append(expectedMetadataOutputSubstr, "Spans:\n\t"+expectedSpansOutput) - expectedOutputSubstr = append(expectedOutputSubstr, "Files:\n\t") + 58: testdb.public.footable + 59: testdb.testschema.footable` + expectedOutputSubstr := append(expectedMetadataOutputSubstr, expectedSpansOutput) expectedOutputSubstr = append(expectedOutputSubstr, expectedFilesOutputSubstr...) + expectedOutputSubstr = append(expectedOutputSubstr, expectedDescOutput) for _, substr := range expectedOutputSubstr { require.Contains(t, out, substr) } }) } - -func checkExpectedOutput(t *testing.T, expected string, out string) { - endOfCmd := strings.Index(out, "\n") - out = out[endOfCmd+1:] - require.Equal(t, expected, out) -} diff --git a/pkg/cli/testutils.go b/pkg/cli/testutils.go index 3a91e3c15321..a2a1e7faff6b 100644 --- a/pkg/cli/testutils.go +++ b/pkg/cli/testutils.go @@ -40,7 +40,7 @@ func TestingReset() { initCLIDefaults() } -// TestCLI represents test for cli +// TestCLI wraps a test server and is used by tests to make assertions about the output of CLI commands. type TestCLI struct { *server.TestServer certsDir string