diff --git a/pkg/storage/parse.go b/pkg/storage/parse.go
index d75e7663d..fff518bfb 100644
--- a/pkg/storage/parse.go
+++ b/pkg/storage/parse.go
@@ -4,6 +4,8 @@ package storage
 
 import (
 	"net/url"
+	"reflect"
+	"strconv"
 	"strings"
 
 	"github.com/pingcap/errors"
@@ -46,19 +48,23 @@ func ParseBackend(rawURL string, options *BackendOptions) (*backup.StorageBacken
 		}
 		prefix := strings.Trim(u.Path, "/")
 		s3 := &backup.S3{Bucket: u.Host, Prefix: prefix}
-		if options != nil {
-			if err := options.S3.apply(s3); err != nil {
-				return nil, err
-			}
+		if options == nil {
+			options = &BackendOptions{}
+		}
+		ExtractQueryParameters(u, &options.S3)
+		if err := options.S3.apply(s3); err != nil {
+			return nil, err
 		}
 		return &backup.StorageBackend{Backend: &backup.StorageBackend_S3{S3: s3}}, nil
 
-	case "gcs":
+	case "gs", "gcs":
 		gcs := &backup.GCS{Bucket: u.Host, Prefix: u.Path[1:]}
-		if options != nil {
-			if err := options.GCS.apply(gcs); err != nil {
-				return nil, err
-			}
+		if options == nil {
+			options = &BackendOptions{}
+		}
+		ExtractQueryParameters(u, &options.GCS)
+		if err := options.GCS.apply(gcs); err != nil {
+			return nil, err
 		}
 		return &backup.StorageBackend{Backend: &backup.StorageBackend_Gcs{Gcs: gcs}}, nil
 
@@ -67,6 +73,57 @@ func ParseBackend(rawURL string, options *BackendOptions) (*backup.StorageBacken
 	}
 }
 
+// ExtractQueryParameters moves the query parameters of the URL into the options
+// using reflection.
+//
+// The options must be a pointer to a struct which contains only string or bool
+// fields (more types will be supported in the future), and tagged for JSON
+// serialization.
+//
+// All of the URL's query parameters will be removed after calling this method.
+func ExtractQueryParameters(u *url.URL, options interface{}) {
+	type field struct {
+		index int
+		kind  reflect.Kind
+	}
+
+	// First, find all JSON fields in the options struct type.
+	o := reflect.Indirect(reflect.ValueOf(options))
+	ty := o.Type()
+	numFields := ty.NumField()
+	tagToField := make(map[string]field, numFields)
+	for i := 0; i < numFields; i++ {
+		f := ty.Field(i)
+		tag := f.Tag.Get("json")
+		tagToField[tag] = field{index: i, kind: f.Type.Kind()}
+	}
+
+	// Then, read content from the URL into the options.
+	for key, params := range u.Query() {
+		if len(params) == 0 {
+			continue
+		}
+		param := params[0]
+		normalizedKey := strings.ToLower(strings.ReplaceAll(key, "_", "-"))
+		if f, ok := tagToField[normalizedKey]; ok {
+			field := o.Field(f.index)
+			switch f.kind {
+			case reflect.Bool:
+				if v, e := strconv.ParseBool(param); e == nil {
+					field.SetBool(v)
+				}
+			case reflect.String:
+				field.SetString(param)
+			default:
+				panic("BackendOption introduced an unsupported kind, please handle it! " + f.kind.String())
+			}
+		}
+	}
+
+	// Clean up the URL finally.
+	u.RawQuery = ""
+}
+
 // FormatBackendURL obtains the raw URL which can be used the reconstruct the
 // backend. The returned URL does not contain options for further configurating
 // the backend. This is to avoid exposing secret tokens.
diff --git a/pkg/storage/parse_test.go b/pkg/storage/parse_test.go
index 3f1bc4d4f..51669a806 100644
--- a/pkg/storage/parse_test.go
+++ b/pkg/storage/parse_test.go
@@ -4,7 +4,8 @@ package storage
 
 import (
 	"io/ioutil"
-	"os"
+	"net/url"
+	"path/filepath"
 	"testing"
 
 	. "github.com/pingcap/check"
@@ -54,6 +55,17 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(s3.Prefix, Equals, "prefix")
 	c.Assert(s3.Endpoint, Equals, "https://s3.example.com/")
 
+	s, err = ParseBackend("s3://bucket3/prefix/path?endpoint=https://127.0.0.1:9000&force_path_style=1&SSE=aws:kms&sse-kms-key-id=TestKey&xyz=abc", nil)
+	c.Assert(err, IsNil)
+	s3 = s.GetS3()
+	c.Assert(s3, NotNil)
+	c.Assert(s3.Bucket, Equals, "bucket3")
+	c.Assert(s3.Prefix, Equals, "prefix/path")
+	c.Assert(s3.Endpoint, Equals, "https://127.0.0.1:9000")
+	c.Assert(s3.ForcePathStyle, IsTrue)
+	c.Assert(s3.Sse, Equals, "aws:kms")
+	c.Assert(s3.SseKmsKeyId, Equals, "TestKey")
+
 	gcsOpt := &BackendOptions{
 		GCS: GCSBackendOptions{
 			Endpoint: "https://gcs.example.com/",
@@ -68,15 +80,11 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(gcs.Endpoint, Equals, "https://gcs.example.com/")
 	c.Assert(gcs.CredentialsBlob, Equals, "")
 
-	fakeCredentialsFile, err := ioutil.TempFile("", "fakeCredentialsFile")
+	fakeCredentialsFile := filepath.Join(c.MkDir(), "fakeCredentialsFile")
+	err = ioutil.WriteFile(fakeCredentialsFile, []byte("fakeCredentials"), 0600)
 	c.Assert(err, IsNil)
-	_, err = fakeCredentialsFile.Write([]byte("fakeCredentials"))
-	c.Assert(err, IsNil)
-	defer func() {
-		fakeCredentialsFile.Close()
-		os.Remove(fakeCredentialsFile.Name())
-	}()
-	gcsOpt.GCS.CredentialsFile = fakeCredentialsFile.Name()
+
+	gcsOpt.GCS.CredentialsFile = fakeCredentialsFile
 
 	s, err = ParseBackend("gcs://bucket/more/prefix/", gcsOpt)
 	c.Assert(err, IsNil)
@@ -86,6 +94,16 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(gcs.Prefix, Equals, "more/prefix/")
 	c.Assert(gcs.Endpoint, Equals, "https://gcs.example.com/")
 	c.Assert(gcs.CredentialsBlob, Equals, "fakeCredentials")
+
+	err = ioutil.WriteFile(fakeCredentialsFile, []byte("fakeCreds2"), 0600)
+	c.Assert(err, IsNil)
+	s, err = ParseBackend("gs://bucket4/backup/?credentials-file="+url.QueryEscape(fakeCredentialsFile), nil)
+	c.Assert(err, IsNil)
+	gcs = s.GetGcs()
+	c.Assert(gcs, NotNil)
+	c.Assert(gcs.Bucket, Equals, "bucket4")
+	c.Assert(gcs.Prefix, Equals, "backup/")
+	c.Assert(gcs.CredentialsBlob, Equals, "fakeCreds2")
 }
 
 func (r *testStorageSuite) TestFormatBackendURL(c *C) {
diff --git a/tests/br_s3/run.sh b/tests/br_s3/run.sh
index 422a1270d..394ddcf0e 100755
--- a/tests/br_s3/run.sh
+++ b/tests/br_s3/run.sh
@@ -58,7 +58,7 @@ done
 
 # backup full
 echo "backup start..."
-run_br --pd $PD_ADDR backup full -s "s3://mybucket/$DB" --s3.endpoint="http://$S3_ENDPOINT"
+run_br --pd $PD_ADDR backup full -s "s3://mybucket/$DB?endpoint=http://$S3_ENDPOINT"
 
 for i in $(seq $DB_COUNT); do
 	run_sql "DROP DATABASE $DB${i};"