This repository has been archived by the owner on Jul 24, 2024. It is now read-only.

Commit

storage: support placing the S3/GCS options into the storage URL (#246)
kennytm authored Apr 24, 2020
1 parent 58c9d5f commit 591406d
Showing 3 changed files with 94 additions and 19 deletions.
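
For orientation, the sketch below illustrates the behaviour this commit adds: S3/GCS options can now be carried in the storage URL's query string, so ParseBackend no longer needs an explicit BackendOptions value. The import path github.com/pingcap/br/pkg/storage is assumed here, and the option keys (endpoint, force_path_style) are taken from the tests further down; treat this as an illustrative sketch, not documented API.

package main

import (
	"fmt"
	"log"

	"github.com/pingcap/br/pkg/storage" // assumed import path for this repository's pkg/storage
)

func main() {
	// With this change, S3 settings ride along in the URL itself; passing nil
	// options is fine because ParseBackend now falls back to a default
	// BackendOptions before applying the query parameters.
	backend, err := storage.ParseBackend(
		"s3://mybucket/prefix?endpoint=http://127.0.0.1:9000&force_path_style=1",
		nil,
	)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(backend.GetS3().Endpoint) // http://127.0.0.1:9000
}

Note that options.S3.apply(s3) still runs after the query parameters are extracted, so explicit BackendOptions (e.g. the existing --s3.* flags) keep working alongside URL parameters.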
pkg/storage/parse.go (75 changes: 66 additions & 9 deletions)
@@ -4,6 +4,8 @@ package storage

 import (
 	"net/url"
+	"reflect"
+	"strconv"
 	"strings"

 	"github.com/pingcap/errors"
@@ -46,19 +48,23 @@ func ParseBackend(rawURL string, options *BackendOptions) (*backup.StorageBacken
 		}
 		prefix := strings.Trim(u.Path, "/")
 		s3 := &backup.S3{Bucket: u.Host, Prefix: prefix}
-		if options != nil {
-			if err := options.S3.apply(s3); err != nil {
-				return nil, err
-			}
+		if options == nil {
+			options = &BackendOptions{}
 		}
+		ExtractQueryParameters(u, &options.S3)
+		if err := options.S3.apply(s3); err != nil {
+			return nil, err
+		}
 		return &backup.StorageBackend{Backend: &backup.StorageBackend_S3{S3: s3}}, nil

-	case "gcs":
+	case "gs", "gcs":
 		gcs := &backup.GCS{Bucket: u.Host, Prefix: u.Path[1:]}
-		if options != nil {
-			if err := options.GCS.apply(gcs); err != nil {
-				return nil, err
-			}
+		if options == nil {
+			options = &BackendOptions{}
 		}
+		ExtractQueryParameters(u, &options.GCS)
+		if err := options.GCS.apply(gcs); err != nil {
+			return nil, err
+		}
 		return &backup.StorageBackend{Backend: &backup.StorageBackend_Gcs{Gcs: gcs}}, nil
@@ -67,6 +73,57 @@ func ParseBackend(rawURL string, options *BackendOptions) (*backup.StorageBacken
 	}
 }

+// ExtractQueryParameters moves the query parameters of the URL into the options
+// using reflection.
+//
+// The options must be a pointer to a struct which contains only string or bool
+// fields (more types will be supported in the future), and tagged for JSON
+// serialization.
+//
+// All of the URL's query parameters will be removed after calling this method.
+func ExtractQueryParameters(u *url.URL, options interface{}) {
+	type field struct {
+		index int
+		kind  reflect.Kind
+	}
+
+	// First, find all JSON fields in the options struct type.
+	o := reflect.Indirect(reflect.ValueOf(options))
+	ty := o.Type()
+	numFields := ty.NumField()
+	tagToField := make(map[string]field, numFields)
+	for i := 0; i < numFields; i++ {
+		f := ty.Field(i)
+		tag := f.Tag.Get("json")
+		tagToField[tag] = field{index: i, kind: f.Type.Kind()}
+	}
+
+	// Then, read content from the URL into the options.
+	for key, params := range u.Query() {
+		if len(params) == 0 {
+			continue
+		}
+		param := params[0]
+		normalizedKey := strings.ToLower(strings.ReplaceAll(key, "_", "-"))
+		if f, ok := tagToField[normalizedKey]; ok {
+			field := o.Field(f.index)
+			switch f.kind {
+			case reflect.Bool:
+				if v, e := strconv.ParseBool(param); e == nil {
+					field.SetBool(v)
+				}
+			case reflect.String:
+				field.SetString(param)
+			default:
+				panic("BackendOption introduced an unsupported kind, please handle it! " + f.kind.String())
+			}
+		}
+	}
+
+	// Clean up the URL finally.
+	u.RawQuery = ""
+}
+
 // FormatBackendURL obtains the raw URL which can be used to reconstruct the
 // backend. The returned URL does not contain options for further configuring
 // the backend. This is to avoid exposing secret tokens.
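
To make the reflection-based mapping concrete, here is a minimal usage sketch of ExtractQueryParameters. The demoOptions struct is a stand-in for S3BackendOptions with illustrative json tags, and the github.com/pingcap/br/pkg/storage import path is assumed; the key normalization (lower-casing and replacing "_" with "-") and the silent skipping of unknown keys follow the function as added above.

package main

import (
	"fmt"
	"net/url"

	"github.com/pingcap/br/pkg/storage" // assumed import path; ExtractQueryParameters is the helper added above
)

// demoOptions stands in for storage.S3BackendOptions; the fields and json tags
// here are illustrative, not copied from the real struct.
type demoOptions struct {
	Endpoint       string `json:"endpoint"`
	ForcePathStyle bool   `json:"force-path-style"`
	SseKmsKeyID    string `json:"sse-kms-key-id"`
}

func main() {
	// force_path_style and SSE-KMS-KEY-ID are normalized to force-path-style
	// and sse-kms-key-id before matching json tags; the unknown key xyz is ignored.
	u, _ := url.Parse("s3://bucket/prefix?endpoint=http://127.0.0.1:9000&force_path_style=1&SSE-KMS-KEY-ID=TestKey&xyz=abc")

	var opts demoOptions
	storage.ExtractQueryParameters(u, &opts)

	fmt.Printf("%+v\n", opts)     // {Endpoint:http://127.0.0.1:9000 ForcePathStyle:true SseKmsKeyID:TestKey}
	fmt.Println(u.RawQuery == "") // true: the query string is stripped afterwards
}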
pkg/storage/parse_test.go (36 changes: 27 additions & 9 deletions)
@@ -4,7 +4,8 @@ package storage

 import (
 	"io/ioutil"
-	"os"
+	"net/url"
+	"path/filepath"
 	"testing"

 	. "github.com/pingcap/check"
@@ -54,6 +55,17 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(s3.Prefix, Equals, "prefix")
 	c.Assert(s3.Endpoint, Equals, "https://s3.example.com/")

+	s, err = ParseBackend("s3://bucket3/prefix/path?endpoint=https://127.0.0.1:9000&force_path_style=1&SSE=aws:kms&sse-kms-key-id=TestKey&xyz=abc", nil)
+	c.Assert(err, IsNil)
+	s3 = s.GetS3()
+	c.Assert(s3, NotNil)
+	c.Assert(s3.Bucket, Equals, "bucket3")
+	c.Assert(s3.Prefix, Equals, "prefix/path")
+	c.Assert(s3.Endpoint, Equals, "https://127.0.0.1:9000")
+	c.Assert(s3.ForcePathStyle, IsTrue)
+	c.Assert(s3.Sse, Equals, "aws:kms")
+	c.Assert(s3.SseKmsKeyId, Equals, "TestKey")
+
 	gcsOpt := &BackendOptions{
 		GCS: GCSBackendOptions{
 			Endpoint: "https://gcs.example.com/",
@@ -68,15 +80,11 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(gcs.Endpoint, Equals, "https://gcs.example.com/")
 	c.Assert(gcs.CredentialsBlob, Equals, "")

-	fakeCredentialsFile, err := ioutil.TempFile("", "fakeCredentialsFile")
+	fakeCredentialsFile := filepath.Join(c.MkDir(), "fakeCredentialsFile")
+	err = ioutil.WriteFile(fakeCredentialsFile, []byte("fakeCredentials"), 0600)
 	c.Assert(err, IsNil)
-	_, err = fakeCredentialsFile.Write([]byte("fakeCredentials"))
-	c.Assert(err, IsNil)
-	defer func() {
-		fakeCredentialsFile.Close()
-		os.Remove(fakeCredentialsFile.Name())
-	}()
-	gcsOpt.GCS.CredentialsFile = fakeCredentialsFile.Name()
+
+	gcsOpt.GCS.CredentialsFile = fakeCredentialsFile

 	s, err = ParseBackend("gcs://bucket/more/prefix/", gcsOpt)
 	c.Assert(err, IsNil)
@@ -86,6 +94,16 @@ func (r *testStorageSuite) TestCreateStorage(c *C) {
 	c.Assert(gcs.Prefix, Equals, "more/prefix/")
 	c.Assert(gcs.Endpoint, Equals, "https://gcs.example.com/")
 	c.Assert(gcs.CredentialsBlob, Equals, "fakeCredentials")
+
+	err = ioutil.WriteFile(fakeCredentialsFile, []byte("fakeCreds2"), 0600)
+	c.Assert(err, IsNil)
+	s, err = ParseBackend("gs://bucket4/backup/?credentials-file="+url.QueryEscape(fakeCredentialsFile), nil)
+	c.Assert(err, IsNil)
+	gcs = s.GetGcs()
+	c.Assert(gcs, NotNil)
+	c.Assert(gcs.Bucket, Equals, "bucket4")
+	c.Assert(gcs.Prefix, Equals, "backup/")
+	c.Assert(gcs.CredentialsBlob, Equals, "fakeCreds2")
 }

 func (r *testStorageSuite) TestFormatBackendURL(c *C) {
tests/br_s3/run.sh (2 changes: 1 addition & 1 deletion)
@@ -58,7 +58,7 @@ done

 # backup full
 echo "backup start..."
-run_br --pd $PD_ADDR backup full -s "s3://mybucket/$DB" --s3.endpoint="http://$S3_ENDPOINT"
+run_br --pd $PD_ADDR backup full -s "s3://mybucket/$DB?endpoint=http://$S3_ENDPOINT"

 for i in $(seq $DB_COUNT); do
 	run_sql "DROP DATABASE $DB${i};"

