This repository has been archived by the owner on Jul 24, 2024. It is now read-only.

Merge master to Release 4.0 #206

Merged: 49 commits into release-4.0 from master, Apr 2, 2020
Commits (changes below are shown from 37 of the 49 commits)
0a43d2a
restore: merge tidb-tools/pkg/restore-util (#146)
overvenus Jan 22, 2020
3863a3a
Fixed handling for a dbName that do not exist in the backup being res…
Feb 1, 2020
6b65080
validate: fix debug meta test ci (#153)
3pointer Feb 5, 2020
0dfe82d
*: extracts runBackup/runRestore in cmd into pkg/task (#156)
kennytm Feb 10, 2020
8c97452
restore: fix restore summary log (#150)
3pointer Feb 13, 2020
008ec45
restore: enhance error handling (#152)
5kbpers Feb 13, 2020
1e1fc97
Incremental BR: support DDL (#155)
5kbpers Feb 20, 2020
4492b8f
Reduce TiDB dependencies (#158)
kennytm Feb 22, 2020
3c9d42f
go.mod: update tidb (#168)
overvenus Feb 23, 2020
028963d
BR support TLS (#161)
3pointer Feb 27, 2020
e462f80
upgrade golangci and prepare for go 1.14 (#171)
overvenus Feb 28, 2020
4657932
backup: add raw backup command (#101)
3pointer Mar 4, 2020
0a1a044
restore: speed up retry on not leader (#179)
overvenus Mar 5, 2020
09fb715
conn, restore: paginate scan regions (#165)
overvenus Mar 5, 2020
82b5016
Batch restore (#167)
overvenus Mar 6, 2020
3fc9ee2
README, docker: add quick start (#181)
overvenus Mar 7, 2020
237fe5b
*: update tidb dependency build with go1.14 (#176)
3pointer Mar 10, 2020
0672ab3
*: add license header (#182)
overvenus Mar 11, 2020
9caa6de
conn: support not shutting down the storage when closing the connecti…
kennytm Mar 11, 2020
7d59284
conn: use GetDomain to avoid some TiDB breaking changes (#186)
overvenus Mar 11, 2020
0e25496
fix check safepoint & unhide experimental features (#175)
5kbpers Mar 11, 2020
3419d8a
support backupts (#172)
3pointer Mar 12, 2020
e476c82
*: update pd deps to v4 (#184)
kennytm Mar 12, 2020
9c2bf9d
restore: support online restore (#114)
disksing Mar 13, 2020
512855d
metrics: add grafana scripts (#140)
WangXiangUSTC Mar 15, 2020
6b88e51
filter out all TiFlash nodes when retrieving lists of stores from PD …
kennytm Mar 16, 2020
156f003
Create integration test for S3 storage (#174)
kennytm Mar 16, 2020
bbedfc8
Fix summary log (#191)
3pointer Mar 16, 2020
c3d26d9
Implement Raw Restore (#104)
MyonKeminta Mar 17, 2020
6268cde
restore: remove tiflash replica before restore (#194)
5kbpers Mar 18, 2020
4ea6c1c
summary: put summary log at last (#197)
3pointer Mar 19, 2020
2669204
*: abstract the progress channel (updateCh) into the glue package (#196)
kennytm Mar 20, 2020
f9f6e19
*: refline logs (#189)
TennyZhuang Mar 20, 2020
e168a60
tests: disable TLS test (#204)
overvenus Mar 24, 2020
4bc66c3
*: add S3 quick start and few enhancement of log (#202)
overvenus Mar 25, 2020
2f083c8
restore: add error field to `DownloadResponse` (#195)
5kbpers Mar 25, 2020
ff446f2
Merge branch 'master' into release-4.0
3pointer Mar 25, 2020
1f27b57
restore: populate restore cancel error (#207)
overvenus Mar 25, 2020
ae7688a
enhance usability of br (#208)
3pointer Mar 25, 2020
8638d9a
task: do not run checksum if restore failed (#209)
kennytm Mar 26, 2020
f112da7
fix incremental bug in llroad test (#199)
3pointer Mar 26, 2020
0b21731
Merge branch 'master' into release-4.0
3pointer Mar 26, 2020
8fbedbf
add skip create sqls (#211)
3pointer Mar 30, 2020
f031777
Revert "tests: disable TLS test (#204)" (#218)
kennytm Mar 31, 2020
f7dc2db
doc: add `minio` to dependence list. (#221)
YuJuncen Apr 1, 2020
01de3f5
move waiting reject stores in import file (#222)
3pointer Apr 1, 2020
3341f40
Max index length (#220)
3pointer Apr 2, 2020
7de169d
glue: create schema/table directly with info (#216)
kennytm Apr 2, 2020
3c4a077
Merge branch 'master' into release-4.0
3pointer Apr 2, 2020
16 changes: 15 additions & 1 deletion README.md
@@ -56,7 +56,7 @@ go-ycsb load mysql -p workload=core \
mysql -uroot -htidb -P4000 -E -e "SELECT COUNT(*) FROM test.usertable"

# Build BR and backup!
make release && \
make build && \
bin/br backup full --pd pd0:2379 --storage "local:///data/backup/full" \
--log-file "/logs/br_backup.log"

@@ -69,6 +69,20 @@ bin/br restore full --pd pd0:2379 --storage "local:///data/backup/full" \

# How many rows do we get again? Expected to be 100000 rows.
mysql -uroot -htidb -P4000 -E -e "SELECT COUNT(*) FROM test.usertable"

# Test S3 compatible storage (MinIO).
# Create a bucket to save backup by mc (a MinIO Client).
mc config host add minio $S3_ENDPOINT $MINIO_ACCESS_KEY $MINIO_SECRET_KEY && \
mc mb minio/mybucket

# Backup to S3 compatible storage.
bin/br backup full --pd pd0:2379 --storage "s3://mybucket/full" \
--s3.endpoint="$S3_ENDPOINT"

# Drop database and restore!
mysql -uroot -htidb -P4000 -E -e "DROP DATABASE test; SHOW DATABASES;" && \
bin/br restore full --pd pd0:2379 --storage "s3://mybucket/full" \
--s3.endpoint="$S3_ENDPOINT"
```

## Contributing
28 changes: 22 additions & 6 deletions cmd/cmd.go
@@ -6,8 +6,11 @@ import (
"context"
"net/http"
"net/http/pprof"
"os"
"path/filepath"
"sync"
"sync/atomic"
"time"

"github.com/pingcap/log"
"github.com/pingcap/tidb/util/logutil"
@@ -16,15 +19,17 @@ import (
"go.uber.org/zap"

"github.com/pingcap/br/pkg/gluetidb"
"github.com/pingcap/br/pkg/summary"
"github.com/pingcap/br/pkg/task"
"github.com/pingcap/br/pkg/utils"
)

var (
initOnce = sync.Once{}
defaultContext context.Context
hasLogFile uint64
tidbGlue = gluetidb.Glue{}
initOnce = sync.Once{}
defaultContext context.Context
hasLogFile uint64
tidbGlue = gluetidb.Glue{}
envLogToTermKey = "BR_LOG_TO_TERM"
)

const (
@@ -41,6 +46,10 @@ const (
flagVersionShort = "V"
)

func timestampLogFileName() string {
return filepath.Join(os.TempDir(), "br.log."+time.Now().Format(time.RFC3339))
}

// AddFlags adds flags to the given cmd.
func AddFlags(cmd *cobra.Command) {
cmd.Version = utils.BRInfo()
@@ -49,8 +58,8 @@ func AddFlags(cmd *cobra.Command) {

cmd.PersistentFlags().StringP(FlagLogLevel, "L", "info",
"Set the log level")
cmd.PersistentFlags().String(FlagLogFile, "",
"Set the log file path. If not set, logs will output to stdout")
cmd.PersistentFlags().String(FlagLogFile, timestampLogFileName(),
"Set the log file path. If not set, logs will output to temp file")
cmd.PersistentFlags().String(FlagStatusAddr, "",
"Set the HTTP listening address for the status report service. Set to empty string to disable")
task.DefineCommonFlags(cmd.PersistentFlags())
@@ -73,8 +82,15 @@ func Init(cmd *cobra.Command) (err error) {
if err != nil {
return
}
_, outputLogToTerm := os.LookupEnv(envLogToTermKey)
if outputLogToTerm {
// Log to term if env `BR_LOG_TO_TERM` is set.
conf.File.Filename = ""
}
if len(conf.File.Filename) != 0 {
atomic.StoreUint64(&hasLogFile, 1)
summary.InitCollector(true)
cmd.Printf("Detial BR log in %s\n", conf.File.Filename)
}
lg, p, e := log.InitLogger(conf)
if e != nil {
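
The cmd/cmd.go hunk above changes BR's default log destination: instead of stdout, logs now go to a timestamped file under the OS temp directory, and setting the BR_LOG_TO_TERM environment variable forces terminal output again. A minimal standalone sketch of that decision, for illustration only (the shipped logic is the diff above):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"time"
)

func main() {
	// Default: a per-run log file such as /tmp/br.log.2020-04-02T12:00:00Z,
	// mirroring timestampLogFileName() in the hunk above.
	logFile := filepath.Join(os.TempDir(), "br.log."+time.Now().Format(time.RFC3339))

	// Setting BR_LOG_TO_TERM (to any value) clears the filename, which the
	// logger config treats as "write to the terminal".
	if _, toTerm := os.LookupEnv("BR_LOG_TO_TERM"); toTerm {
		logFile = ""
	}

	if logFile != "" {
		fmt.Println("detailed BR log in", logFile)
	} else {
		fmt.Println("logging to terminal")
	}
}
```

The practical effect is that a plain br invocation leaves a detailed log file behind even without --log-file, while tests and interactive sessions can export BR_LOG_TO_TERM to keep the old behavior.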
26 changes: 24 additions & 2 deletions docker-compose.yaml
@@ -1,6 +1,6 @@
---
# Source: tidb-docker-compose/templates/docker-compose.yml
version: '2.1'
version: '3.2'

services:
control:
@@ -10,11 +10,13 @@ services:
dockerfile: ./docker/Dockerfile
volumes:
- ./docker/data:/data
- ./docker/logs:/logs
- ./docker/logs:/tmp
command: -c "/usr/bin/tail -f /dev/null"
depends_on:
- "tidb"
restart: on-failure
env_file:
- ./docker/minio.env

pd0:
image: pingcap/pd:latest
@@ -64,6 +66,8 @@ services:
# soft: 1000000
# hard: 1000000
restart: on-failure
env_file:
- ./docker/minio.env

tikv1:
image: pingcap/tikv:latest
@@ -87,6 +91,8 @@ services:
# soft: 1000000
# hard: 1000000
restart: on-failure
env_file:
- ./docker/minio.env

tikv2:
image: pingcap/tikv:latest
@@ -110,6 +116,8 @@ services:
# soft: 1000000
# hard: 1000000
restart: on-failure
env_file:
- ./docker/minio.env

tikv3:
image: pingcap/tikv:latest
@@ -133,6 +141,8 @@ services:
# soft: 1000000
# hard: 1000000
restart: on-failure
env_file:
- ./docker/minio.env

tikv4:
image: pingcap/tikv:latest
@@ -156,6 +166,8 @@ services:
# soft: 1000000
# hard: 1000000
restart: on-failure
env_file:
- ./docker/minio.env

tidb:
image: pingcap/tidb:latest
@@ -185,6 +197,16 @@ services:
# hard: 1000000
restart: on-failure

minio:
image: minio/minio
ports:
- 24927:24927
volumes:
- ./docker/data/s3:/data/s3
command: server --address=:24927 /data/s3
env_file:
- ./docker/minio.env

tidb-vision:
image: pingcap/tidb-vision:latest
environment:
9 changes: 6 additions & 3 deletions docker/Dockerfile
@@ -1,11 +1,13 @@
FROM golang:1.13.8-buster as builder

# For loading data to TiDB
FROM golang:1.13.8-buster as go-ycsb-builder
WORKDIR /go/src/github.com/pingcap/
RUN git clone https://github.com/pingcap/go-ycsb.git && \
cd go-ycsb && \
make

# For operating minio S3 compatible storage
FROM minio/mc as mc-builder

FROM golang:1.13.8-buster

RUN apt-get update && apt-get install -y --no-install-recommends \
@@ -19,6 +21,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
WORKDIR /go/src/github.com/pingcap/br
COPY . .

COPY --from=builder /go/src/github.com/pingcap/go-ycsb/bin/go-ycsb /go/bin/go-ycsb
COPY --from=go-ycsb-builder /go/src/github.com/pingcap/go-ycsb/bin/go-ycsb /go/bin/go-ycsb
COPY --from=mc-builder /usr/bin/mc /usr/bin/mc

ENTRYPOINT ["/bin/bash"]
6 changes: 6 additions & 0 deletions docker/minio.env
@@ -0,0 +1,6 @@
MINIO_ACCESS_KEY=brs3accesskey
MINIO_SECRET_KEY=brs3secretkey
MINIO_BROWSER=off
AWS_ACCESS_KEY_ID=brs3accesskey
AWS_SECRET_ACCESS_KEY=brs3secretkey
S3_ENDPOINT=http://minio:24927
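
This env file serves both sides of the S3 integration test: the MINIO_* variables configure the MinIO server container, while AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY and S3_ENDPOINT are what an S3 client reads. BR's own storage code is not part of this diff, so as a purely hypothetical illustration, here is how a Go client using aws-sdk-go could consume exactly these variables to reach the MinIO endpoint (the SDK picks the AWS_* credentials up from the environment automatically):

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Credentials come from AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY,
	// which docker/minio.env sets to the same values as the MinIO server.
	sess, err := session.NewSession(&aws.Config{
		Endpoint:         aws.String(os.Getenv("S3_ENDPOINT")), // http://minio:24927
		Region:           aws.String("us-east-1"),
		S3ForcePathStyle: aws.Bool(true), // path-style addressing for MinIO
	})
	if err != nil {
		log.Fatal(err)
	}

	// List buckets to confirm the endpoint and credentials line up,
	// e.g. the "mybucket" created by mc in the README quick start.
	out, err := s3.New(sess).ListBuckets(&s3.ListBucketsInput{})
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range out.Buckets {
		fmt.Println(*b.Name)
	}
}
```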
1 change: 0 additions & 1 deletion go.sum
@@ -363,7 +363,6 @@ github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989/go.mod h1:O17Xtb
github.com/pingcap/kvproto v0.0.0-20191211054548-3c6b38ea5107/go.mod h1:WWLmULLO7l8IOcQG+t+ItJ3fEcrL5FxF0Wu+HrMy26w=
github.com/pingcap/kvproto v0.0.0-20200214064158-62d31900d88e/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
github.com/pingcap/kvproto v0.0.0-20200221034943-a2aa1d1e20a8/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
github.com/pingcap/kvproto v0.0.0-20200228095611-2cf9a243b8d5 h1:knEvP4R5v5b2T107/Q6VzB0C8/6T7NXB/V7Vl1FtQsg=
github.com/pingcap/kvproto v0.0.0-20200228095611-2cf9a243b8d5/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
github.com/pingcap/kvproto v0.0.0-20200317112120-78042b285b75 h1:DB3NTM0ilba/6sW+vccdEnP10bVvrVunDwWvRa0hSKc=
github.com/pingcap/kvproto v0.0.0-20200317112120-78042b285b75/go.mod h1:IOdRDPLyda8GX2hE/jO7gqaCV/PNFh8BZQCQZXfIOqI=
9 changes: 5 additions & 4 deletions pkg/backup/client.go
@@ -32,6 +32,7 @@ import (
"go.uber.org/zap"

"github.com/pingcap/br/pkg/conn"
"github.com/pingcap/br/pkg/glue"
"github.com/pingcap/br/pkg/rtree"
"github.com/pingcap/br/pkg/storage"
"github.com/pingcap/br/pkg/summary"
@@ -309,7 +310,7 @@ func (bc *Client) BackupRanges(
ctx context.Context,
ranges []rtree.Range,
req kvproto.BackupRequest,
updateCh chan<- struct{},
updateCh glue.Progress,
) error {
start := time.Now()
defer func() {
@@ -374,7 +375,7 @@ func (bc *Client) BackupRange(
ctx context.Context,
startKey, endKey []byte,
req kvproto.BackupRequest,
updateCh chan<- struct{},
updateCh glue.Progress,
) (err error) {
start := time.Now()
defer func() {
@@ -486,7 +487,7 @@ func (bc *Client) fineGrainedBackup(
rateLimit uint64,
concurrency uint32,
rangeTree rtree.RangeTree,
updateCh chan<- struct{},
updateCh glue.Progress,
) error {
bo := tikv.NewBackoffer(ctx, backupFineGrainedMaxBackoff)
for {
@@ -561,7 +562,7 @@ func (bc *Client) fineGrainedBackup(
rangeTree.Put(resp.StartKey, resp.EndKey, resp.Files)

// Update progress
updateCh <- struct{}{}
updateCh.Inc()
}
}

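
In this file, as well as in the pkg/backup/push.go and pkg/backup/schema.go hunks below, the progress channel of type chan<- struct{} is replaced by a glue.Progress value, and every updateCh <- struct{}{} send becomes updateCh.Inc() (the refactor from #196). The interface definition itself is not part of this diff; judging from the calls here and the simpleProgress test double in pkg/backup/schema_test.go further down, its shape is roughly the following, a sketch under that assumption rather than the actual pkg/glue source:

```go
package glue

// Progress is the assumed minimal shape of the abstraction from #196:
// something that counts completed units of work and can be closed when done.
type Progress interface {
	// Inc records that one more unit (a range, region, table, ...) finished.
	Inc()
	// Close stops the underlying progress reporting.
	Close()
}

// channelProgress is a hypothetical adapter showing how the previous
// chan<- struct{} style maps onto the new interface.
type channelProgress struct {
	ch chan<- struct{}
}

func (p channelProgress) Inc()   { p.ch <- struct{}{} }
func (p channelProgress) Close() { close(p.ch) }
```

Decoupling the backup code from a concrete channel lets each glue implementation decide how to surface progress, for example a console progress bar in the CLI versus no-op reporting when BR is embedded elsewhere.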
5 changes: 3 additions & 2 deletions pkg/backup/push.go
@@ -12,6 +12,7 @@ import (
"github.com/pingcap/log"
"go.uber.org/zap"

"github.com/pingcap/br/pkg/glue"
"github.com/pingcap/br/pkg/rtree"
)

@@ -38,7 +39,7 @@ func newPushDown(ctx context.Context, mgr ClientMgr, cap int) *pushDown {
func (push *pushDown) pushBackup(
req backup.BackupRequest,
stores []*metapb.Store,
updateCh chan<- struct{},
updateCh glue.Progress,
) (rtree.RangeTree, error) {
// Push down backup tasks to all tikv instances.
res := rtree.NewRangeTree()
@@ -90,7 +91,7 @@ func (push *pushDown) pushBackup(
resp.GetStartKey(), resp.GetEndKey(), resp.GetFiles())

// Update progress
updateCh <- struct{}{}
updateCh.Inc()
} else {
errPb := resp.GetError()
switch v := errPb.Detail.(type) {
7 changes: 4 additions & 3 deletions pkg/backup/schema.go
@@ -18,6 +18,7 @@ import (
"go.uber.org/zap"

"github.com/pingcap/br/pkg/checksum"
"github.com/pingcap/br/pkg/glue"
"github.com/pingcap/br/pkg/summary"
"github.com/pingcap/br/pkg/utils"
)
@@ -67,7 +68,7 @@ func (pending *Schemas) Start(
store kv.Storage,
backupTS uint64,
concurrency uint,
updateCh chan<- struct{},
updateCh glue.Progress,
) {
workerPool := utils.NewWorkerPool(concurrency, "Schemas")
go func() {
@@ -82,7 +83,7 @@

if pending.skipChecksum {
pending.backupSchemaCh <- schema
updateCh <- struct{}{}
updateCh.Inc()
return
}

@@ -110,7 +111,7 @@
zap.Duration("take", time.Since(start)))
pending.backupSchemaCh <- schema

updateCh <- struct{}{}
updateCh.Inc()
})
}
pending.wg.Wait()
27 changes: 23 additions & 4 deletions pkg/backup/schema_test.go
@@ -5,6 +5,7 @@ package backup
import (
"context"
"math"
"sync/atomic"

. "github.com/pingcap/check"
"github.com/pingcap/tidb-tools/pkg/filter"
@@ -30,6 +31,24 @@ func (s *testBackupSchemaSuite) TearDownSuite(c *C) {
testleak.AfterTest(c)()
}

type simpleProgress struct {
counter int64
}

func (sp *simpleProgress) Inc() {
atomic.AddInt64(&sp.counter, 1)
}

func (sp *simpleProgress) Close() {}

func (sp *simpleProgress) reset() {
atomic.StoreInt64(&sp.counter, 0)
}

func (sp *simpleProgress) get() int64 {
return atomic.LoadInt64(&sp.counter)
}

func (s *testBackupSchemaSuite) TestBuildBackupRangeAndSchema(c *C) {
c.Assert(s.mock.Start(), IsNil)
defer s.mock.Stop()
@@ -73,10 +92,10 @@ func (s *testBackupSchemaSuite) TestBuildBackupRangeAndSchema(c *C) {
s.mock.Domain, s.mock.Storage, testFilter, math.MaxUint64)
c.Assert(err, IsNil)
c.Assert(backupSchemas.Len(), Equals, 1)
updateCh := make(chan struct{}, 2)
updateCh := new(simpleProgress)
backupSchemas.Start(context.Background(), s.mock.Storage, math.MaxUint64, 1, updateCh)
schemas, err := backupSchemas.finishTableChecksum()
<-updateCh
c.Assert(updateCh.get(), Equals, int64(1))
c.Assert(err, IsNil)
c.Assert(len(schemas), Equals, 1)
// Cluster returns a dummy checksum (all fields are 1).
@@ -93,10 +112,10 @@
s.mock.Domain, s.mock.Storage, noFilter, math.MaxUint64)
c.Assert(err, IsNil)
c.Assert(backupSchemas.Len(), Equals, 2)
updateCh.reset()
backupSchemas.Start(context.Background(), s.mock.Storage, math.MaxUint64, 2, updateCh)
schemas, err = backupSchemas.finishTableChecksum()
<-updateCh
<-updateCh
c.Assert(updateCh.get(), Equals, int64(2))
c.Assert(err, IsNil)
c.Assert(len(schemas), Equals, 2)
// Cluster returns a dummy checksum (all fields are 1).