*(all): update dependencies on tidb and pd. (#8126)
close #8110, ref #8115
asddongmen committed Feb 8, 2023
1 parent caa7398 commit 5a38ed9
Showing 41 changed files with 729 additions and 423 deletions.
21 changes: 15 additions & 6 deletions Makefile
@@ -40,14 +40,23 @@ ifeq (${CDC_ENABLE_VENDOR}, 1)
GOVENDORFLAG := -mod=vendor
endif

GOBUILD := CGO_ENABLED=0 $(GO) build $(BUILD_FLAG) -trimpath $(GOVENDORFLAG)
# Since TiDB added a new dependency on github.com/cloudfoundry/gosigar,
# we need CGO_ENABLED=1 to build TiCDC on Darwin.
# The logic below checks whether the OS is Darwin and, if so, sets CGO_ENABLED=1.
# ref: https://github.com/cloudfoundry/gosigar/issues/58#issuecomment-1150925711
# ref: https://github.com/pingcap/tidb/pull/39526#issuecomment-1407952955
OS := "$(shell go env GOOS)"
ifeq (${OS}, "linux")
CGO := 0
else ifeq (${OS}, "darwin")
CGO := 1
endif

GOBUILD := CGO_ENABLED=$(CGO) $(GO) build $(BUILD_FLAG) -trimpath $(GOVENDORFLAG)
GOBUILDNOVENDOR := CGO_ENABLED=0 $(GO) build $(BUILD_FLAG) -trimpath
GOTEST := CGO_ENABLED=1 $(GO) test -p $(P) --race
GOTEST := CGO_ENABLED=1 $(GO) test -p $(P) --race --tags=intest
GOTESTNORACE := CGO_ENABLED=1 $(GO) test -p $(P)

ARCH := "$(shell uname -s)"
LINUX := "Linux"
MAC := "Darwin"
CDC_PKG := github.com/pingcap/tiflow
DM_PKG := github.com/pingcap/tiflow/dm
ENGINE_PKG := github.com/pingcap/tiflow/engine
@@ -163,7 +172,7 @@ unit_test_in_verify_ci: check_failpoint_ctl tools/bin/gotestsum tools/bin/gocov
mkdir -p "$(TEST_DIR)"
$(FAILPOINT_ENABLE)
@export log_level=error;\
CGO_ENABLED=1 tools/bin/gotestsum --junitfile cdc-junit-report.xml -- -v -timeout 5m -p $(P) --race \
CGO_ENABLED=1 tools/bin/gotestsum --junitfile cdc-junit-report.xml -- -v -timeout 5m -p $(P) --race --tags=intest \
-covermode=atomic -coverprofile="$(TEST_DIR)/cov.unit.out" $(PACKAGES_TICDC) \
|| { $(FAILPOINT_DISABLE); exit 1; }
tools/bin/gocov convert "$(TEST_DIR)/cov.unit.out" | tools/bin/gocov-xml > cdc-coverage.xml
2 changes: 1 addition & 1 deletion cdc/api/v2/changefeed_test.go
@@ -321,7 +321,7 @@ func TestUpdateChangefeed(t *testing.T) {
cp.EXPECT().IsOwner().Return(true).AnyTimes()

// case 1 invalid id
invalidID := "#Invalid_"
invalidID := "Invalid_#"
w := httptest.NewRecorder()
req, _ := http.NewRequestWithContext(context.Background(), update.method,
fmt.Sprintf(update.url, invalidID), nil)
35 changes: 27 additions & 8 deletions cdc/entry/schema/snapshot.go
@@ -121,7 +121,7 @@ func NewSingleSnapshotFromMeta(meta *timeta.Meta, currentTs uint64, forceReplica
// meta is nil only in unit tests
if meta == nil {
snap := NewEmptySnapshot(forceReplicate)
snap.InitConcurrentDDLTables()
snap.InitPreExistingTables()
snap.inner.currentTs = currentTs
return snap, nil
}
@@ -210,12 +210,16 @@ const (
mdlCreateTable = "create table mysql.tidb_mdl_info(job_id BIGINT NOT NULL PRIMARY KEY, version BIGINT NOT NULL, table_ids text(65535));"
)

// InitConcurrentDDLTables imitates the creating table logic for concurrent DDL.
// InitPreExistingTables initializes the pre-existing tables in an empty Snapshot.
// Since v6.2.0, tables of concurrent DDL will be directly written as meta KV in
// TiKV, without being written to history DDL jobs. So a Snapshot that is not
// built from meta needs this method to handle history DDL.
func (s *Snapshot) InitConcurrentDDLTables() {
tableIDs := [...]int64{ddl.JobTableID, ddl.ReorgTableID, ddl.HistoryTableID}
// Since v6.5.0, backfill tables are written as meta KV in TiKV, so a Snapshot
// that is not built from meta needs this method to handle history DDL.
// See: https://github.com/pingcap/tidb/pull/39616
func (s *Snapshot) InitPreExistingTables() {
ddlJobTableIDs := [...]int64{ddl.JobTableID, ddl.ReorgTableID, ddl.HistoryTableID}
backfillTableIDs := [...]int64{ddl.BackfillTableID, ddl.BackfillHistoryTableID}

mysqlDBInfo := &timodel.DBInfo{
ID: mysqlDBID,
@@ -231,10 +235,20 @@ func (s *Snapshot) InitConcurrentDDLTables() {
stmt, _ := p.ParseOneStmt(table.SQL, "", "")
tblInfo, _ := ddl.BuildTableInfoFromAST(stmt.(*ast.CreateTableStmt))
tblInfo.State = timodel.StatePublic
tblInfo.ID = tableIDs[i]
tblInfo.ID = ddlJobTableIDs[i]
wrapped := model.WrapTableInfo(mysqlDBID, mysql.SystemDB, dummyTS, tblInfo)
_ = s.inner.createTable(wrapped, dummyTS)
}

for i, table := range session.BackfillTables {
stmt, _ := p.ParseOneStmt(table.SQL, "", "")
tblInfo, _ := ddl.BuildTableInfoFromAST(stmt.(*ast.CreateTableStmt))
tblInfo.State = timodel.StatePublic
tblInfo.ID = backfillTableIDs[i]
wrapped := model.WrapTableInfo(mysqlDBID, mysql.SystemDB, dummyTS, tblInfo)
_ = s.inner.createTable(wrapped, dummyTS)
}

stmt, _ := p.ParseOneStmt(mdlCreateTable, "", "")
tblInfo, _ := ddl.BuildTableInfoFromAST(stmt.(*ast.CreateTableStmt))
tblInfo.State = timodel.StatePublic
@@ -514,8 +528,14 @@ func (s *Snapshot) DoHandleDDL(job *timodel.Job) error {
}

// TableCount counts tables in the snapshot. It's only for tests.
func (s *Snapshot) TableCount(includeIneligible bool) (count int) {
s.IterTables(includeIneligible, func(i *model.TableInfo) { count += 1 })
func (s *Snapshot) TableCount(includeIneligible bool,
filter func(schema, table string) bool,
) (count int) {
s.IterTables(includeIneligible, func(i *model.TableInfo) {
if filter(i.TableName.Schema, i.TableName.Table) {
count++
}
})
return
}
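The filter parameter added to TableCount lets callers skip schemas they do not care about when counting. A minimal usage sketch, assuming a snapshot value snap; the excluded schema names mirror the filter used in the updated tests:

	// Count only user tables, ignoring TiDB system schemas.
	count := snap.TableCount(true, func(schema, table string) bool {
		return schema != "mysql" && schema != "information_schema"
	})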

@@ -552,7 +572,6 @@ func (s *Snapshot) DumpToString() string {
schema, _ := s.inner.schemaByID(schemaID)
tableNames = append(tableNames, fmt.Sprintf("%s.%s:%d", schema.Name.O, table, target))
})

return fmt.Sprintf("%s\n%s\n%s\n%s\n%s",
strings.Join(schemas, "\t"),
strings.Join(tables, "\t"),
5 changes: 3 additions & 2 deletions cdc/entry/schema/snapshot_test.go
@@ -199,9 +199,10 @@ func TestTable(t *testing.T) {
require.False(t, snap.IsIneligibleTableID(12))
require.False(t, snap.IsIneligibleTableID(12+65536))
}

// IterTables should get no available tables.
require.Equal(t, snap.TableCount(true), 0)
require.Equal(t, snap.TableCount(true, func(table, schema string) bool {
return true
}), 0)
}
}

4 changes: 2 additions & 2 deletions cdc/entry/schema_storage.go
@@ -74,7 +74,7 @@ func NewSchemaStorage(
)
if meta == nil {
snap = schema.NewEmptySnapshot(forceReplicate)
snap.InitConcurrentDDLTables()
snap.InitPreExistingTables()
} else {
snap, err = schema.NewSnapshotFromMeta(meta, startTs, forceReplicate)
if err != nil {
@@ -188,7 +188,7 @@ func (s *schemaStorageImpl) HandleDDLJob(job *timodel.Job) error {
snap = lastSnap.Copy()
} else {
snap = schema.NewEmptySnapshot(s.forceReplicate)
snap.InitConcurrentDDLTables()
snap.InitPreExistingTables()
}
if err := snap.HandleDDL(job); err != nil {
log.Error("handle DDL failed",
21 changes: 15 additions & 6 deletions cdc/entry/schema_storage_test.go
@@ -618,7 +618,7 @@ func TestMultiVersionStorage(t *testing.T) {
require.False(t, exist)

lastSchemaTs := storage.DoGC(0)
// Snapshot.InitConcurrentDDLTables will create a schema with ts = 1
// Snapshot.InitPreExistingTables will create a schema with ts = 1
require.Equal(t, uint64(1), lastSchemaTs)

snap, err = storage.GetSnapshot(ctx, 100)
@@ -729,12 +729,21 @@ func TestExplicitTables(t *testing.T) {
snap3, err := schema.NewSnapshotFromMeta(meta2, ver2.Ver, true /* forceReplicate */)
require.Nil(t, err)

require.Equal(t, snap2.TableCount(true)-snap1.TableCount(true), 5)
// some system tables are also ineligible
require.GreaterOrEqual(t, snap2.TableCount(false), 4)
// We don't count system tables here: TiCDC does not replicate them and
// TiDB changes them frequently, so they are excluded from the table count.
systemTablesFilter := func(dbName, tableName string) bool {
return dbName != "mysql" && dbName != "information_schema"
}
require.Equal(t, 5, snap2.TableCount(true,
systemTablesFilter)-snap1.TableCount(true, systemTablesFilter))
// only test simple_test1 included
require.Equal(t, 1, snap2.TableCount(false, systemTablesFilter))

require.Equal(t, snap3.TableCount(true)-snap1.TableCount(true), 5)
require.Equal(t, snap3.TableCount(false), 45)
require.Equal(t, 5, snap3.TableCount(true,
systemTablesFilter)-snap1.TableCount(true, systemTablesFilter))
// Since the snapshot is created from meta2 with forceReplicate set to true, all tables are included.
require.Equal(t, 5, snap3.TableCount(false, systemTablesFilter))
}

/*
4 changes: 2 additions & 2 deletions cdc/owner/schema_test.go
@@ -95,7 +95,7 @@ func TestAllTables(t *testing.T) {
require.Equal(t, tableName, model.TableName{
Schema: "test",
Table: "t1",
TableID: 76,
TableID: 84,
})
// add ineligible table
require.Nil(t, schema.HandleDDL(helper.DDL2Job("create table test.t2(id int)")))
@@ -104,7 +104,7 @@
require.Equal(t, tableName, model.TableName{
Schema: "test",
Table: "t1",
TableID: 76,
TableID: 84,
})
}

2 changes: 1 addition & 1 deletion cdc/server/server_test.go
@@ -40,7 +40,7 @@ import (
"github.com/pingcap/tiflow/pkg/security"
"github.com/pingcap/tiflow/pkg/util"
"github.com/stretchr/testify/require"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
clientv3 "go.etcd.io/etcd/client/v3"
"go.etcd.io/etcd/server/v3/embed"
"golang.org/x/sync/errgroup"
2 changes: 1 addition & 1 deletion cdc/sink/codec/maxwell/maxwell_message.go
@@ -21,7 +21,7 @@ import (
"github.com/pingcap/tiflow/cdc/model"
"github.com/pingcap/tiflow/cdc/sink/codec/internal"
cerror "github.com/pingcap/tiflow/pkg/errors"
"github.com/tikv/pd/pkg/tsoutil"
"github.com/tikv/pd/pkg/utils/tsoutil"
)

type maxwellMessage struct {
2 changes: 1 addition & 1 deletion dm/master/election_test.go
@@ -24,7 +24,7 @@ import (
"github.com/pingcap/tiflow/dm/pkg/etcdutil"
"github.com/pingcap/tiflow/dm/pkg/log"
"github.com/stretchr/testify/require"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
)

func TestFailToStartLeader(t *testing.T) {
2 changes: 1 addition & 1 deletion dm/master/etcd_test.go
@@ -26,7 +26,7 @@ import (
"github.com/pingcap/tiflow/dm/pkg/log"
"github.com/pingcap/tiflow/dm/pkg/terror"
"github.com/pingcap/tiflow/dm/pkg/utils"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
clientv3 "go.etcd.io/etcd/client/v3"
"go.etcd.io/etcd/server/v3/embed"
)
2 changes: 1 addition & 1 deletion dm/master/openapi_view_test.go
@@ -43,7 +43,7 @@ import (
"github.com/pingcap/tiflow/dm/pkg/utils"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
)

// some data for test.
2 changes: 1 addition & 1 deletion dm/master/server_test.go
@@ -66,7 +66,7 @@ import (
"github.com/pingcap/tiflow/pkg/version"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
clientv3 "go.etcd.io/etcd/client/v3"
"go.etcd.io/etcd/server/v3/verify"
"go.etcd.io/etcd/tests/v3/integration"
2 changes: 1 addition & 1 deletion dm/master/workerrpc/workerrpc_test.go
@@ -25,7 +25,7 @@ import (
"github.com/pingcap/tiflow/dm/pb"
"github.com/pingcap/tiflow/dm/pbmock"
"github.com/pingcap/tiflow/dm/pkg/terror"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
)

var _ = Suite(&testWorkerRPCSuite{})
2 changes: 1 addition & 1 deletion dm/pkg/election/election_test.go
@@ -27,7 +27,7 @@ import (
"github.com/pingcap/tiflow/dm/pkg/log"
"github.com/pingcap/tiflow/dm/pkg/terror"
"github.com/pingcap/tiflow/dm/pkg/utils"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
clientv3 "go.etcd.io/etcd/client/v3"
"go.etcd.io/etcd/server/v3/embed"
)
2 changes: 1 addition & 1 deletion dm/pkg/etcdutil/etcdutil_test.go
@@ -25,7 +25,7 @@ import (
"github.com/pingcap/tiflow/dm/pkg/log"
"github.com/pingcap/tiflow/dm/pkg/terror"
"github.com/stretchr/testify/require"
"github.com/tikv/pd/pkg/tempurl"
"github.com/tikv/pd/pkg/utils/tempurl"
"go.etcd.io/etcd/api/v3/etcdserverpb"
v3rpc "go.etcd.io/etcd/api/v3/v3rpc/rpctypes"
clientv3 "go.etcd.io/etcd/client/v3"
12 changes: 6 additions & 6 deletions dm/pkg/retry/errors.go
@@ -36,17 +36,17 @@ var (
"Unsupported collation",
"Invalid default value for",
"Unsupported drop primary key",
"Error 1059: Identifier name", // Limitations on identifier length
"Error 1117: Too many columns",
"Error 1069: Too many keys specified",
"Error 1059",
"Error 1117",
"Error 1069",
}

// UnsupportedDMLMsgs list the error messages of some un-recoverable DML, which is used in task auto recovery.
UnsupportedDMLMsgs = []string{
"Error 1062: Duplicate",
"Error 1406: Data too long for column",
"Error 1062",
"Error 1406",
"Error 1366",
"Error 8025: entry too large",
"Error 8025",
}
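The error-message prefixes above are trimmed to just the MySQL error numbers, presumably because the updated dependencies format server errors with a SQLSTATE code (for example "Error 1062 (23000): Duplicate entry ...", as the updated shell tests below show), so matching on "Error <number>" covers both the old and the new format. A rough sketch of the substring matching this relies on; isUnsupportedDML is a hypothetical helper, not the actual DM function:

	// Sketch only: substring matching lets the shortened prefixes match both
	// "Error 1062: Duplicate entry ..." and "Error 1062 (23000): Duplicate entry ...".
	func isUnsupportedDML(err error) bool {
		for _, msg := range UnsupportedDMLMsgs {
			if strings.Contains(err.Error(), msg) {
				return true
			}
		}
		return false
	}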

// ReplicationErrMsgs list the error message of un-recoverable replication error.
2 changes: 1 addition & 1 deletion dm/tests/all_mode/run.sh
@@ -552,7 +552,7 @@ function run() {
run_sql_source1 "create table all_mode.db_error (c int primary key);"
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status $ILLEGAL_CHAR_NAME" \
"Error 1049: Unknown database" 1
"Error 1049 (42000): Unknown database" 1

# stop task, task state should be cleaned
run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
2 changes: 1 addition & 1 deletion dm/tests/check_task/run.sh
@@ -51,7 +51,7 @@ function test_privileges_can_migrate() {
run_sql_source1 "create table checktask1.test_privilege(id int primary key, b varchar(10))"
run_sql_source1 "insert into checktask1.test_privilege values (1, 'a'),(2, 'b');"
run_sql_tidb "create user 'test1'@'%' identified by '123456';"
run_sql_tidb "grant select, create, insert, update, delete, alter, drop, index on *.* to 'test1'@'%';"
run_sql_tidb "grant select, create, insert, update, delete, alter, drop, index, config on *.* to 'test1'@'%';"
run_sql_tidb "flush privileges;"
run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"start-task $cur/conf/task-priv.yaml --remove-meta" \
4 changes: 2 additions & 2 deletions dm/tests/full_mode/run.sh
@@ -53,8 +53,8 @@ function fail_acquire_global_lock() {
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status test" \
"\"stage\": \"Paused\"" 2 \
"LOCK TABLES \`full_mode\`.\`t1\` READ: Error 1044: Access denied" 1 \
"LOCK TABLES \`full_mode\`.\`t2\` READ: Error 1044: Access denied" 1
"LOCK TABLES \`full_mode\`.\`t1\` READ: Error 1044 (42000): Access denied" 1 \
"LOCK TABLES \`full_mode\`.\`t2\` READ: Error 1044 (42000): Access denied" 1

cleanup_process $*
cleanup_data full_mode
2 changes: 1 addition & 1 deletion dm/tests/handle_error/run.sh
@@ -521,7 +521,7 @@ function DM_EXEC_ERROR_SKIP_CASE() {

run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status test" \
"Error 1062: Duplicate " 1
"Error 1062 (23000): Duplicate " 1

run_sql_tidb "insert into ${db}.${tb} values(5,3,3);"
run_sql_tidb "insert into ${db}.${tb} values(6,4,4);"
3 changes: 1 addition & 2 deletions dm/tests/lightning_mode/run.sh
@@ -78,8 +78,7 @@ function run() {
"checksum mismatched, KV number in source files: 6, KV number in TiDB cluster: 3" 1 \
'"unit": "Load"' 2
run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"resume-task test" \
"\"result\": true" 3
"resume-task test"
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status test" \
'unit": "Sync"' 2
2 changes: 1 addition & 1 deletion dm/tests/many_tables/run.sh
@@ -103,7 +103,7 @@ function run() {
run_sql_tidb "select count(*) from dm_meta.test_syncer_checkpoint"
check_contains "count(*): $(($TABLE_NUM + 1))"

check_log_contains $WORK_DIR/worker1/log/dm-worker.log 'Error 8004: Transaction is too large'
check_log_contains $WORK_DIR/worker1/log/dm-worker.log 'Error 8004 (HY000): Transaction is too large'

# check https://github.com/pingcap/tiflow/issues/5063
check_time=100
2 changes: 1 addition & 1 deletion dm/tests/print_status/run.sh
@@ -74,7 +74,7 @@ function check_print_status() {

# check load unit print status
status_file=$WORK_DIR/worker1/log/loader_status.log
grep -oP "\[unit=lightning-load\] \[IsCanceled=false\] \[finished_bytes=59637\] \[total_bytes=59637\] \[progress=.*\]" $WORK_DIR/worker1/log/dm-worker.log >$status_file
grep -oP "\[unit=lightning-load\] \[IsCanceled=false\] \[finished_bytes=59834\] \[total_bytes=59834\] \[progress=.*\]" $WORK_DIR/worker1/log/dm-worker.log >$status_file
status_count=$(wc -l $status_file | awk '{print $1}')
[ $status_count -eq 1 ]
# must have a non-zero speed in log
2 changes: 1 addition & 1 deletion dm/tests/shardddl1_1/run.sh
@@ -253,7 +253,7 @@ function DM_027_CASE() {
run_sql_source1 "insert into ${shardddl1}.${tb3} values (5,6)"
run_dm_ctl_with_retry $WORK_DIR "127.0.0.1:$MASTER_PORT" \
"query-status test" \
"Error 1054: Unknown column 'val' in 'field list'" 1
"Error 1054 (42S22): Unknown column 'val' in 'field list'" 1
}

function DM_027() {
(The remaining changed files in this commit are not shown.)
