This repository has been archived by the owner on Jul 24, 2024. It is now read-only.

*: refine logs #723

Merged
merged 8 commits on Jan 28, 2021
Changes from 6 commits
4 changes: 2 additions & 2 deletions cmd/cmd.go
@@ -18,7 +18,7 @@ import (
"github.com/spf13/cobra"

"github.com/pingcap/br/pkg/gluetidb"
brlogutil "github.com/pingcap/br/pkg/logutil"
"github.com/pingcap/br/pkg/redact"
"github.com/pingcap/br/pkg/summary"
"github.com/pingcap/br/pkg/task"
"github.com/pingcap/br/pkg/utils"
@@ -127,7 +127,7 @@ func Init(cmd *cobra.Command) (err error) {
err = e
return
}
brlogutil.InitRedact(redactLog || redactInfoLog)
redact.InitRedact(redactLog || redactInfoLog)

slowLogFilename, e := cmd.Flags().GetString(FlagSlowLogFile)
if e != nil {
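Here redact.InitRedact flips a process-wide switch that the redact.Key and redact.String helpers used elsewhere in this PR consult before formatting values. The sketch below is only an illustration of how such a package could be laid out, not the actual pkg/redact source; the atomic flag and the "?" placeholder are assumptions.

```go
// Illustrative only: a minimal redaction package in the spirit of pkg/redact.
// The atomic flag and the "?" placeholder are assumptions, not BR's code.
package redact

import (
	"encoding/hex"
	"strings"
	"sync/atomic"
)

var enabled int32

// InitRedact turns log redaction on or off for the whole process.
func InitRedact(redact bool) {
	var v int32
	if redact {
		v = 1
	}
	atomic.StoreInt32(&enabled, v)
}

// String returns a placeholder instead of the raw value when redaction is on.
func String(s string) string {
	if atomic.LoadInt32(&enabled) == 1 {
		return "?"
	}
	return s
}

// Key renders a key as upper-case hex, or hides it when redaction is on.
func Key(key []byte) string {
	return String(strings.ToUpper(hex.EncodeToString(key)))
}
```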
4 changes: 2 additions & 2 deletions cmd/debug.go
@@ -107,8 +107,8 @@ func newCheckSumCommand() *cobra.Command {
zap.Uint64("totalBytes", file.GetTotalBytes()),
zap.Uint64("startVersion", file.GetStartVersion()),
zap.Uint64("endVersion", file.GetEndVersion()),
zap.Stringer("startKey", logutil.WrapKey(file.GetStartKey())),
zap.Stringer("endKey", logutil.WrapKey(file.GetEndKey())),
logutil.Key("startKey", file.GetStartKey()),
logutil.Key("endKey", file.GetEndKey()),
)

var data []byte
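This hunk swaps a lazily formatted zap.Stringer field for the plain string field returned by logutil.Key. The standalone snippet below is not BR code; hexKey is a local stand-in for the Stringer that WrapKey used to return. It shows that both styles render the same upper-hex text, so the new helper mainly centralizes the formatting and the redaction decision.

```go
package main

import (
	"encoding/hex"
	"strings"

	"go.uber.org/zap"
)

// hexKey is a local stand-in for the fmt.Stringer that WrapKey returned.
type hexKey []byte

func (k hexKey) String() string { return strings.ToUpper(hex.EncodeToString(k)) }

func main() {
	logger := zap.NewExample()
	key := []byte{0x74, 0x80, 0xff}

	// Old style: formatting is deferred until the field is encoded.
	logger.Info("checksum range", zap.Stringer("startKey", hexKey(key)))
	// New style: the string is computed up front, as logutil.Key does.
	logger.Info("checksum range", zap.String("startKey", strings.ToUpper(hex.EncodeToString(key))))
}
```

Both statements log the same "startKey": "7480FF" field.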
33 changes: 17 additions & 16 deletions pkg/backup/client.go
@@ -41,6 +41,7 @@ import (
berrors "github.com/pingcap/br/pkg/errors"
"github.com/pingcap/br/pkg/glue"
"github.com/pingcap/br/pkg/logutil"
"github.com/pingcap/br/pkg/redact"
"github.com/pingcap/br/pkg/rtree"
"github.com/pingcap/br/pkg/storage"
"github.com/pingcap/br/pkg/summary"
@@ -518,10 +519,10 @@ func (bc *Client) BackupRange(
}
}()
log.Info("backup started",
zap.Stringer("StartKey", logutil.WrapKey(startKey)),
zap.Stringer("EndKey", logutil.WrapKey(endKey)),
zap.Uint64("RateLimit", req.RateLimit),
zap.Uint32("Concurrency", req.Concurrency))
logutil.Key("startKey", startKey),
logutil.Key("endKey", endKey),
zap.Uint64("rateLimit", req.RateLimit),
zap.Uint32("concurrency", req.Concurrency))

var allStores []*metapb.Store
allStores, err = conn.GetAllTiKVStores(ctx, bc.mgr.GetPDClient(), conn.SkipTiFlash)
@@ -554,8 +555,8 @@ func (bc *Client) BackupRange(

if req.IsRawKv {
log.Info("backup raw ranges",
zap.Stringer("startKey", logutil.WrapKey(startKey)),
zap.Stringer("endKey", logutil.WrapKey(endKey)),
logutil.Key("startKey", startKey),
logutil.Key("endKey", endKey),
zap.String("cf", req.Cf))
} else {
log.Info("backup time range",
@@ -590,14 +591,14 @@ func (bc *Client) findRegionLeader(ctx context.Context, key []byte) (*metapb.Pee
}
if region.Leader != nil {
log.Info("find leader",
zap.Reflect("Leader", region.Leader), zap.Stringer("Key", logutil.WrapKey(key)))
zap.Reflect("Leader", region.Leader), logutil.Key("key", key))
return region.Leader, nil
}
log.Warn("no region found", zap.Stringer("Key", logutil.WrapKey(key)))
log.Warn("no region found", logutil.Key("key", key))
time.Sleep(time.Millisecond * time.Duration(100*i))
continue
}
log.Error("can not find leader", zap.Stringer("key", logutil.WrapKey(key)))
log.Error("can not find leader", logutil.Key("key", key))
return nil, errors.Annotatef(berrors.ErrBackupNoLeader, "can not find leader")
}

@@ -687,8 +688,8 @@ func (bc *Client) fineGrainedBackup(
zap.Reflect("error", resp.Error))
}
log.Info("put fine grained range",
zap.Stringer("StartKey", logutil.WrapKey(resp.StartKey)),
zap.Stringer("EndKey", logutil.WrapKey(resp.EndKey)),
logutil.Key("startKey", resp.StartKey),
logutil.Key("endKey", resp.EndKey),
)
rangeTree.Put(resp.StartKey, resp.EndKey, resp.Files)

@@ -851,7 +852,7 @@ func SendBackup(
if span := opentracing.SpanFromContext(ctx); span != nil && span.Tracer() != nil {
span1 := span.Tracer().StartSpan(
fmt.Sprintf("Client.SendBackup, storeID = %d, StartKey = %s, EndKey = %s",
storeID, logutil.WrapKey(req.StartKey), logutil.WrapKey(req.EndKey)),
storeID, redact.Key(req.StartKey), redact.Key(req.EndKey)),
opentracing.ChildOf(span.Context()))
defer span1.Finish()
ctx = opentracing.ContextWithSpan(ctx, span1)
@@ -861,8 +862,8 @@ func SendBackup(
backupLoop:
for retry := 0; retry < backupRetryTimes; retry++ {
log.Info("try backup",
zap.Stringer("StartKey", logutil.WrapKey(req.StartKey)),
zap.Stringer("EndKey", logutil.WrapKey(req.EndKey)),
logutil.Key("startKey", req.StartKey),
logutil.Key("endKey", req.EndKey),
zap.Uint64("storeID", storeID),
zap.Int("retry time", retry),
)
@@ -911,8 +912,8 @@ backupLoop:
}
// TODO: handle errors in the resp.
log.Info("range backuped",
zap.Stringer("StartKey", logutil.WrapKey(resp.GetStartKey())),
zap.Stringer("EndKey", logutil.WrapKey(resp.GetEndKey())))
logutil.Key("startKey", resp.GetStartKey()),
logutil.Key("endKey", resp.GetEndKey()))
err = respFn(resp)
if err != nil {
return errors.Trace(err)
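Most of the churn in this file is the same mechanical swap to logutil.Key. What it buys is a single place where redaction is applied. The standalone sketch below shows the effect; keyField is a stand-in for logutil.Key and the boolean mimics the switch set by redact.InitRedact, so the exact placeholder text is an assumption.

```go
package main

import (
	"encoding/hex"
	"strings"

	"go.uber.org/zap"
)

// redacted stands in for the process-wide switch set by redact.InitRedact.
var redacted bool

// keyField mirrors the shape of logutil.Key: format the key as upper-case hex,
// or hide it when redaction is enabled.
func keyField(name string, key []byte) zap.Field {
	if redacted {
		return zap.String(name, "?")
	}
	return zap.String(name, strings.ToUpper(hex.EncodeToString(key)))
}

func main() {
	logger := zap.NewExample()
	startKey := []byte{0x74, 0x80, 0x00, 0x01}

	logger.Info("backup started", keyField("startKey", startKey)) // "startKey": "74800001"
	redacted = true
	logger.Info("backup started", keyField("startKey", startKey)) // "startKey": "?"
}
```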
4 changes: 2 additions & 2 deletions pkg/kv/kv.go
@@ -27,7 +27,7 @@ import (
"go.uber.org/zap"
"go.uber.org/zap/zapcore"

"github.com/pingcap/br/pkg/logutil"
"github.com/pingcap/br/pkg/redact"
)

var extraHandleColumnInfo = model.NewExtraHandleColInfo()
@@ -129,7 +129,7 @@ func (row rowArrayMarshaler) MarshalLogArray(encoder zapcore.ArrayEncoder) error
}
_ = encoder.AppendObject(zapcore.ObjectMarshalerFunc(func(enc zapcore.ObjectEncoder) error {
enc.AddString("kind", kindStr[kind])
enc.AddString("val", logutil.RedactString(str))
enc.AddString("val", redact.String(str))
return nil
}))
}
9 changes: 6 additions & 3 deletions pkg/kv/kv_test.go
@@ -17,9 +17,9 @@ import (
"testing"

. "github.com/pingcap/check"
"github.com/pingcap/log"
"github.com/pingcap/tidb/types"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)

type rowSuite struct{}
@@ -37,6 +37,9 @@ func (s *rowSuite) TestMarshal(c *C) {
dats[2] = types.MaxValueDatum()
dats[3] = types.MinNotNullDatum()

// save coverage for MarshalLogArray
log.Info("row marshal", zap.Array("row", rowArrayMarshaler(dats)))
encoder := zapcore.NewConsoleEncoder(zapcore.EncoderConfig{})
out, err := encoder.EncodeEntry(zapcore.Entry{}, []zap.Field{zap.Array("row", rowArrayMarshaler(dats))})
c.Assert(err, IsNil)
c.Assert(out.String(), Equals,
"{\"row\": [{\"kind\": \"int64\", \"val\": \"1\"}, {\"kind\": \"null\", \"val\": \"NULL\"}, {\"kind\": \"max\", \"val\": \"+inf\"}, {\"kind\": \"min\", \"val\": \"-inf\"}]}\n")
}
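The test now renders the marshaled row through a console encoder and asserts the exact output, rather than only calling log.Info for coverage. The empty zapcore.EncoderConfig is what keeps the expected string short: with no time, level, caller, or message keys configured, EncodeEntry emits only the structured fields. A standalone illustration, separate from the test:

```go
package main

import (
	"fmt"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

func main() {
	// With an empty EncoderConfig there is no time/level/caller/message
	// preamble, so only the fields themselves are rendered.
	enc := zapcore.NewConsoleEncoder(zapcore.EncoderConfig{})
	buf, err := enc.EncodeEntry(zapcore.Entry{}, []zap.Field{
		zap.String("kind", "int64"),
		zap.String("val", "1"),
	})
	if err != nil {
		panic(err)
	}
	fmt.Print(buf.String()) // {"kind": "int64", "val": "1"}
}
```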
161 changes: 98 additions & 63 deletions pkg/logutil/logging.go
@@ -8,23 +8,23 @@ import (
"strings"

"github.com/google/uuid"
"github.com/pingcap/errors"
"github.com/pingcap/kvproto/pkg/backup"
"github.com/pingcap/kvproto/pkg/import_sstpb"
"github.com/pingcap/kvproto/pkg/metapb"
"github.com/pingcap/tidb/kv"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"

"github.com/pingcap/br/pkg/redact"
)

type zapMarshalFileMixIn struct{ *backup.File }
type zapFileMarshaler struct{ *backup.File }

func (file zapMarshalFileMixIn) MarshalLogObject(enc zapcore.ObjectEncoder) error {
func (file zapFileMarshaler) MarshalLogObject(enc zapcore.ObjectEncoder) error {
enc.AddString("name", file.GetName())
enc.AddString("CF", file.GetCf())
enc.AddString("sha256", hex.EncodeToString(file.GetSha256()))
enc.AddString("startKey", WrapKey(file.GetStartKey()).String())
enc.AddString("endKey", WrapKey(file.GetEndKey()).String())
enc.AddString("startKey", redact.Key(file.GetStartKey()))
enc.AddString("endKey", redact.Key(file.GetEndKey()))
enc.AddUint64("startVersion", file.GetStartVersion())
enc.AddUint64("endVersion", file.GetEndVersion())
enc.AddUint64("totalKvs", file.GetTotalKvs())
@@ -33,107 +33,142 @@ func (file zapMarshalFileMixIn) MarshalLogObject(enc zapcore.ObjectEncoder) erro
return nil
}

type zapMarshalRewriteRuleMixIn struct{ *import_sstpb.RewriteRule }
type zapFilesMarshaler []*backup.File

func (fs zapFilesMarshaler) MarshalLogArray(encoder zapcore.ArrayEncoder) error {
for _, file := range fs {
encoder.AppendString(file.GetName())
}
return nil
}

func (fs zapFilesMarshaler) MarshalLogObject(encoder zapcore.ObjectEncoder) error {
total := len(fs)
encoder.AddInt("total", total)
if total <= 4 {
_ = encoder.AddArray("files", fs)
} else {
encoder.AddString("files", fmt.Sprintf("%s ...(skip %d)... %s",
fs[0].GetName(), total-2, fs[total-1].GetName()))
}

totalKVs := uint64(0)
totalSize := uint64(0)
for _, file := range fs {
totalKVs += file.GetTotalKvs()
totalSize += file.GetTotalBytes()
}
encoder.AddUint64("totalKVs", totalKVs)
encoder.AddUint64("totalBytes", totalSize)
encoder.AddInt("totalFileCount", len(fs))
return nil
}

// File make the zap fields for a file.
func File(file *backup.File) zapcore.Field {
return zap.Object("file", zapFileMarshaler{file})
}

// Files make the zap field for a set of file.
func Files(fs []*backup.File) zapcore.Field {
return zap.Object("files", zapFilesMarshaler(fs))
}

type zapRewriteRuleMarshaler struct{ *import_sstpb.RewriteRule }

func (rewriteRule zapMarshalRewriteRuleMixIn) MarshalLogObject(enc zapcore.ObjectEncoder) error {
func (rewriteRule zapRewriteRuleMarshaler) MarshalLogObject(enc zapcore.ObjectEncoder) error {
enc.AddString("oldKeyPrefix", hex.EncodeToString(rewriteRule.GetOldKeyPrefix()))
enc.AddString("newKeyPrefix", hex.EncodeToString(rewriteRule.GetNewKeyPrefix()))
enc.AddUint64("newTimestamp", rewriteRule.GetNewTimestamp())
return nil
}

type zapMarshalRegionMixIn struct{ *metapb.Region }
// RewriteRule make the zap fields for a rewrite rule.
func RewriteRule(rewriteRule *import_sstpb.RewriteRule) zapcore.Field {
return zap.Object("rewriteRule", zapRewriteRuleMarshaler{rewriteRule})
}

func (region zapMarshalRegionMixIn) MarshalLogObject(enc zapcore.ObjectEncoder) error {
type zapMarshalRegionMarshaler struct{ *metapb.Region }

func (region zapMarshalRegionMarshaler) MarshalLogObject(enc zapcore.ObjectEncoder) error {
peers := make([]string, 0, len(region.GetPeers()))
for _, peer := range region.GetPeers() {
peers = append(peers, peer.String())
}
enc.AddUint64("ID", region.Id)
enc.AddString("startKey", WrapKey(region.GetStartKey()).String())
enc.AddString("endKey", WrapKey(region.GetEndKey()).String())
enc.AddString("startKey", redact.Key(region.GetStartKey()))
enc.AddString("endKey", redact.Key(region.GetEndKey()))
enc.AddString("epoch", region.GetRegionEpoch().String())
enc.AddString("peers", strings.Join(peers, ","))
return nil
}

type zapMarshalSSTMetaMixIn struct{ *import_sstpb.SSTMeta }
// Region make the zap fields for a region.
func Region(region *metapb.Region) zapcore.Field {
return zap.Object("region", zapMarshalRegionMarshaler{region})
}

// Leader make the zap fields for a peer.
// nolint:interfacer
func Leader(peer *metapb.Peer) zapcore.Field {
return zap.String("leader", peer.String())
}

type zapSSTMetaMarshaler struct{ *import_sstpb.SSTMeta }

func (sstMeta zapMarshalSSTMetaMixIn) MarshalLogObject(enc zapcore.ObjectEncoder) error {
func (sstMeta zapSSTMetaMarshaler) MarshalLogObject(enc zapcore.ObjectEncoder) error {
enc.AddString("CF", sstMeta.GetCfName())
enc.AddBool("endKeyExclusive", sstMeta.EndKeyExclusive)
enc.AddUint32("CRC32", sstMeta.Crc32)
enc.AddUint64("length", sstMeta.Length)
enc.AddUint64("regionID", sstMeta.RegionId)
enc.AddString("regionEpoch", sstMeta.RegionEpoch.String())
enc.AddString("rangeStart", WrapKey(sstMeta.GetRange().GetStart()).String())
enc.AddString("rangeEnd", WrapKey(sstMeta.GetRange().GetEnd()).String())
enc.AddString("startKey", redact.Key(sstMeta.GetRange().GetStart()))
enc.AddString("endKey", redact.Key(sstMeta.GetRange().GetEnd()))

sstUUID, err := uuid.FromBytes(sstMeta.GetUuid())
if err != nil {
return errors.Trace(err)
enc.AddString("UUID", fmt.Sprintf("invalid UUID %s", hex.EncodeToString(sstMeta.GetUuid())))
} else {
enc.AddString("UUID", sstUUID.String())
}
enc.AddString("UUID", sstUUID.String())
return nil
}

type zapArrayMarshalKeysMixIn [][]byte
// SSTMeta make the zap fields for a SST meta.
func SSTMeta(sstMeta *import_sstpb.SSTMeta) zapcore.Field {
return zap.Object("sstMeta", zapSSTMetaMarshaler{sstMeta})
}

type zapKeysMarshaler [][]byte

func (keys zapArrayMarshalKeysMixIn) MarshalLogArray(enc zapcore.ArrayEncoder) error {
func (keys zapKeysMarshaler) MarshalLogArray(encoder zapcore.ArrayEncoder) error {
for _, key := range keys {
enc.AppendString(WrapKey(key).String())
encoder.AppendString(redact.Key(key))
}
return nil
}

type files []*backup.File

func (fs files) MarshalLogObject(encoder zapcore.ObjectEncoder) error {
totalKVs := uint64(0)
totalSize := uint64(0)
for _, file := range fs {
totalKVs += file.GetTotalKvs()
totalSize += file.GetTotalBytes()
func (keys zapKeysMarshaler) MarshalLogObject(encoder zapcore.ObjectEncoder) error {
total := len(keys)
encoder.AddInt("total", total)
if total <= 4 {
_ = encoder.AddArray("keys", keys)
} else {
encoder.AddString("keys", fmt.Sprintf("%s ...(skip %d)... %s",
redact.Key(keys[0]), total-2, redact.Key(keys[total-1])))
}
encoder.AddUint64("totalKVs", totalKVs)
encoder.AddUint64("totalBytes", totalSize)
encoder.AddInt("totalFileCount", len(fs))
return nil
}

// WrapKey wrap a key as a Stringer that can print proper upper hex format.
func WrapKey(key []byte) fmt.Stringer {
return RedactStringer(kv.Key(key))
}

// WrapKeys wrap keys as an ArrayMarshaler that can print proper upper hex format.
func WrapKeys(keys [][]byte) zapcore.ArrayMarshaler {
return zapArrayMarshalKeysMixIn(keys)
// Key constructs a field that carries upper hex format key.
func Key(fieldKey string, key []byte) zap.Field {
return zap.String(fieldKey, redact.Key(key))
}

// RewriteRule make the zap fields for a rewrite rule.
func RewriteRule(rewriteRule *import_sstpb.RewriteRule) zapcore.Field {
return zap.Object("rewriteRule", zapMarshalRewriteRuleMixIn{rewriteRule})
}

// Region make the zap fields for a region.
func Region(region *metapb.Region) zapcore.Field {
return zap.Object("region", zapMarshalRegionMixIn{region})
}

// File make the zap fields for a file.
func File(file *backup.File) zapcore.Field {
return zap.Object("file", zapMarshalFileMixIn{file})
}

// SSTMeta make the zap fields for a SST meta.
func SSTMeta(sstMeta *import_sstpb.SSTMeta) zapcore.Field {
return zap.Object("sstMeta", zapMarshalSSTMetaMixIn{sstMeta})
}

// Files make the zap field for a set of file.
func Files(fs []*backup.File) zapcore.Field {
return zap.Object("fs", files(fs))
// Keys constructs a field that carries upper hex format keys.
func Keys(keys [][]byte) zapcore.Field {
return zap.Object("keys", zapKeysMarshaler(keys))
}

// ShortError make the zap field to display error without verbose representation (e.g. the stack trace).
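The new zapFilesMarshaler and zapKeysMarshaler summarize long slices instead of dumping every element: up to four entries are logged as an array, while anything longer collapses to the first and last entries plus a skip count (files additionally report aggregate KV and byte totals). A malformed SST UUID is also logged as an "invalid UUID ..." string now instead of aborting the marshaler with an error. The standalone sketch below reproduces the summarization shape with a local type; the key values are made up.

```go
package main

import (
	"fmt"

	"go.uber.org/zap"
	"go.uber.org/zap/zapcore"
)

// keys mimics zapKeysMarshaler: an array marshaler plus an object marshaler
// that summarizes once the slice grows past four elements.
type keys [][]byte

func (ks keys) MarshalLogArray(enc zapcore.ArrayEncoder) error {
	for _, k := range ks {
		enc.AppendString(fmt.Sprintf("%X", k))
	}
	return nil
}

func (ks keys) MarshalLogObject(enc zapcore.ObjectEncoder) error {
	total := len(ks)
	enc.AddInt("total", total)
	if total <= 4 {
		return enc.AddArray("keys", ks)
	}
	enc.AddString("keys", fmt.Sprintf("%X ...(skip %d)... %X", ks[0], total-2, ks[total-1]))
	return nil
}

func main() {
	logger := zap.NewExample()
	many := keys{{0x01}, {0x02}, {0x03}, {0x04}, {0x05}, {0x06}}
	// The "keys" field renders as {"total":6,"keys":"01 ...(skip 4)... 06"}.
	logger.Info("skip keys", zap.Object("keys", many))
}
```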