Commit 0bb02d6

restore: add metrics

lonng committed Dec 25, 2018
1 parent 470258f

Showing 4 changed files with 26 additions and 0 deletions.
go.mod: 3 additions & 0 deletions

@@ -12,8 +12,11 @@ require (
github.com/cznic/y v0.0.0-20160420101755-9fdf92d4aac0
github.com/go-sql-driver/mysql v1.4.0
github.com/gogo/protobuf v1.1.1
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57 // indirect
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6 // indirect
github.com/joho/sqltocsv v0.0.0-20180904231936-b24deec2b806
github.com/pingcap/check v0.0.0-20171206051426-1c287c953996
github.com/pingcap/errors v0.11.0
github.com/pingcap/gofail v0.0.0-20181121072748-c3f835e5a7d8
github.com/pingcap/kvproto v0.0.0-20181105061835-1b5d69cd1d26
github.com/pingcap/parser v0.0.0-20181113072426-4a9a1b13b591
lightning/metric/metric.go: 16 additions & 0 deletions

@@ -102,6 +102,22 @@ var (
Buckets: prometheus.ExponentialBuckets(1024, 2, 8),
},
)
ChunkParserReadBlockSecondsHistogram = prometheus.NewHistogram(
prometheus.HistogramOpts{
Namespace: "lightning",
Name: "chunk_parser_read_block_seconds",
Help: "time needed to chunk parser read a block",
Buckets: prometheus.ExponentialBuckets(0.001, 3.1622776601683795, 10),
},
)
ChunkParserReadRowSecondsHistogram = prometheus.NewHistogram(
prometheus.HistogramOpts{
Namespace: "lightning",
Name: "chunk_parser_read_row_seconds",
Help: "time needed to chunk parser read a row",
Buckets: prometheus.ExponentialBuckets(0.001, 3.1622776601683795, 10),
},
)
BlockEncodeSecondsHistogram = prometheus.NewHistogram(
prometheus.HistogramOpts{
Namespace: "lightning",
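
For context on the bucket layout used by the two new histograms: prometheus.ExponentialBuckets(0.001, 3.1622776601683795, 10) produces ten upper bounds that grow by a factor of sqrt(10), i.e. two buckets per decade from 1 ms up to roughly 31.6 s. A minimal sketch that prints the resulting boundaries (a standalone illustration, not part of the commit):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Same arguments as the new chunk-parser histograms:
	// start = 1ms, factor = sqrt(10), count = 10 buckets.
	buckets := prometheus.ExponentialBuckets(0.001, 3.1622776601683795, 10)
	for _, upper := range buckets {
		fmt.Printf("%.4gs\n", upper)
	}
	// Prints 0.001s, 0.003162s, 0.01s, ... up to roughly 31.62s,
	// i.e. two buckets per decade from 1ms to ~31.6s.
}
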
lightning/mydump/parser.go: 5 additions & 0 deletions

@@ -3,8 +3,11 @@ package mydump
import (
"bytes"
"io"
"time"

"github.com/pkg/errors"

"github.com/pingcap/tidb-lightning/lightning/metric"
)

// ChunkParser is a parser of the data files (the file containing only INSERT
@@ -81,6 +84,7 @@ const (
)

func (parser *ChunkParser) readBlock() error {
startTime := time.Now()
n, err := io.ReadFull(parser.reader, parser.blockBuf)
switch err {
case io.ErrUnexpectedEOF, io.EOF:
@@ -95,6 +99,7 @@ func (parser *ChunkParser) readBlock() error {
parser.appendBuf.Write(parser.remainBuf.Bytes())
parser.appendBuf.Write(parser.blockBuf[:n])
parser.buf = parser.appendBuf.Bytes()
metric.ChunkParserReadBlockSecondsHistogram.Observe(time.Since(startTime).Seconds())
return nil
default:
return errors.Trace(err)
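
The readBlock change above follows the usual client_golang timing pattern: capture time.Now() before the read and Observe the elapsed seconds once the block has been consumed, so failed reads are not recorded. A minimal, self-contained sketch of that pattern (readBlock and doRead here are hypothetical stand-ins, not lightning code; the histogram options are copied from the diff):

package main

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

// readBlockSeconds stands in for metric.ChunkParserReadBlockSecondsHistogram.
var readBlockSeconds = prometheus.NewHistogram(prometheus.HistogramOpts{
	Namespace: "lightning",
	Name:      "chunk_parser_read_block_seconds",
	Help:      "time needed to chunk parser read a block",
	Buckets:   prometheus.ExponentialBuckets(0.001, 3.1622776601683795, 10),
})

func readBlock() error {
	startTime := time.Now()
	if err := doRead(); err != nil { // doRead stands in for io.ReadFull on the data file
		return err // as in the diff, the error path is not observed
	}
	readBlockSeconds.Observe(time.Since(startTime).Seconds())
	return nil
}

func doRead() error { time.Sleep(2 * time.Millisecond); return nil }

func main() {
	prometheus.MustRegister(readBlockSeconds)
	_ = readBlock()
}
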
lightning/restore/restore.go: 2 additions & 0 deletions

@@ -1355,6 +1355,7 @@ func (cr *chunkRestore) restore(
var sep byte = ' '
readLoop:
for cr.parser.Pos() < endOffset {
readRowStartTime := time.Now()
err := cr.parser.ReadRow()
switch errors.Cause(err) {
case nil:
@@ -1369,6 +1370,7 @@
buffer.WriteString(" VALUES ")
sep = ','
}
metric.ChunkParserReadRowSecondsHistogram.Observe(time.Since(readRowStartTime).Seconds())
lastRow := cr.parser.LastRow()
if cr.chunk.ShouldIncludeRowID {
buffer.Write(lastRow.Row[:len(lastRow.Row)-1])
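
Not shown in this diff: the histograms only become scrapeable once they are registered and served over HTTP. A hedged sketch of the standard client_golang exposure path (the endpoint path and listen address are illustrative assumptions, not taken from lightning's configuration):

package main

import (
	"log"
	"net/http"

	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// promhttp.Handler serves everything registered with the default
	// prometheus registry, so once the new histograms are registered
	// (e.g. via prometheus.MustRegister) they show up under /metrics.
	http.Handle("/metrics", promhttp.Handler())
	log.Fatal(http.ListenAndServe(":8080", nil)) // address is an illustrative assumption
}
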
