Skip to content

Commit

Permalink
Cherry-pick pingcap#35326 to release-5.4
Browse files Browse the repository at this point in the history
Signed-off-by: ti-srebot <ti-srebot@pingcap.com>
  • Loading branch information
xuyifangreeneyes authored and ti-srebot committed Jun 20, 2022
1 parent 4d888ce commit 58612a7
Showing 1 changed file with 57 additions and 19 deletions.
76 changes: 57 additions & 19 deletions statistics/handle/handle.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import (
"fmt"
"sort"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
Expand Down Expand Up @@ -1067,6 +1068,7 @@ func (h *Handle) SaveTableStatsToStorage(results *statistics.AnalyzeResults, nee
}
}
// 2. Save histograms.
const maxInsertLength = 1024 * 1024
for _, result := range results.Ars {
for i, hg := range result.Hist {
// It's normal virtual column, skip it.
Expand All @@ -1090,8 +1092,26 @@ func (h *Handle) SaveTableStatsToStorage(results *statistics.AnalyzeResults, nee
return err
}
if topN := result.TopNs[i]; topN != nil {
for _, meta := range topN.TopN {
if _, err = exec.ExecuteInternal(ctx, "insert into mysql.stats_top_n (table_id, is_index, hist_id, value, count) values (%?, %?, %?, %?, %?)", tableID, result.IsIndex, hg.ID, meta.Encoded, meta.Count); err != nil {
for j := 0; j < len(topN.TopN); {
end := j + batchInsertSize
if end > len(topN.TopN) {
end = len(topN.TopN)
}
sql := new(strings.Builder)
sql.WriteString("insert into mysql.stats_top_n (table_id, is_index, hist_id, value, count) values ")
for k := j; k < end; k++ {
val := sqlexec.MustEscapeSQL("(%?, %?, %?, %?, %?)", tableID, result.IsIndex, hg.ID, topN.TopN[k].Encoded, topN.TopN[k].Count)
if k > j {
val = "," + val
}
if k > j && sql.Len()+len(val) > maxInsertLength {
end = k
break
}
sql.WriteString(val)
}
j = end
if _, err = exec.ExecuteInternal(ctx, sql.String()); err != nil {
return err
}
}
Expand All @@ -1113,25 +1133,43 @@ func (h *Handle) SaveTableStatsToStorage(results *statistics.AnalyzeResults, nee
}
sc := h.mu.ctx.GetSessionVars().StmtCtx
var lastAnalyzePos []byte
for j := range hg.Buckets {
count := hg.Buckets[j].Count
if j > 0 {
count -= hg.Buckets[j-1].Count
}
var upperBound types.Datum
upperBound, err = hg.GetUpper(j).ConvertTo(sc, types.NewFieldType(mysql.TypeBlob))
if err != nil {
return err
for j := 0; j < len(hg.Buckets); {
end := j + batchInsertSize
if end > len(hg.Buckets) {
end = len(hg.Buckets)
}
if j == len(hg.Buckets)-1 {
lastAnalyzePos = upperBound.GetBytes()
}
var lowerBound types.Datum
lowerBound, err = hg.GetLower(j).ConvertTo(sc, types.NewFieldType(mysql.TypeBlob))
if err != nil {
return err
sql := new(strings.Builder)
sql.WriteString("insert into mysql.stats_buckets (table_id, is_index, hist_id, bucket_id, count, repeats, lower_bound, upper_bound, ndv) values ")
for k := j; k < end; k++ {
count := hg.Buckets[k].Count
if k > 0 {
count -= hg.Buckets[k-1].Count
}
var upperBound types.Datum
upperBound, err = hg.GetUpper(k).ConvertTo(sc, types.NewFieldType(mysql.TypeBlob))
if err != nil {
return err
}
if k == len(hg.Buckets)-1 {
lastAnalyzePos = upperBound.GetBytes()
}
var lowerBound types.Datum
lowerBound, err = hg.GetLower(k).ConvertTo(sc, types.NewFieldType(mysql.TypeBlob))
if err != nil {
return err
}
val := sqlexec.MustEscapeSQL("(%?, %?, %?, %?, %?, %?, %?, %?, %?)", tableID, result.IsIndex, hg.ID, k, count, hg.Buckets[k].Repeat, lowerBound.GetBytes(), upperBound.GetBytes(), hg.Buckets[k].NDV)
if k > j {
val = "," + val
}
if k > j && sql.Len()+len(val) > maxInsertLength {
end = k
break
}
sql.WriteString(val)
}
if _, err = exec.ExecuteInternal(ctx, "insert into mysql.stats_buckets(table_id, is_index, hist_id, bucket_id, count, repeats, lower_bound, upper_bound, ndv) values(%?, %?, %?, %?, %?, %?, %?, %?, %?)", tableID, result.IsIndex, hg.ID, j, count, hg.Buckets[j].Repeat, lowerBound.GetBytes(), upperBound.GetBytes(), hg.Buckets[j].NDV); err != nil {
j = end
if _, err = exec.ExecuteInternal(ctx, sql.String()); err != nil {
return err
}
}
Expand Down

0 comments on commit 58612a7

Please sign in to comment.