
Commit

*: use std/slices to replace exp/slices (#46433)
ref #45933
hawkingrei authored Aug 28, 2023
1 parent bfacaf8 commit c11a999
Showing 10 changed files with 47 additions and 40 deletions.
1 change: 0 additions & 1 deletion br/pkg/lightning/checkpoints/BUILD.bazel
@@ -26,7 +26,6 @@ go_library(
         "//util/sqlexec",
         "@com_github_joho_sqltocsv//:sqltocsv",
         "@com_github_pingcap_errors//:errors",
-        "@org_golang_x_exp//slices",
         "@org_uber_go_zap//:zap",
     ],
 )
14 changes: 11 additions & 3 deletions br/pkg/lightning/checkpoints/checkpoints.go
@@ -15,13 +15,15 @@
 package checkpoints

 import (
+    "cmp"
     "context"
     "database/sql"
     "encoding/json"
     "fmt"
     "io"
     "math"
     "path"
+    "slices"
     "sort"
     "strings"
     "sync"
@@ -39,7 +41,6 @@ import (
     "github.com/pingcap/tidb/parser/model"
     "github.com/pingcap/tidb/util/mathutil"
     "go.uber.org/zap"
-    "golang.org/x/exp/slices"
 )

 // CheckpointStatus is the status of a checkpoint.
@@ -246,6 +247,13 @@ func (key *ChunkCheckpointKey) String() string {
     return fmt.Sprintf("%s:%d", key.Path, key.Offset)
 }

+func (key *ChunkCheckpointKey) compare(other *ChunkCheckpointKey) int {
+    if c := cmp.Compare(key.Path, other.Path); c != 0 {
+        return c
+    }
+    return cmp.Compare(key.Offset, other.Offset)
+}
+
 func (key *ChunkCheckpointKey) less(other *ChunkCheckpointKey) bool {
     switch {
     case key.Path < other.Path:
@@ -1351,8 +1359,8 @@ func (cpdb *FileCheckpointsDB) Get(_ context.Context, tableName string) (*TableC
             })
         }

-        slices.SortFunc(engine.Chunks, func(i, j *ChunkCheckpoint) bool {
-            return i.Key.less(&j.Key)
+        slices.SortFunc(engine.Chunks, func(i, j *ChunkCheckpoint) int {
+            return i.Key.compare(&j.Key)
         })

         cp.Engines[engineID] = engine
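Editor's note on the pattern above: golang.org/x/exp/slices.SortFunc took a less(a, b) bool function, while the standard-library slices.SortFunc (Go 1.21+) takes a cmp(a, b) int function that returns negative, zero, or positive. The added ChunkCheckpointKey.compare chains cmp.Compare calls field by field. A minimal, self-contained sketch of the same pattern; the key type here is illustrative, not the one from this patch:

package main

import (
	"cmp"
	"fmt"
	"slices"
)

// key mirrors the shape of a (Path, Offset) checkpoint key; it is a
// stand-in for illustration, not the ChunkCheckpointKey from this commit.
type key struct {
	Path   string
	Offset int64
}

// compare chains cmp.Compare calls field by field: the first non-zero
// result decides the order, as in the compare method added above.
func (k key) compare(other key) int {
	if c := cmp.Compare(k.Path, other.Path); c != 0 {
		return c
	}
	return cmp.Compare(k.Offset, other.Offset)
}

func main() {
	keys := []key{{"b.csv", 0}, {"a.csv", 4096}, {"a.csv", 0}}

	// Standard-library form (Go 1.21+): the comparator returns an int.
	slices.SortFunc(keys, func(i, j key) int {
		return i.compare(j)
	})
	fmt.Println(keys) // [{a.csv 0} {a.csv 4096} {b.csv 0}]
}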
1 change: 0 additions & 1 deletion executor/BUILD.bazel
@@ -273,7 +273,6 @@ go_library(
         "@org_golang_google_grpc//credentials",
         "@org_golang_google_grpc//credentials/insecure",
         "@org_golang_google_grpc//status",
-        "@org_golang_x_exp//slices",
         "@org_golang_x_sync//errgroup",
         "@org_uber_go_atomic//:atomic",
         "@org_uber_go_zap//:zap",
25 changes: 14 additions & 11 deletions executor/inspection_result.go
@@ -15,9 +15,11 @@
 package executor

 import (
+    "cmp"
     "context"
     "fmt"
     "math"
+    "slices"
     "strconv"
     "strings"

@@ -34,7 +36,6 @@ import (
     "github.com/pingcap/tidb/util/set"
     "github.com/pingcap/tidb/util/size"
     "github.com/pingcap/tidb/util/sqlexec"
-    "golang.org/x/exp/slices"
 )

 type (
@@ -169,20 +170,22 @@ func (e *inspectionResultRetriever) retrieve(ctx context.Context, sctx sessionct
             continue
         }
         // make result stable
-        slices.SortFunc(results, func(i, j inspectionResult) bool {
-            if i.degree != j.degree {
-                return i.degree > j.degree
+        slices.SortFunc(results, func(i, j inspectionResult) int {
+            if c := cmp.Compare(i.degree, j.degree); c != 0 {
+                return -c
             }
-            if lhs, rhs := i.item, j.item; lhs != rhs {
-                return lhs < rhs
+            // lhs and rhs
+            if c := cmp.Compare(i.item, j.item); c != 0 {
+                return c
             }
-            if i.actual != j.actual {
-                return i.actual < j.actual
+            if c := cmp.Compare(i.actual, j.actual); c != 0 {
+                return c
             }
-            if lhs, rhs := i.tp, j.tp; lhs != rhs {
-                return lhs < rhs
+            // lhs and rhs
+            if c := cmp.Compare(i.tp, j.tp); c != 0 {
+                return c
             }
-            return i.instance < j.instance
+            return cmp.Compare(i.instance, j.instance)
         })
         for _, result := range results {
             if len(result.instance) == 0 {
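One detail worth noting in the hunk above: the old comparator expressed "higher degree first" as i.degree > j.degree, and the int-based version keeps that descending order by negating the cmp.Compare result (return -c) while the remaining keys stay ascending. A small sketch of that trick, using a made-up result type rather than inspectionResult:

package main

import (
	"cmp"
	"fmt"
	"slices"
)

// result is a hypothetical stand-in for inspectionResult, keeping only the
// fields needed to show the ordering.
type result struct {
	degree float64
	item   string
}

func main() {
	rs := []result{{1.0, "b"}, {3.0, "a"}, {1.0, "a"}}

	slices.SortFunc(rs, func(i, j result) int {
		// Negating the comparison sorts degree in descending order.
		if c := cmp.Compare(i.degree, j.degree); c != 0 {
			return -c
		}
		// Remaining keys are ascending.
		return cmp.Compare(i.item, j.item)
	})
	fmt.Println(rs) // [{3 a} {1 a} {1 b}]
}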
8 changes: 4 additions & 4 deletions executor/mem_reader.go
@@ -16,6 +16,7 @@ package executor

 import (
     "context"
+    "slices"

     "github.com/pingcap/errors"
     "github.com/pingcap/tidb/distsql"
@@ -33,7 +34,6 @@ import (
     "github.com/pingcap/tidb/util/codec"
     "github.com/pingcap/tidb/util/rowcodec"
     "github.com/pingcap/tidb/util/tracing"
-    "golang.org/x/exp/slices"
 )

 type memReader interface {
@@ -143,7 +143,7 @@ func (m *memIndexReader) getMemRows(ctx context.Context) ([][]types.Datum, error
     }

     if m.keepOrder && m.table.GetPartitionInfo() != nil {
-        slices.SortFunc(m.addedRows, func(a, b []types.Datum) bool {
+        slices.SortFunc(m.addedRows, func(a, b []types.Datum) int {
             ret, err1 := m.compare(m.ctx.GetSessionVars().StmtCtx, a, b)
             if err1 != nil {
                 err = err1
@@ -421,7 +421,7 @@ func (m *memTableReader) getMemRows(ctx context.Context) ([][]types.Datum, error
     }

     if m.keepOrder && m.table.GetPartitionInfo() != nil {
-        slices.SortFunc(m.addedRows, func(a, b []types.Datum) bool {
+        slices.SortFunc(m.addedRows, func(a, b []types.Datum) int {
             ret, err1 := m.compare(m.ctx.GetSessionVars().StmtCtx, a, b)
             if err1 != nil {
                 err = err1
@@ -935,7 +935,7 @@ func (m *memIndexMergeReader) getMemRows(ctx context.Context) ([][]types.Datum,
     // Didn't set keepOrder = true for memTblReader,
     // In indexMerge, non-partitioned tables also need to be reordered.
     if m.keepOrder {
-        slices.SortFunc(rows, func(a, b []types.Datum) bool {
+        slices.SortFunc(rows, func(a, b []types.Datum) int {
             ret, err1 := m.compare(m.ctx.GetSessionVars().StmtCtx, a, b)
             if err1 != nil {
                 err = err1
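The three hunks above keep an existing pattern: the comparator itself can fail (Datum comparison goes through a collator and may return an error), so the closure writes the error into a captured err variable and the caller checks it after sorting. A rough sketch of that shape under the new int-returning signature, with a toy parsing comparator standing in for compareExec.compare:

package main

import (
	"cmp"
	"fmt"
	"slices"
	"strconv"
)

// compareNumeric is a toy stand-in for a fallible comparator: it parses
// both strings and may return an error.
func compareNumeric(a, b string) (int, error) {
	x, err := strconv.Atoi(a)
	if err != nil {
		return 0, err
	}
	y, err := strconv.Atoi(b)
	if err != nil {
		return 0, err
	}
	return cmp.Compare(x, y), nil
}

func sortRows(rows []string) ([]string, error) {
	var err error
	slices.SortFunc(rows, func(a, b string) int {
		ret, err1 := compareNumeric(a, b)
		if err1 != nil {
			// Record the failure; the sort itself cannot abort early.
			err = err1
			return 0
		}
		return ret
	})
	if err != nil {
		return nil, err
	}
	return rows, nil
}

func main() {
	rows, err := sortRows([]string{"10", "2", "7"})
	fmt.Println(rows, err) // [2 7 10] <nil>
}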
19 changes: 10 additions & 9 deletions executor/union_scan.go
@@ -209,10 +209,11 @@ func (us *UnionScanExec) getOneRow(ctx context.Context) ([]types.Datum, error) {
         } else if snapshotRow == nil {
             row = addedRow
         } else {
-            isSnapshotRow, err = us.compare(us.Ctx().GetSessionVars().StmtCtx, snapshotRow, addedRow)
+            isSnapshotRowInt, err := us.compare(us.Ctx().GetSessionVars().StmtCtx, snapshotRow, addedRow)
             if err != nil {
                 return nil, err
             }
+            isSnapshotRow = isSnapshotRowInt < 0
             if isSnapshotRow {
                 row = snapshotRow
             } else {
@@ -292,26 +293,26 @@ type compareExec struct {
     handleCols plannercore.HandleCols
 }

-func (ce compareExec) compare(sctx *stmtctx.StatementContext, a, b []types.Datum) (ret bool, err error) {
+func (ce compareExec) compare(sctx *stmtctx.StatementContext, a, b []types.Datum) (ret int, err error) {
     var cmp int
     for _, colOff := range ce.usedIndex {
         aColumn := a[colOff]
         bColumn := b[colOff]
         cmp, err = aColumn.Compare(sctx, &bColumn, ce.collators[colOff])
         if err != nil {
-            return false, err
+            return 0, err
         }
         if cmp == 0 {
             continue
         }
-        if cmp > 0 && !ce.desc || cmp < 0 && ce.desc {
-            return false, nil
+        if ce.desc {
+            return -cmp, nil
         }
-        return true, nil
+        return cmp, nil
     }
     cmp, err = ce.handleCols.Compare(a, b, ce.collators)
-    if cmp > 0 && !ce.desc || cmp < 0 && ce.desc {
-        return false, err
+    if ce.desc {
+        return -cmp, err
     }
-    return true, err
+    return cmp, err
 }
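Since compareExec.compare now returns an int instead of a bool, call sites translate the result themselves: in getOneRow above, "the snapshot row comes first" becomes isSnapshotRowInt < 0, and a descending scan simply negates the per-column comparison. A compact sketch of that convention with hypothetical names, not TiDB types:

package main

import (
	"cmp"
	"fmt"
)

// compareRows compares two equal-length rows column by column; desc flips
// the sign so callers can keep reading "negative means a comes first".
func compareRows(a, b []int, desc bool) int {
	for i := range a {
		if c := cmp.Compare(a[i], b[i]); c != 0 {
			if desc {
				return -c
			}
			return c
		}
	}
	return 0
}

func main() {
	snapshotRow := []int{1, 5}
	addedRow := []int{1, 9}

	// Ascending scan: the snapshot row sorts first because 5 < 9.
	isSnapshotRow := compareRows(snapshotRow, addedRow, false) < 0
	fmt.Println(isSnapshotRow) // true

	// Descending scan: the added row should be emitted first.
	fmt.Println(compareRows(snapshotRow, addedRow, true) < 0) // false
}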
6 changes: 3 additions & 3 deletions expression/scalar_function.go
@@ -17,7 +17,7 @@ package expression
 import (
     "bytes"
     "fmt"
-    "sort"
+    "slices"
     "unsafe"

     "github.com/pingcap/errors"
@@ -515,8 +515,8 @@ func simpleCanonicalizedHashCode(sf *ScalarFunction, sc *stmtctx.StatementContex
     // encode original function name.
     sf.canonicalhashcode = codec.EncodeCompactBytes(sf.canonicalhashcode, hack.Slice(sf.FuncName.L))
     // reorder parameters hashcode, e.g. a+b and b+a should have the same hashcode here.
-    sort.Slice(argsHashCode, func(i, j int) bool {
-        return bytes.Compare(argsHashCode[i], argsHashCode[j]) <= 0
+    slices.SortFunc(argsHashCode, func(i, j []byte) int {
+        return bytes.Compare(i, j)
     })
     for _, argCode := range argsHashCode {
         sf.canonicalhashcode = append(sf.canonicalhashcode, argCode...)
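The argsHashCode change is a near-mechanical swap because bytes.Compare already returns the negative/zero/positive int that slices.SortFunc expects; it also drops the index-based closure (whose <= 0 returned true even for equal elements, which a less function normally should not). A standalone sketch of the two forms side by side, with placeholder data:

package main

import (
	"bytes"
	"fmt"
	"slices"
	"sort"
)

func main() {
	hashes := [][]byte{[]byte("bb"), []byte("aa"), []byte("cc")}

	// sort.Slice form: an index-based less function over the slice.
	old := slices.Clone(hashes)
	sort.Slice(old, func(i, j int) bool {
		return bytes.Compare(old[i], old[j]) < 0
	})

	// slices.SortFunc form: bytes.Compare already has the required
	// (a, b) int shape, so it can be passed directly.
	slices.SortFunc(hashes, bytes.Compare)

	fmt.Printf("%s %s\n", old, hashes) // [aa bb cc] [aa bb cc]
}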
1 change: 0 additions & 1 deletion parser/goyacc/BUILD.bazel
@@ -14,7 +14,6 @@ go_library(
         "@com_github_cznic_sortutil//:sortutil",
         "@com_github_cznic_strutil//:strutil",
         "@com_github_pingcap_errors//:errors",
-        "@org_golang_x_exp//slices",
         "@org_modernc_parser//yacc",
         "@org_modernc_y//:y",
     ],
2 changes: 1 addition & 1 deletion parser/goyacc/main.go
@@ -135,13 +135,13 @@ import (
     "log"
     "os"
     "runtime"
+    "slices"
     "sort"
     "strings"

     "github.com/cznic/mathutil"
     "github.com/cznic/sortutil"
     "github.com/cznic/strutil"
-    "golang.org/x/exp/slices"
     parser "modernc.org/parser/yacc"
     "modernc.org/y"
 )
10 changes: 4 additions & 6 deletions statistics/cmsketch.go
@@ -37,7 +37,6 @@ import (
     "github.com/pingcap/tidb/util/chunk"
     "github.com/pingcap/tidb/util/dbterror"
     "github.com/pingcap/tidb/util/hack"
-    "github.com/pingcap/tidb/util/mathutil"
     "github.com/pingcap/tipb/go-tipb"
     "github.com/twmb/murmur3"
 )
@@ -100,7 +99,6 @@ func newTopNHelper(sample [][]byte, numTop uint32) *topNHelper {
         }
     }
     sort.SliceStable(sorted, func(i, j int) bool { return sorted[i].cnt > sorted[j].cnt })
-
     failpoint.Inject("StabilizeV1AnalyzeTopN", func(val failpoint.Value) {
         if val.(bool) {
             // The earlier TopN entry will modify the CMSketch, therefore influence later TopN entry's row count.
@@ -144,7 +142,7 @@ func NewCMSketchAndTopN(d, w int32, sample [][]byte, numTop uint32, rowCount uin
     helper := newTopNHelper(sample, numTop)
     // rowCount is not an accurate value when fast analyzing
     // In some cases, if the user triggers fast analyze when rowCount is close to sampleSize, unexpected behavior might happen.
-    rowCount = mathutil.Max(rowCount, uint64(len(sample)))
+    rowCount = max(rowCount, uint64(len(sample)))
     estimateNDV, scaleRatio := calculateEstimateNDV(helper, rowCount)
     defaultVal := calculateDefaultVal(helper, estimateNDV, scaleRatio, rowCount)
     c, t := buildCMSAndTopN(helper, d, w, scaleRatio, defaultVal)
@@ -183,7 +181,7 @@ func calculateDefaultVal(helper *topNHelper, estimateNDV, scaleRatio, rowCount u
         return 1
     }
     estimateRemainingCount := rowCount - (helper.sampleSize-helper.onlyOnceItems)*scaleRatio
-    return estimateRemainingCount / mathutil.Max(1, estimateNDV-sampleNDV+helper.onlyOnceItems)
+    return estimateRemainingCount / max(1, estimateNDV-sampleNDV+helper.onlyOnceItems)
 }

 // MemoryUsage returns the total memory usage of a CMSketch.
@@ -387,7 +385,7 @@ func (c *CMSketch) MergeCMSketch4IncrementalAnalyze(rc *CMSketch, _ uint32) erro
     for i := range c.table {
         c.count = 0
         for j := range c.table[i] {
-            c.table[i][j] = mathutil.Max(c.table[i][j], rc.table[i][j])
+            c.table[i][j] = max(c.table[i][j], rc.table[i][j])
             c.count += uint64(c.table[i][j])
         }
     }
@@ -522,7 +520,7 @@ func (c *CMSketch) GetWidthAndDepth() (width, depth int32) {
 // CalcDefaultValForAnalyze calculates the default value for Analyze.
 // The value of it is count / NDV in CMSketch. This means count and NDV do not include the topN.
 func (c *CMSketch) CalcDefaultValForAnalyze(ndv uint64) {
-    c.defaultValue = c.count / mathutil.Max(1, ndv)
+    c.defaultValue = c.count / max(1, ndv)
 }

 // TopN stores most-common values, which is used to estimate point queries.
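The mathutil.Max removals in this file rely on the built-in min and max functions added in Go 1.21, which work across ordered types and make the small generic helper unnecessary. A quick illustration with placeholder values:

package main

import "fmt"

func main() {
	var rowCount uint64 = 3
	sampleLen := 10

	// Built-in max (Go 1.21+) replaces mathutil.Max; both operands must
	// share a type, hence the uint64 conversion, as in NewCMSketchAndTopN.
	rowCount = max(rowCount, uint64(sampleLen))
	fmt.Println(rowCount) // 10

	// It also guards divisions against a zero denominator, as in
	// CalcDefaultValForAnalyze.
	var count, ndv uint64 = 100, 0
	fmt.Println(count / max(1, ndv)) // 100
}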
