Skip to content

Commit

Permalink
Merge branch 'master' into ttl_session_rollback
Browse files Browse the repository at this point in the history
  • Loading branch information
lcwangchao authored Dec 14, 2022
2 parents 1072a0c + 1e7c552 commit 65e8ca6
Show file tree
Hide file tree
Showing 11 changed files with 273 additions and 44 deletions.
14 changes: 14 additions & 0 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Dependabot configuration (schema version 2).
version: 2
updates:
  # Watch the Go module graph at the repository root.
  - package-ecosystem: "gomod"
    directory: "/"
    # Check for updates once a week, Friday 18:00 Asia/Shanghai.
    schedule:
      interval: "weekly"
      day: "friday"
      time: "18:00"
      timezone: "Asia/Shanghai"
    # Only raise PRs for these dependencies; everything else is updated manually.
    allow:
      - dependency-name: "golang.org/*"
      - dependency-name: "github.com/golangci/golangci-lint"
    # At most two Dependabot PRs open at a time.
    open-pull-requests-limit: 2

2 changes: 1 addition & 1 deletion executor/showtest/show_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -1515,7 +1515,7 @@ func TestShowBuiltin(t *testing.T) {
res := tk.MustQuery("show builtins;")
require.NotNil(t, res)
rows := res.Rows()
const builtinFuncNum = 283
const builtinFuncNum = 284
require.Equal(t, builtinFuncNum, len(rows))
require.Equal(t, rows[0][0].(string), "abs")
require.Equal(t, rows[builtinFuncNum-1][0].(string), "yearweek")
Expand Down
34 changes: 18 additions & 16 deletions executor/slow_query_sql_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,8 +46,6 @@ func TestSlowQueryWithoutSlowLog(t *testing.T) {
}

func TestSlowQuerySensitiveQuery(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
originCfg := config.GetGlobalConfig()
newCfg := *originCfg

Expand All @@ -57,11 +55,15 @@ func TestSlowQuerySensitiveQuery(t *testing.T) {
newCfg.Log.SlowQueryFile = f.Name()
config.StoreGlobalConfig(&newCfg)
defer func() {
tk.MustExec("set tidb_slow_log_threshold=300;")
config.StoreGlobalConfig(originCfg)
require.NoError(t, os.Remove(newCfg.Log.SlowQueryFile))
}()
require.NoError(t, logutil.InitLogger(newCfg.Log.ToLogConfig()))
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
defer func() {
tk.MustExec("set tidb_slow_log_threshold=300;")
}()

tk.MustExec(fmt.Sprintf("set @@tidb_slow_query_file='%v'", f.Name()))
tk.MustExec("set tidb_slow_log_threshold=0;")
Expand All @@ -80,8 +82,6 @@ func TestSlowQuerySensitiveQuery(t *testing.T) {
}

func TestSlowQueryPrepared(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
originCfg := config.GetGlobalConfig()
newCfg := *originCfg

Expand All @@ -91,12 +91,16 @@ func TestSlowQueryPrepared(t *testing.T) {
newCfg.Log.SlowQueryFile = f.Name()
config.StoreGlobalConfig(&newCfg)
defer func() {
tk.MustExec("set tidb_slow_log_threshold=300;")
tk.MustExec("set tidb_redact_log=0;")
config.StoreGlobalConfig(originCfg)
require.NoError(t, os.Remove(newCfg.Log.SlowQueryFile))
}()
require.NoError(t, logutil.InitLogger(newCfg.Log.ToLogConfig()))
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
defer func() {
tk.MustExec("set tidb_slow_log_threshold=300;")
tk.MustExec("set tidb_redact_log=0;")
}()

tk.MustExec(fmt.Sprintf("set @@tidb_slow_query_file='%v'", f.Name()))
tk.MustExec("set tidb_slow_log_threshold=0;")
Expand All @@ -116,8 +120,6 @@ func TestSlowQueryPrepared(t *testing.T) {
}

func TestLogSlowLogIndex(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
f, err := os.CreateTemp("", "tidb-slow-*.log")
require.NoError(t, err)
require.NoError(t, f.Close())
Expand All @@ -127,6 +129,8 @@ func TestLogSlowLogIndex(t *testing.T) {
conf.Log.SlowQueryFile = f.Name()
})
require.NoError(t, logutil.InitLogger(config.GetGlobalConfig().Log.ToLogConfig()))
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)

tk.MustExec(fmt.Sprintf("set @@tidb_slow_query_file='%v'", f.Name()))
tk.MustExec("use test")
Expand All @@ -140,9 +144,6 @@ func TestLogSlowLogIndex(t *testing.T) {
}

func TestSlowQuery(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)

f, err := os.CreateTemp("", "tidb-slow-*.log")
require.NoError(t, err)
_, err = f.WriteString(`
Expand Down Expand Up @@ -197,6 +198,8 @@ SELECT original_sql, bind_sql, default_db, status, create_time, update_time, cha
require.NoError(t, os.Remove(newCfg.Log.SlowQueryFile))
}()
require.NoError(t, logutil.InitLogger(newCfg.Log.ToLogConfig()))
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)

tk.MustExec(fmt.Sprintf("set @@tidb_slow_query_file='%v'", f.Name()))
tk.MustQuery("select count(*) from `information_schema`.`slow_query` where time > '2020-10-16 20:08:13' and time < '2020-10-16 21:08:13'").Check(testkit.Rows("1"))
Expand All @@ -208,10 +211,6 @@ SELECT original_sql, bind_sql, default_db, status, create_time, update_time, cha
}

func TestIssue37066(t *testing.T) {
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
require.NoError(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil))

originCfg := config.GetGlobalConfig()
newCfg := *originCfg
f, err := os.CreateTemp("", "tidb-slow-*.log")
Expand All @@ -224,6 +223,9 @@ func TestIssue37066(t *testing.T) {
require.NoError(t, os.Remove(newCfg.Log.SlowQueryFile))
}()
require.NoError(t, logutil.InitLogger(newCfg.Log.ToLogConfig()))
store := testkit.CreateMockStore(t)
tk := testkit.NewTestKit(t, store)
require.NoError(t, tk.Session().Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil))
tk.MustExec(fmt.Sprintf("set @@tidb_slow_query_file='%v'", f.Name()))
tk.MustExec("set tidb_slow_log_threshold=0;")
defer func() {
Expand Down
1 change: 1 addition & 0 deletions expression/builtin.go
Original file line number Diff line number Diff line change
Expand Up @@ -874,6 +874,7 @@ var funcs = map[string]functionClass{
ast.JSONObject: &jsonObjectFunctionClass{baseFunctionClass{ast.JSONObject, 0, -1}},
ast.JSONArray: &jsonArrayFunctionClass{baseFunctionClass{ast.JSONArray, 0, -1}},
ast.JSONContains: &jsonContainsFunctionClass{baseFunctionClass{ast.JSONContains, 2, 3}},
ast.JSONOverlaps: &jsonOverlapsFunctionClass{baseFunctionClass{ast.JSONOverlaps, 2, 2}},
ast.JSONContainsPath: &jsonContainsPathFunctionClass{baseFunctionClass{ast.JSONContainsPath, 3, -1}},
ast.JSONValid: &jsonValidFunctionClass{baseFunctionClass{ast.JSONValid, 1, 1}},
ast.JSONArrayAppend: &jsonArrayAppendFunctionClass{baseFunctionClass{ast.JSONArrayAppend, 3, -1}},
Expand Down
58 changes: 58 additions & 0 deletions expression/builtin_json.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ var (
_ functionClass = &jsonObjectFunctionClass{}
_ functionClass = &jsonArrayFunctionClass{}
_ functionClass = &jsonContainsFunctionClass{}
_ functionClass = &jsonOverlapsFunctionClass{}
_ functionClass = &jsonContainsPathFunctionClass{}
_ functionClass = &jsonValidFunctionClass{}
_ functionClass = &jsonArrayAppendFunctionClass{}
Expand Down Expand Up @@ -72,6 +73,7 @@ var (
_ builtinFunc = &builtinJSONRemoveSig{}
_ builtinFunc = &builtinJSONMergeSig{}
_ builtinFunc = &builtinJSONContainsSig{}
_ builtinFunc = &builtinJSONOverlapsSig{}
_ builtinFunc = &builtinJSONStorageSizeSig{}
_ builtinFunc = &builtinJSONDepthSig{}
_ builtinFunc = &builtinJSONSearchSig{}
Expand Down Expand Up @@ -820,6 +822,62 @@ func (b *builtinJSONContainsSig) evalInt(row chunk.Row) (res int64, isNull bool,
return 0, false, nil
}

// jsonOverlapsFunctionClass implements the JSON_OVERLAPS(json_doc1, json_doc2)
// built-in function class; it produces builtinJSONOverlapsSig signatures.
type jsonOverlapsFunctionClass struct {
	baseFunctionClass
}

// builtinJSONOverlapsSig is the single (JSON, JSON) -> INT signature of
// JSON_OVERLAPS.
type builtinJSONOverlapsSig struct {
	baseBuiltinFunc
}

// Clone returns a fresh copy of this signature so cached plans do not
// share mutable built-in state.
func (b *builtinJSONOverlapsSig) Clone() builtinFunc {
	cloned := &builtinJSONOverlapsSig{}
	cloned.cloneFrom(&b.baseBuiltinFunc)
	return cloned
}

// verifyArgs checks the argument count via the base class, then rejects any
// argument whose evaluated type can never be coerced to JSON (i.e. anything
// other than JSON or string), reporting the 1-based argument position.
func (c *jsonOverlapsFunctionClass) verifyArgs(args []Expression) error {
	if err := c.baseFunctionClass.verifyArgs(args); err != nil {
		return err
	}
	// The base class guarantees exactly two arguments here.
	for i, arg := range args[:2] {
		if et := arg.GetType().EvalType(); et != types.ETJson && et != types.ETString {
			return types.ErrInvalidJSONData.GenWithStackByArgs(i+1, "json_overlaps")
		}
	}
	return nil
}

// getFunction validates the arguments and builds the json_overlaps signature:
// both operands are evaluated as JSON and the result is an integer (0/1).
func (c *jsonOverlapsFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (builtinFunc, error) {
	if err := c.verifyArgs(args); err != nil {
		return nil, err
	}
	bf, err := newBaseBuiltinFuncWithTp(ctx, c.funcName, args, types.ETInt, types.ETJson, types.ETJson)
	if err != nil {
		return nil, err
	}
	return &builtinJSONOverlapsSig{bf}, nil
}

// evalInt implements the row-based JSON_OVERLAPS: it returns 1 when the two
// JSON documents share at least one element, 0 otherwise, and NULL when
// either operand is NULL.
func (b *builtinJSONOverlapsSig) evalInt(row chunk.Row) (res int64, isNull bool, err error) {
	left, isNull, err := b.args[0].EvalJSON(b.ctx, row)
	if isNull || err != nil {
		return res, isNull, err
	}
	right, isNull, err := b.args[1].EvalJSON(b.ctx, row)
	if isNull || err != nil {
		return res, isNull, err
	}
	if !types.OverlapsBinaryJSON(left, right) {
		return 0, false, nil
	}
	return 1, false, nil
}

type jsonValidFunctionClass struct {
baseFunctionClass
}
Expand Down
65 changes: 65 additions & 0 deletions expression/builtin_json_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -466,6 +466,71 @@ func TestJSONContains(t *testing.T) {
}
}

// TestJSONOverlaps exercises the row-based json_overlaps built-in:
// malformed JSON text errors, NULL propagation, and overlap semantics for
// array/object/scalar operands — with each pair tested in both argument
// orders, since json_overlaps is symmetric.
func TestJSONOverlaps(t *testing.T) {
	ctx := createContext(t)
	fc := funcs[ast.JSONOverlaps]
	tbl := []struct {
		input    []any // the two SQL arguments, as JSON text (or nil for NULL)
		expected any   // expected int result, or nil when the result is NULL
		err      error // expected error, or nil on success
	}{
		// Malformed JSON text in either argument must raise ErrInvalidJSONText.
		{[]any{`[1,2,[1,3]]`, `a:1`}, 1, types.ErrInvalidJSONText},
		{[]any{`a:1`, `1`}, 1, types.ErrInvalidJSONText},
		// A NULL operand yields a NULL result.
		{[]any{nil, `1`}, nil, nil},
		{[]any{`1`, nil}, nil, nil},

		// Arrays overlap when they share any element; objects compare by
		// full key/value equality; scalars match array members.
		{[]any{`[1, 2]`, `[2,3]`}, 1, nil},
		{[]any{`[1, 2]`, `[2]`}, 1, nil},
		{[]any{`[1, 2]`, `2`}, 1, nil},
		{[]any{`[{"a":1}]`, `{"a":1}`}, 1, nil},
		{[]any{`[{"a":1}]`, `{"a":1,"b":2}`}, 0, nil},
		{[]any{`[{"a":1}]`, `{"a":2}`}, 0, nil},
		{[]any{`{"a":[1,2]}`, `{"a":[1]}`}, 0, nil},
		{[]any{`{"a":[1,2]}`, `{"a":[2,1]}`}, 0, nil},
		{[]any{`[1,1,1]`, `1`}, 1, nil},
		{[]any{`1`, `1`}, 1, nil},
		{[]any{`0`, `1`}, 0, nil},
		{[]any{`[[1,2], 3]`, `[1,[2,3]]`}, 0, nil},
		{[]any{`[[1,2], 3]`, `[1,3]`}, 1, nil},
		{[]any{`{"a":1,"b":10,"d":10}`, `{"a":5,"e":10,"f":1,"d":20}`}, 0, nil},
		// Type matters: number 6 and string "6" do not overlap.
		{[]any{`[4,5,"6",7]`, `6`}, 0, nil},
		{[]any{`[4,5,6,7]`, `"6"`}, 0, nil},

		// Same cases with the arguments swapped — results must be identical.
		{[]any{`[2,3]`, `[1, 2]`}, 1, nil},
		{[]any{`[2]`, `[1, 2]`}, 1, nil},
		{[]any{`2`, `[1, 2]`}, 1, nil},
		{[]any{`{"a":1}`, `[{"a":1}]`}, 1, nil},
		{[]any{`{"a":1,"b":2}`, `[{"a":1}]`}, 0, nil},
		{[]any{`{"a":2}`, `[{"a":1}]`}, 0, nil},
		{[]any{`{"a":[1]}`, `{"a":[1,2]}`}, 0, nil},
		{[]any{`{"a":[2,1]}`, `{"a":[1,2]}`}, 0, nil},
		{[]any{`1`, `[1,1,1]`}, 1, nil},
		{[]any{`1`, `1`}, 1, nil},
		{[]any{`1`, `0`}, 0, nil},
		{[]any{`[1,[2,3]]`, `[[1,2], 3]`}, 0, nil},
		{[]any{`[1,3]`, `[[1,2], 3]`}, 1, nil},
		{[]any{`{"a":5,"e":10,"f":1,"d":20}`, `{"a":1,"b":10,"d":10}`}, 0, nil},
		{[]any{`6`, `[4,5,"6",7]`}, 0, nil},
		{[]any{`"6"`, `[4,5,6,7]`}, 0, nil},
	}
	for _, tt := range tbl {
		args := types.MakeDatums(tt.input...)
		f, err := fc.getFunction(ctx, datumsToConstants(args))
		// getFunction only type-checks; JSON text parse errors surface at eval.
		require.NoError(t, err, tt.input)
		d, err := evalBuiltinFunc(f, chunk.Row{})
		if tt.err == nil {
			require.NoError(t, err, tt.input)
			if tt.expected == nil {
				require.True(t, d.IsNull(), tt.input)
			} else {
				require.Equal(t, int64(tt.expected.(int)), d.GetInt64(), tt.input)
			}
		} else {
			require.True(t, tt.err.(*terror.Error).Equal(err), tt.input)
		}
	}
}

func TestJSONContainsPath(t *testing.T) {
ctx := createContext(t)
fc := funcs[ast.JSONContainsPath]
Expand Down
45 changes: 45 additions & 0 deletions expression/builtin_json_vec.go
Original file line number Diff line number Diff line change
Expand Up @@ -359,6 +359,51 @@ func (b *builtinJSONContainsSig) vecEvalInt(input *chunk.Chunk, result *chunk.Co
return nil
}

// vectorized reports that this signature supports chunk-based evaluation
// via vecEvalInt.
func (b *builtinJSONOverlapsSig) vectorized() bool {
	return true
}

// vecEvalInt is the vectorized JSON_OVERLAPS: for every row it writes 1 when
// the two JSON documents share an element, 0 otherwise, and marks the result
// NULL when either operand is NULL.
func (b *builtinJSONOverlapsSig) vecEvalInt(input *chunk.Chunk, result *chunk.Column) error {
	rowCount := input.NumRows()

	leftBuf, err := b.bufAllocator.get()
	if err != nil {
		return err
	}
	defer b.bufAllocator.put(leftBuf)
	if err = b.args[0].VecEvalJSON(b.ctx, input, leftBuf); err != nil {
		return err
	}

	rightBuf, err := b.bufAllocator.get()
	if err != nil {
		return err
	}
	defer b.bufAllocator.put(rightBuf)
	if err = b.args[1].VecEvalJSON(b.ctx, input, rightBuf); err != nil {
		return err
	}

	result.ResizeInt64(rowCount, false)
	// A NULL in either operand makes the whole result NULL for that row.
	result.MergeNulls(leftBuf, rightBuf)
	out := result.Int64s()
	for row := 0; row < rowCount; row++ {
		if result.IsNull(row) {
			continue
		}
		var overlaps int64
		if types.OverlapsBinaryJSON(leftBuf.GetJSON(row), rightBuf.GetJSON(row)) {
			overlaps = 1
		}
		out[row] = overlaps
	}

	return nil
}

func (b *builtinJSONQuoteSig) vectorized() bool {
return true
}
Expand Down
7 changes: 7 additions & 0 deletions expression/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2721,6 +2721,13 @@ func TestFuncJSON(t *testing.T) {
// #16267
tk.MustQuery(`select json_array(922337203685477580) = json_array(922337203685477581);`).Check(testkit.Rows("0"))

tk.MustQuery("select json_overlaps('[[1,2], 3]', '[1, 3]');").Check(testkit.Rows("1"))
tk.MustQuery("select json_overlaps('[{\"a\":1}]', '{\"a\":1}');").Check(testkit.Rows("1"))
tk.MustQuery("select json_overlaps('{\"a\":1}', '[{\"a\":1}]');").Check(testkit.Rows("1"))
tk.MustQuery("select json_overlaps('[1,[2,3]]', '[[1,2], 3]');").Check(testkit.Rows("0"))
tk.MustQuery("select json_overlaps('{\"a\":[1,2]}', '{\"a\":[2,1]}');").Check(testkit.Rows("0"))
tk.MustQuery("select json_overlaps('{\"a\":[1,2]}', '{\"a\":[2,1]}');").Check(testkit.Rows("0"))

// #10461
tk.MustExec("drop table if exists tx1")
tk.MustExec("create table tx1(id int key, a double, b double, c double, d double)")
Expand Down
1 change: 1 addition & 0 deletions parser/ast/functions.go
Original file line number Diff line number Diff line change
Expand Up @@ -331,6 +331,7 @@ const (
JSONInsert = "json_insert"
JSONReplace = "json_replace"
JSONRemove = "json_remove"
JSONOverlaps = "json_overlaps"
JSONContains = "json_contains"
JSONMemberOf = "json_memberof"
JSONContainsPath = "json_contains_path"
Expand Down
Loading

0 comments on commit 65e8ca6

Please sign in to comment.