Skip to content

Commit

Permalink
planner: Fix "index out of range" error occurring during join reorder (#24102) (#…
Browse files Browse the repository at this point in the history
  • Loading branch information
ti-srebot authored Dec 20, 2021
1 parent 918feed commit 54c5966
Show file tree
Hide file tree
Showing 4 changed files with 61 additions and 4 deletions.
23 changes: 23 additions & 0 deletions planner/core/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3873,3 +3873,26 @@ func (s *testIntegrationSuite) TestIssue27797(c *C) {
result = tk.MustQuery("select col2 from IDT_HP24172 where col1 = 8388607 and col1 in (select col1 from IDT_HP24172);")
result.Check(testkit.Rows("<nil>"))
}

// TestIssue24095 is a regression test for an "index out of range" panic in the
// join-reorder rule when the reordered plan's schema column count differed from
// the original schema's (see the commit title and the fix in rule_join_reorder.go).
// https://github.com/pingcap/tidb/issues/24095
func (s *testIntegrationSuite) TestIssue24095(c *C) {
tk := testkit.NewTestKit(c, s.store)
tk.MustExec("use test;")
tk.MustExec("drop table if exists t;")
tk.MustExec("create table t (id int, value decimal(10,5));")
// Smoke-check: planning this query must not panic. Before the fix, explaining
// it triggered the out-of-range access during join reorder.
tk.MustExec("desc format = 'brief' select count(*) from t join (select t.id, t.value v1 from t join t t1 on t.id = t1.id order by t.value limit 1) v on v.id = t.id and v.v1 = t.value;")

// Data-driven check: cases come from integration_suite_in.json and the
// expected plans from integration_suite_out.json ("TestIssue24095" entries).
var input []string
var output []struct {
SQL string
Plan []string
}
s.testData.GetTestCases(c, &input, &output)
for i, tt := range input {
// In record mode, capture the current plan as the new expected output;
// otherwise compare against the recorded plan.
s.testData.OnRecord(func() {
output[i].SQL = tt
output[i].Plan = s.testData.ConvertRowsToStrings(tk.MustQuery("explain format = 'brief' " + tt).Rows())
})
tk.MustQuery("explain format = 'brief' " + tt).Check(testkit.Rows(output[i].Plan...))
}
}
12 changes: 8 additions & 4 deletions planner/core/rule_join_reorder.go
Original file line number Diff line number Diff line change
Expand Up @@ -91,10 +91,14 @@ func (s *joinReOrderSolver) optimizeRecursive(ctx sessionctx.Context, p LogicalP
return nil, err
}
schemaChanged := false
for i, col := range p.Schema().Columns {
if !col.Equal(nil, originalSchema.Columns[i]) {
schemaChanged = true
break
if len(p.Schema().Columns) != len(originalSchema.Columns) {
schemaChanged = true
} else {
for i, col := range p.Schema().Columns {
if !col.Equal(nil, originalSchema.Columns[i]) {
schemaChanged = true
break
}
}
}
if schemaChanged {
Expand Down
6 changes: 6 additions & 0 deletions planner/core/testdata/integration_suite_in.json
Original file line number Diff line number Diff line change
Expand Up @@ -300,5 +300,11 @@
"select a from ta group by @n:=@n+1",
"select a from ta group by @n:=@n+a"
]
},
{
"name": "TestIssue24095",
"cases": [
"select count(*) from t join (select t.id, t.value v1 from t join t t1 on t.id = t1.id order by t.value limit 1) v on v.id = t.id and v.v1 = t.value;"
]
}
]
24 changes: 24 additions & 0 deletions planner/core/testdata/integration_suite_out.json
Original file line number Diff line number Diff line change
Expand Up @@ -1614,5 +1614,29 @@
]
}
]
},
{
"Name": "TestIssue24095",
"Cases": [
{
"SQL": "select count(*) from t join (select t.id, t.value v1 from t join t t1 on t.id = t1.id order by t.value limit 1) v on v.id = t.id and v.v1 = t.value;",
"Plan": [
"StreamAgg 1.00 root funcs:count(1)->Column#10",
"└─HashJoin 1.00 root inner join, equal:[eq(test.t.id, test.t.id) eq(test.t.value, test.t.value)]",
" ├─Selection(Build) 0.80 root not(isnull(test.t.id)), not(isnull(test.t.value))",
" │ └─TopN 1.00 root test.t.value, offset:0, count:1",
" │ └─HashJoin 12487.50 root inner join, equal:[eq(test.t.id, test.t.id)]",
" │ ├─TableReader(Build) 9990.00 root data:Selection",
" │ │ └─Selection 9990.00 cop[tikv] not(isnull(test.t.id))",
" │ │ └─TableFullScan 10000.00 cop[tikv] table:t1 keep order:false, stats:pseudo",
" │ └─TableReader(Probe) 9990.00 root data:Selection",
" │ └─Selection 9990.00 cop[tikv] not(isnull(test.t.id))",
" │ └─TableFullScan 10000.00 cop[tikv] table:t keep order:false, stats:pseudo",
" └─TableReader(Probe) 9980.01 root data:Selection",
" └─Selection 9980.01 cop[tikv] not(isnull(test.t.id)), not(isnull(test.t.value))",
" └─TableFullScan 10000.00 cop[tikv] table:t keep order:false, stats:pseudo"
]
}
]
}
]

0 comments on commit 54c5966

Please sign in to comment.