Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -598,8 +598,9 @@ public static class Lock implements AutoCloseable {
public Lock(LogicalPlan plan, CascadesContext cascadesContext) {
this.cascadesContext = cascadesContext;
// tables can also be loaded from a dump file
if (cascadesContext.tables == null) {
if (cascadesContext.getTables() == null || cascadesContext.getTables().isEmpty()) {
cascadesContext.extractTables(plan);
cascadesContext.getStatementContext().setTables(cascadesContext.getTables());
}
for (TableIf table : cascadesContext.tables.values()) {
if (!table.needReadLockWhenPlan()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -344,8 +344,8 @@ private void setRuntimeFilterWaitTimeByTableRowCountAndType() {

private void initCascadesContext(LogicalPlan plan, PhysicalProperties requireProperties) {
cascadesContext = CascadesContext.initContext(statementContext, plan, requireProperties);
if (statementContext.getConnectContext().getTables() != null) {
cascadesContext.setTables(statementContext.getConnectContext().getTables());
if (statementContext.getTables() != null) {
cascadesContext.setTables(statementContext.getTables());
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import org.apache.doris.datasource.mvcc.MvccSnapshot;
import org.apache.doris.datasource.mvcc.MvccTable;
import org.apache.doris.datasource.mvcc.MvccTableInfo;
import org.apache.doris.nereids.exceptions.AnalysisException;
import org.apache.doris.nereids.hint.Hint;
import org.apache.doris.nereids.memo.Group;
import org.apache.doris.nereids.rules.analysis.ColumnAliasGenerator;
Expand All @@ -51,6 +52,7 @@
import org.apache.doris.statistics.Statistics;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.base.Stopwatch;
import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;
Expand Down Expand Up @@ -146,6 +148,9 @@ public class StatementContext implements Closeable {
// placeholder params for prepared statement
private List<Placeholder> placeholders;

// tables used for plan replayer
private Map<List<String>, TableIf> tables = null;

// for create view support in nereids
// key is the start and end position of the sql substring that needs to be replaced,
// and value is the new string used for replacement.
Expand Down Expand Up @@ -205,6 +210,30 @@ public StatementContext(ConnectContext connectContext, OriginStatement originSta
}
}

/**
 * Returns the table cache used for plan replay, keyed by qualified table name.
 * Lazily creates an empty map on first access, so this never returns null.
 */
public Map<List<String>, TableIf> getTables() {
    if (this.tables == null) {
        this.tables = Maps.newHashMap();
    }
    return this.tables;
}

/**
 * Replaces the plan-replay table cache with the given map,
 * keyed by qualified table name.
 */
public void setTables(Map<List<String>, TableIf> tables) {
    this.tables = tables;
}

/**
 * Looks up a table by its qualified name in the minidump table cache.
 *
 * <p>Returns null when minidump replay is disabled, so callers fall back to
 * the normal catalog lookup. During replay the cache is authoritative: a miss
 * is an error, because the dump file should contain every referenced table.
 *
 * @param tableQualifier qualified table name (e.g. catalog, db, table parts)
 * @return the cached table, or null when not replaying a minidump
 * @throws AnalysisException if replaying and the table is absent from the cache
 */
public TableIf getTableInMinidumpCache(List<String> tableQualifier) {
    if (!getConnectContext().getSessionVariable().isPlayNereidsDump()) {
        return null;
    }
    // During replay the cache must have been populated from the dump file.
    Preconditions.checkState(tables != null, "tables should not be null");
    // getOrDefault(key, null) is equivalent to get(key); the replay-mode
    // re-check that used to guard this throw was always true here and has
    // been dropped.
    TableIf table = tables.get(tableQualifier);
    if (table == null) {
        throw new AnalysisException("Minidump cache can not find table:" + tableQualifier);
    }
    return table;
}

public void setConnectContext(ConnectContext connectContext) {
this.connectContext = connectContext;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,17 @@ public Map<String, Histogram> getTotalHistogramMap() {
/** Nereids minidump entry; the argument should be the absolute path of the minidump file */
public static void main(String[] args) {
assert (args.length == 1);
Minidump minidump = MinidumpUtils.loadMinidumpInputs(args[0]);
Minidump minidump = null;
try {
minidump = MinidumpUtils.loadMinidumpInputs(args[0]);
} catch (Exception e) {
e.printStackTrace();
}

StatementContext statementContext = new StatementContext(ConnectContext.get(),
new OriginStatement(minidump.getSql(), 0));
statementContext.setTables(minidump.getTables());
ConnectContext.get().setStatementContext(statementContext);
JSONObject resultPlan = MinidumpUtils.executeSql(minidump.getSql());
JSONObject minidumpResult = new JSONObject(minidump.getResultPlanJson());

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,7 +175,6 @@ public static void setConnectContext(Minidump minidump) {
connectContext.setThreadLocalInfo();
Env.getCurrentEnv().setColocateTableIndex(minidump.getColocateTableIndex());
connectContext.setSessionVariable(minidump.getSessionVariable());
connectContext.setTables(minidump.getTables());
connectContext.setDatabase(minidump.getDbName());
connectContext.getSessionVariable().setEnableMinidump(false);
connectContext.getSessionVariable().setPlanNereidsDump(true);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,14 +171,18 @@ private LogicalPlan bindWithCurrentDb(CascadesContext cascadesContext, UnboundRe
List<String> tableQualifier = RelationUtil.getQualifierName(cascadesContext.getConnectContext(),
unboundRelation.getNameParts());
TableIf table = null;
if (customTableResolver.isPresent()) {
table = customTableResolver.get().apply(tableQualifier);
table = ConnectContext.get().getStatementContext().getTableInMinidumpCache(tableQualifier);
if (table == null) {
if (customTableResolver.isPresent()) {
table = customTableResolver.get().apply(tableQualifier);
}
}
// In some cases even if we have already called the "cascadesContext.getTableByName",
// it also gets the null. So, we just check it in the catalog again for safety.
if (table == null) {
table = RelationUtil.getTable(tableQualifier, cascadesContext.getConnectContext().getEnv());
}
ConnectContext.get().getStatementContext().getTables().put(tableQualifier, table);

// TODO: should generate different Scan sub class according to table's type
LogicalPlan scan = getLogicalPlan(table, unboundRelation, tableQualifier, cascadesContext);
Expand All @@ -197,12 +201,14 @@ private LogicalPlan bind(CascadesContext cascadesContext, UnboundRelation unboun
if (customTableResolver.isPresent()) {
table = customTableResolver.get().apply(qualifiedTablName);
}
table = ConnectContext.get().getStatementContext().getTableInMinidumpCache(tableQualifier);
// In some cases even if we have already called the "cascadesContext.getTableByName",
// it also gets the null. So, we just check it in the catalog again for safety.
if (table == null) {
table = RelationUtil.getTable(qualifiedTablName, cascadesContext.getConnectContext().getEnv());
}
return getLogicalPlan(table, unboundRelation, qualifiedTablName, cascadesContext);
ConnectContext.get().getStatementContext().getTables().put(tableQualifier, table);
return getLogicalPlan(table, unboundRelation, tableQualifier, cascadesContext);
}

private LogicalPlan makeOlapScan(TableIf table, UnboundRelation unboundRelation, List<String> qualifier) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@
import org.apache.doris.catalog.DatabaseIf;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.FunctionRegistry;
import org.apache.doris.catalog.TableIf;
import org.apache.doris.catalog.Type;
import org.apache.doris.cluster.ClusterNamespace;
import org.apache.doris.common.Config;
Expand Down Expand Up @@ -259,8 +258,6 @@ public void setUserInsertTimeout(int insertTimeout) {
// new planner
private Map<String, PreparedStatementContext> preparedStatementContextMap = Maps.newHashMap();

private List<TableIf> tables = null;

private Map<String, ColumnStatistic> totalColumnStatisticMap = new HashMap<>();

public Map<String, ColumnStatistic> getTotalColumnStatisticMap() {
Expand Down Expand Up @@ -438,14 +435,6 @@ public PreparedStatementContext getPreparedStementContext(String stmtName) {
return this.preparedStatementContextMap.get(stmtName);
}

public List<TableIf> getTables() {
return tables;
}

public void setTables(List<TableIf> tables) {
this.tables = tables;
}

public void closeTxn() {
if (isTxnModel()) {
try {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@
package org.apache.doris.nereids.util;

import org.apache.doris.catalog.TableIf;
import org.apache.doris.nereids.CascadesContext;
import org.apache.doris.nereids.NereidsPlanner;
import org.apache.doris.nereids.StatementContext;
import org.apache.doris.nereids.datasets.ssb.SSBTestBase;
Expand Down Expand Up @@ -47,12 +46,15 @@ public void testSimple() {
parser.parseSingle(sql),
PhysicalProperties.ANY
);
CascadesContext cascadesContext = planner.getCascadesContext();

List<TableIf> f = cascadesContext.getTables();
Map<List<String>, TableIf> f = statementContext.getTables();
Assertions.assertEquals(1, f.size());
Assertions.assertEquals("supplier", f.stream().map(TableIf::getName).findFirst().get());
}
Set<String> tableNames = new HashSet<>();
for (Map.Entry<List<String>, TableIf> entry : f.entrySet()) {
TableIf table = entry.getValue();
tableNames.add(table.getName());
}
Assertions.assertTrue(tableNames.contains("supplier"));
}

@Test
public void testCTE() {
Expand All @@ -69,8 +71,7 @@ public void testCTE() {
parser.parseSingle(sql),
PhysicalProperties.ANY
);
CascadesContext cascadesContext = planner.getCascadesContext();
List<TableIf> f = cascadesContext.getTables();
Map<List<String>, TableIf> f = statementContext.getTables();
Assertions.assertEquals(1, f.size());
Assertions.assertEquals("supplier", f.stream().map(TableIf::getName).findFirst().get());
}
Expand All @@ -84,9 +85,7 @@ public void testSubQuery() {
parser.parseSingle(sql),
PhysicalProperties.ANY
);
CascadesContext cascadesContext = planner.getCascadesContext();
List<TableIf> f = cascadesContext.getTables();
Assertions.assertEquals(1, f.size());
Map<List<String>, TableIf> f = statementContext.getTables();
Assertions.assertEquals("supplier", f.stream().map(TableIf::getName).findFirst().get());
}

Expand All @@ -99,8 +98,7 @@ public void testScalarSubQuery() {
parser.parseSingle(sql),
PhysicalProperties.ANY
);
CascadesContext cascadesContext = planner.getCascadesContext();
List<TableIf> f = cascadesContext.getTables();
Map<List<String>, TableIf> f = statementContext.getTables();
Assertions.assertEquals(2, f.size());
Set<String> tableNames = f.stream().map(TableIf::getName).collect(Collectors.toSet());
Assertions.assertTrue(tableNames.contains("supplier"));
Expand All @@ -117,11 +115,14 @@ public void testInserInto() {
(LogicalPlan) insertIntoTableCommand.getExplainPlan(connectContext),
PhysicalProperties.ANY
);
CascadesContext cascadesContext = planner.getCascadesContext();
List<TableIf> f = cascadesContext.getTables();
Assertions.assertEquals(2, f.size());
Set<String> tableNames = f.stream().map(TableIf::getName).collect(Collectors.toSet());
Assertions.assertTrue(tableNames.contains("supplier"));
Map<List<String>, TableIf> f = statementContext.getTables();
// the insert target table is not added to the statement context; it is locked during the insert instead
Assertions.assertEquals(1, f.size());
Set<String> tableNames = new HashSet<>();
for (Map.Entry<List<String>, TableIf> entry : f.entrySet()) {
TableIf table = entry.getValue();
tableNames.add(table.getName());
}
Assertions.assertTrue(tableNames.contains("lineorder"));
}
}
Loading