
Commit

[fix](broker-load) can't load parquet file with column name case sensitive with Doris column (apache#9358)
deardeng authored and minghong.zhou committed May 23, 2022
1 parent fd1fe51 commit bff3a92
Showing 3 changed files with 13 additions and 6 deletions.
8 changes: 6 additions & 2 deletions fe/fe-core/src/main/java/org/apache/doris/load/Load.java
@@ -1049,8 +1049,12 @@ private static void initColumns(Table tbl, List<ImportColumnDesc> columnExprs,
         for (ImportColumnDesc importColumnDesc : copiedColumnExprs) {
             // make column name case match with real column name
             String columnName = importColumnDesc.getColumnName();
-            String realColName = tbl.getColumn(columnName) == null ? columnName
-                    : tbl.getColumn(columnName).getName();
+            String realColName;
+            if (tbl.getColumn(columnName) == null || importColumnDesc.getExpr() == null) {
+                realColName = columnName;
+            } else {
+                realColName = tbl.getColumn(columnName).getName();
+            }
             if (importColumnDesc.getExpr() != null) {
                 Expr expr = transformHadoopFunctionExpr(tbl, realColName, importColumnDesc.getExpr());
                 exprsByName.put(realColName, expr);
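To make the new branch easier to follow, here is a minimal stand-alone sketch (not Doris code: the table is modeled as a plain map from column name to its canonical spelling, and hasExpr stands in for importColumnDesc.getExpr() != null). A plain source column with no expression now keeps the name exactly as written in the load statement, while an expression target is still rewritten to the table column's canonical name.

import java.util.Map;
import java.util.TreeMap;

public class RealColNameSketch {
    // Mirrors the new if/else above: keep the given name when the column is
    // unknown to the table OR when no expression is attached to it.
    static String realColName(Map<String, String> tableColumns, String columnName, boolean hasExpr) {
        if (tableColumns.get(columnName) == null || !hasExpr) {
            return columnName;
        }
        return tableColumns.get(columnName);
    }

    public static void main(String[] args) {
        // Hypothetical table with one column, canonically spelled "A".
        Map<String, String> tableColumns = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        tableColumns.put("A", "A");

        System.out.println(realColName(tableColumns, "a", false)); // "a" -- plain column keeps its casing
        System.out.println(realColName(tableColumns, "a", true));  // "A" -- expression target is canonicalized
    }
}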
@@ -247,8 +247,8 @@ private void initParams(ParamCreateContext context)
      */
     private void initColumns(ParamCreateContext context) throws UserException {
         context.srcTupleDescriptor = analyzer.getDescTbl().createTupleDescriptor();
-        context.slotDescByName = Maps.newHashMap();
-        context.exprMap = Maps.newHashMap();
+        context.slotDescByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+        context.exprMap = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
 
         // for load job, column exprs is got from file group
         // for query, there is no column exprs, they will be got from table's schema in "Load.initColumns"
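Guava's Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER) creates a java.util.TreeMap ordered by the JDK's case-insensitive comparator, so keys that differ only in case share one entry. A quick sketch (plain JDK, illustrative key and value names) of the property both slotDescByName and exprMap now rely on:

import java.util.Map;
import java.util.TreeMap;

public class CaseInsensitiveKeys {
    public static void main(String[] args) {
        Map<String, String> exprMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);

        exprMap.put("event_TIME", "expr#1");
        // "EVENT_time" is the same key under CASE_INSENSITIVE_ORDER, so this
        // replaces the previous entry rather than adding a second one.
        exprMap.put("EVENT_time", "expr#2");

        System.out.println(exprMap.size());            // 1
        System.out.println(exprMap.get("event_time")); // expr#2 -- any casing finds it
    }
}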
@@ -66,8 +66,11 @@ public class StreamLoadScanNode extends LoadScanNode {
     private TupleDescriptor srcTupleDesc;
     private TBrokerScanRange brokerScanRange;
 
-    private Map<String, SlotDescriptor> slotDescByName = Maps.newHashMap();
-    private Map<String, Expr> exprsByName = Maps.newHashMap();
+    // If a case-sensitive map were used here: with a table column named "A" and the mapping
+    // '(a) set (A = a)' in the load SQL, slotDescByName would store "a", and the later attempt
+    // to use "a" to fetch the table's "A" column info would throw an exception.
+    private final Map<String, SlotDescriptor> slotDescByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+    private final Map<String, Expr> exprsByName = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
 
     // used to construct for streaming loading
     public StreamLoadScanNode(
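The comment added above describes the failure this avoids. A small stand-alone sketch of that scenario (hypothetical names; the SlotDescriptor is replaced by a plain String so the example runs on its own): the load mapping registers the slot under "a", but later code looks it up with the table's spelling "A".

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class SlotLookupScenario {
    public static void main(String[] args) {
        // Table column is named "A"; the load SQL mapping '(a) set (A = a)'
        // registers the source slot under "a".
        Map<String, String> caseSensitive = new HashMap<>();
        caseSensitive.put("a", "slot for source column a");

        Map<String, String> caseInsensitive = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
        caseInsensitive.put("a", "slot for source column a");

        // Looking the slot up by the table's spelling "A":
        System.out.println(caseSensitive.get("A"));   // null  -> the load would fail here
        System.out.println(caseInsensitive.get("A")); // found -> the load proceeds
    }
}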
