Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -26,12 +26,10 @@
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelShuttle;
import org.apache.calcite.rel.RelWriter;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.validate.SqlValidatorUtil;
import org.apache.calcite.util.Util;
import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException;
Expand All @@ -46,8 +44,6 @@ public class HiveProject extends Project implements HiveRelNode {
// Information about correlations within a subquery.
private final CorrelationInfoSupplier correlationInfos;

private boolean isSysnthetic;

/**
* Creates a HiveProject.
* @param cluster
Expand Down Expand Up @@ -110,22 +106,7 @@ public static HiveProject create(RelOptCluster cluster, RelNode child, List<? ex
@Override
public Project copy(RelTraitSet traitSet, RelNode input, List<RexNode> exps, RelDataType rowType) {
assert traitSet.containsIfApplicable(HiveRelNode.CONVENTION);
HiveProject hp = new HiveProject(getCluster(), traitSet, input, exps, rowType);
if (this.isSynthetic()) {
hp.setSynthetic();
}

return hp;
}

// TODO: this should come through RelBuilder to the constructor as opposed to
// set method. This requires calcite change
public void setSynthetic() {
this.isSysnthetic = true;
}

public boolean isSynthetic() {
return isSysnthetic;
return new HiveProject(getCluster(), traitSet, input, exps, rowType);
}

//required for HiveRelDecorrelator
Expand All @@ -136,12 +117,6 @@ public boolean isSynthetic() {
return shuttle.visit(this);
}

@Override
public RelWriter explainTerms(RelWriter pw) {
return super.explainTerms(pw)
.itemIf("synthetic", this.isSysnthetic, pw.getDetailLevel() == SqlExplainLevel.DIGEST_ATTRIBUTES);
}

public List<HiveCorrelationInfo> getCorrelationInfos() {
return correlationInfos.get();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -261,13 +261,8 @@ public RelNode project(ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFi
fieldNames));

// 5. Add Proj on top of TS
HiveProject hp = (HiveProject) relBuilder.push(newHT)
return relBuilder.push(newHT)
.project(exprList, new ArrayList<String>(fieldNames)).build();

// 6. Set synthetic flag, so that we would push filter below this one
hp.setSynthetic();

return hp;
}

public List<Integer> getNeededColIndxsFrmReloptHT() {
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,9 @@
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;

import org.apache.calcite.adapter.druid.DruidQuery;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptPredicateList;
import org.apache.calcite.plan.RelOptRuleCall;
Expand All @@ -35,6 +37,7 @@
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rel.core.Join;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.rules.FilterProjectTransposeRule;
import org.apache.calcite.rel.rules.ProjectMergeRule;
import org.apache.calcite.rel.type.RelDataTypeFactory;
Expand All @@ -59,14 +62,22 @@
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelFactories;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelOptUtil;
import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelOptUtil.RewritablePKFKJoinInfo;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;

public class HiveFilterProjectTransposeRule extends FilterProjectTransposeRule {

private static RelOptRuleOperand joinOperand() {
Predicate<Join> isRewritable = join -> {
RelMetadataQuery mq = join.getCluster().getMetadataQuery();
return HiveRelOptUtil.isRewritablePKFKJoin(join, join.getLeft(), join.getRight(), mq).rewritable;
};
return operandJ(Join.class, null, isRewritable, any());
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

If it's not too much of an effort, it could be a good idea not to use deprecated APIs like operandJ. That said, this is not the only place where it is used, so it is not a strict requirement. I will leave it up to you.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried getting rid of the deprecations, but it requires a significant refactoring of the rule, so to keep the scope of the JIRA more focused I decided not to do it as part of this PR.

}

public static final HiveFilterProjectTransposeRule DETERMINISTIC_WINDOWING_ON_NON_FILTERING_JOIN =
new HiveFilterProjectTransposeRule(
operand(Filter.class, operand(Project.class, operand(Join.class, any()))),
operand(Filter.class, operand(Project.class, joinOperand())),
HiveRelFactories.HIVE_BUILDER, true, true, ProjectMergeRule.DEFAULT_BLOAT);

public static final HiveFilterProjectTransposeRule DETERMINISTIC_WINDOWING =
Expand All @@ -75,7 +86,7 @@ public class HiveFilterProjectTransposeRule extends FilterProjectTransposeRule {

public static final HiveFilterProjectTransposeRule DETERMINISTIC_ON_NON_FILTERING_JOIN =
new HiveFilterProjectTransposeRule(
operand(Filter.class, operand(Project.class, operand(Join.class, any()))),
operand(Filter.class, operand(Project.class, joinOperand())),
HiveRelFactories.HIVE_BUILDER, true, false, ProjectMergeRule.DEFAULT_BLOAT);

public static final HiveFilterProjectTransposeRule DETERMINISTIC =
Expand All @@ -86,6 +97,20 @@ public class HiveFilterProjectTransposeRule extends FilterProjectTransposeRule {
new HiveFilterProjectTransposeRule(Filter.class, HiveProject.class,
HiveRelFactories.HIVE_BUILDER, false, false, ProjectMergeRule.DEFAULT_BLOAT);

public static final HiveFilterProjectTransposeRule SCAN = new HiveFilterProjectTransposeRule(
operand(Filter.class, operand(HiveProject.class, operand(HiveTableScan.class, none()))),
HiveRelFactories.HIVE_BUILDER,
false,
false,
ProjectMergeRule.DEFAULT_BLOAT);

public static final HiveFilterProjectTransposeRule DRUID = new HiveFilterProjectTransposeRule(
operand(Filter.class, operand(HiveProject.class, operand(DruidQuery.class, none()))),
HiveRelFactories.HIVE_BUILDER,
false,
false,
ProjectMergeRule.DEFAULT_BLOAT);

private final boolean onlyDeterministic;

private final boolean pushThroughWindowing;
Expand Down Expand Up @@ -127,16 +152,6 @@ public boolean matches(RelOptRuleCall call) {
if (this.onlyDeterministic && !HiveCalciteUtil.isDeterministic(condition)) {
return false;
}

if (call.rels.length > 2) {
final Join joinRel = call.rel(2);
RewritablePKFKJoinInfo joinInfo = HiveRelOptUtil.isRewritablePKFKJoin(
joinRel, joinRel.getLeft(), joinRel.getRight(), call.getMetadataQuery());
if (!joinInfo.rewritable) {
return false;
}
}

return super.matches(call);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@
import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveMultiJoin;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveSortExchange;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableFunctionScan;
import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan;
Expand Down Expand Up @@ -390,10 +389,7 @@ private static RelNode project(DruidQuery dq, ImmutableBitSet fieldsUsed,
true));
nameList.add(extraField.getName());
}

HiveProject hp = (HiveProject) relBuilder.push(dq).project(exprList, nameList).build();
hp.setSynthetic();
return hp;
return relBuilder.push(dq).project(exprList, nameList).build();
}

private boolean isRexLiteral(final RexNode rexNode) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,6 @@
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFieldTrimmerRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterAggregateTransposeRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterJoinRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterProjectTSTransposeRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterProjectTransposeRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSetOpTransposeRule;
import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSortPredicates;
Expand Down Expand Up @@ -1933,7 +1932,7 @@ protected RelNode applyPreJoinOrderingTransforms(RelNode basePlan, RelMetadataPr
// matches FIL-PROJ-TS
// Also merge, remove and reduce Project if possible
generatePartialProgram(program, true, HepMatchOrder.TOP_DOWN,
HiveFilterProjectTSTransposeRule.INSTANCE, HiveFilterProjectTSTransposeRule.INSTANCE_DRUID,
HiveFilterProjectTransposeRule.SCAN, HiveFilterProjectTransposeRule.DRUID,
HiveProjectFilterPullUpConstantsRule.INSTANCE, HiveProjectMergeRule.INSTANCE,
ProjectRemoveRule.Config.DEFAULT.toRule(), HiveSortMergeRule.INSTANCE);

Expand Down Expand Up @@ -2100,7 +2099,7 @@ private RelNode rewriteUsingViews(RelOptPlanner planner, RelNode basePlan,
// Unboxing rule
planner.addRule(HiveMaterializedViewBoxing.INSTANCE_UNBOXING);
// Partition pruner rule
planner.addRule(HiveFilterProjectTSTransposeRule.INSTANCE);
planner.addRule(HiveFilterProjectTransposeRule.SCAN);
planner.addRule(new HivePartitionPruneRule(conf));

// Optimize plan
Expand Down Expand Up @@ -2411,7 +2410,7 @@ private RelNode applyPostJoinOrderingTransform(RelNode basePlan, RelMetadataProv
generatePartialProgram(program, false, HepMatchOrder.DEPTH_FIRST,
ProjectRemoveRule.Config.DEFAULT.toRule(), new ProjectMergeRule(false, HiveRelFactories.HIVE_BUILDER));
generatePartialProgram(program, true, HepMatchOrder.TOP_DOWN,
HiveFilterProjectTSTransposeRule.INSTANCE, HiveFilterProjectTSTransposeRule.INSTANCE_DRUID,
HiveFilterProjectTransposeRule.SCAN, HiveFilterProjectTransposeRule.DRUID,
HiveProjectFilterPullUpConstantsRule.INSTANCE);

// 9.2. Introduce exchange operators below join/multijoin operators
Expand Down