diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/hints.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/hints.scala
index edfe5d5dd032..5dc3eb707f6c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/hints.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/hints.scala
@@ -62,6 +62,8 @@ case class ResolvedHint(child: LogicalPlan, hints: HintInfo = HintInfo())
  */
 case class JoinHint(leftHint: Option[HintInfo], rightHint: Option[HintInfo]) {
 
+  def isEmpty: Boolean = leftHint.isEmpty && rightHint.isEmpty
+
   override def toString: String = {
     Seq(
       leftHint.map("leftHint=" + _),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/Columnar.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/Columnar.scala
index d1e916842a21..147285c31fb4 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/Columnar.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/Columnar.scala
@@ -548,11 +548,9 @@ case class ApplyColumnarRulesAndInsertTransitions(

   def apply(plan: SparkPlan): SparkPlan = {
     var preInsertPlan: SparkPlan = plan
-    columnarRules.foreach((r : ColumnarRule) =>
-      preInsertPlan = r.preColumnarTransitions(preInsertPlan))
+    columnarRules.foreach(r => preInsertPlan = r.preColumnarTransitions(preInsertPlan))
     var postInsertPlan = insertTransitions(preInsertPlan, outputsColumnar)
-    columnarRules.reverse.foreach((r : ColumnarRule) =>
-      postInsertPlan = r.postColumnarTransitions(postInsertPlan))
+    columnarRules.reverse.foreach(r => postInsertPlan = r.postColumnarTransitions(postInsertPlan))
     postInsertPlan
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index 90c2507a1e11..9c2195d42786 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -266,11 +266,15 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
           }
         }

-        createBroadcastHashJoin(true)
-          .orElse { if (hintToSortMergeJoin(hint)) createSortMergeJoin() else None }
-          .orElse(createShuffleHashJoin(true))
-          .orElse { if (hintToShuffleReplicateNL(hint)) createCartesianProduct() else None }
-          .getOrElse(createJoinWithoutHint())
+        if (hint.isEmpty) {
+          createJoinWithoutHint()
+        } else {
+          createBroadcastHashJoin(true)
+            .orElse { if (hintToSortMergeJoin(hint)) createSortMergeJoin() else None }
+            .orElse(createShuffleHashJoin(true))
+            .orElse { if (hintToShuffleReplicateNL(hint)) createCartesianProduct() else None }
+            .getOrElse(createJoinWithoutHint())
+        }

       case j @ ExtractSingleColumnNullAwareAntiJoin(leftKeys, rightKeys) =>
         Seq(joins.BroadcastHashJoinExec(leftKeys, rightKeys, LeftAnti, BuildRight,
@@ -339,10 +343,13 @@ abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
           }
         }

-        createBroadcastNLJoin(hintToBroadcastLeft(hint), hintToBroadcastRight(hint))
-          .orElse { if (hintToShuffleReplicateNL(hint)) createCartesianProduct() else None }
-          .getOrElse(createJoinWithoutHint())
-
+        if (hint.isEmpty) {
+          createJoinWithoutHint()
+        } else {
+          createBroadcastNLJoin(hintToBroadcastLeft(hint), hintToBroadcastRight(hint))
+            .orElse { if (hintToShuffleReplicateNL(hint)) createCartesianProduct() else None }
+            .getOrElse(createJoinWithoutHint())
+        }
       // --- Cases where this strategy does not apply ---------------------------------------------

       case _ => Nil
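
Below is a minimal, self-contained Scala sketch of the pattern the SparkStrategies hunks introduce: when neither side of a join carries a hint, the new JoinHint.isEmpty lets the planner skip the hint-driven candidates and fall straight through to the default, hint-free selection. This is not the actual Spark planner code; the names HintInfoSketch, JoinHintSketch, and JoinSelectionSketch are invented for illustration, and only isEmpty mirrors the method added in hints.scala.

// Simplified stand-in for HintInfo: just a strategy name.
case class HintInfoSketch(strategy: String)

// Simplified stand-in for JoinHint, with the same isEmpty shortcut
// added in the patch above: true only when neither side is hinted.
case class JoinHintSketch(
    leftHint: Option[HintInfoSketch],
    rightHint: Option[HintInfoSketch]) {
  def isEmpty: Boolean = leftHint.isEmpty && rightHint.isEmpty
}

object JoinSelectionSketch {
  // Hypothetical hint-driven candidates; each returns Some(plan name)
  // only when the corresponding hint is present.
  private def createBroadcastHashJoin(hint: JoinHintSketch): Option[String] =
    hint.leftHint.orElse(hint.rightHint).collect {
      case HintInfoSketch("broadcast") => "BroadcastHashJoin"
    }

  private def createShuffleHashJoin(hint: JoinHintSketch): Option[String] =
    hint.leftHint.orElse(hint.rightHint).collect {
      case HintInfoSketch("shuffle_hash") => "ShuffledHashJoin"
    }

  // Default selection used when no hint applies.
  private def createJoinWithoutHint(): String = "SortMergeJoin"

  def select(hint: JoinHintSketch): String = {
    if (hint.isEmpty) {
      // Fast path: with no hints, none of the hint-driven branches can fire,
      // so go directly to the hint-free selection.
      createJoinWithoutHint()
    } else {
      createBroadcastHashJoin(hint)
        .orElse(createShuffleHashJoin(hint))
        .getOrElse(createJoinWithoutHint())
    }
  }
}

object SketchDemo extends App {
  val noHint = JoinHintSketch(None, None)
  val broadcastLeft = JoinHintSketch(Some(HintInfoSketch("broadcast")), None)
  println(JoinSelectionSketch.select(noHint))        // SortMergeJoin
  println(JoinSelectionSketch.select(broadcastLeft)) // BroadcastHashJoin
}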