diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
index e8e2a7bbabcd4..74d62872ea72a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
@@ -201,9 +201,10 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
       case s: StructType => s // Don't convert struct types to some other type of Seq[StructField]
       // Handle Seq[TreeNode] in TreeNode parameters.
       case s: Seq[_] => s.map {
-        case arg: TreeNode[_] if containsChild(arg) =>
+        case arg: TreeNode[_] if containsChild(arg) && remainingOldChildren.size > 0 =>
           val newChild = remainingNewChildren.remove(0)
           val oldChild = remainingOldChildren.remove(0)
+          assert(arg == oldChild)
           if (newChild fastEquals oldChild) {
             oldChild
           } else {
@@ -214,9 +215,10 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
         case null => null
       }
       case m: Map[_, _] => m.mapValues {
-        case arg: TreeNode[_] if containsChild(arg) =>
+        case arg: TreeNode[_] if containsChild(arg) && remainingOldChildren.size > 0 =>
           val newChild = remainingNewChildren.remove(0)
           val oldChild = remainingOldChildren.remove(0)
+          assert(arg == oldChild)
           if (newChild fastEquals oldChild) {
             oldChild
           } else {
@@ -226,9 +228,10 @@ abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
         case nonChild: AnyRef => nonChild
         case null => null
       }.view.force // `mapValues` is lazy and we need to force it to materialize
-      case arg: TreeNode[_] if containsChild(arg) =>
+      case arg: TreeNode[_] if containsChild(arg) && remainingOldChildren.size > 0 =>
        val newChild = remainingNewChildren.remove(0)
        val oldChild = remainingOldChildren.remove(0)
+       assert(arg == oldChild)
        if (newChild fastEquals oldChild) {
          oldChild
        } else {
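
For context, a minimal self-contained sketch of the pattern this patch hardens, not the Spark code itself: withNewChildren pairs each child argument it visits positionally against buffers of old and new children. One way the buffers can drain early is when the same child object appears more than once among a node's constructor arguments; an unguarded remove(0) then throws IndexOutOfBoundsException. The size guard skips the extra occurrences, and the new assert checks that the positional pairing really matches the argument being visited. The Expr and Demo names below are hypothetical stand-ins, not Spark APIs, and plain == stands in for Spark's fastEquals.

    // Hypothetical stand-in for TreeNode: a node whose constructor args
    // (like a Product's fields) may reference the same child more than once.
    case class Expr(op: String, args: Seq[Any]) {
      // Children are the Expr-typed args, deduplicated, in order of appearance.
      def children: Seq[Expr] = args.collect { case e: Expr => e }.distinct

      def withNewChildren(newChildren: Seq[Expr]): Expr = {
        assert(newChildren.size == children.size, "Incorrect number of children")
        val remainingNew = newChildren.toBuffer
        val remainingOld = children.toBuffer
        val newArgs = args.map {
          // The size guard mirrors the patch: if the same child occurs twice
          // among `args`, the second visit finds the buffers already drained,
          // and an unguarded remove(0) would throw IndexOutOfBoundsException.
          case e: Expr if remainingOld.nonEmpty =>
            val newChild = remainingNew.remove(0)
            val oldChild = remainingOld.remove(0)
            assert(e == oldChild) // positional pairing must match the visited arg
            if (newChild == oldChild) oldChild else newChild
          case other => other
        }
        copy(args = newArgs)
      }
    }

    object Demo extends App {
      val leaf = Expr("x", Nil)
      // `leaf` appears twice among the args but only once among `children`,
      // so the second occurrence hits the guard and is left unchanged.
      val tree = Expr("plus", Seq(leaf, leaf))
      println(tree.withNewChildren(Seq(Expr("y", Nil))))
      // Expr(plus,List(Expr(y,List()), Expr(x,List())))
    }

Under these assumptions the pre-patch behavior would crash on the duplicated reference, while the guarded version degrades to leaving the extra occurrence untouched, which is what the added assert makes observable during testing.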