diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
index 2144472937f9..e6f7b1d723af 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Column.scala
@@ -1042,7 +1042,7 @@ class Column(val expr: Expression) extends Logging {
    * @since 2.0.0
    */
   def name(alias: String): Column = withExpr {
-    Alias(expr, alias)()
+    Alias(normalizedExpr(), alias)()
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSelfJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSelfJoinSuite.scala
index fb58c9851224..3b3b54f75da5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSelfJoinSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSelfJoinSuite.scala
@@ -204,7 +204,7 @@ class DataFrameSelfJoinSuite extends QueryTest with SharedSparkSession {
     }
   }
 
-  test("SPARK-28344: don't fail as ambiguous self join when there is no join") {
+  test("SPARK-28344: don't fail if there is no ambiguous self join") {
     withSQLConf(
       SQLConf.FAIL_AMBIGUOUS_SELF_JOIN_ENABLED.key -> "true") {
       val df = Seq(1, 1, 2, 2).toDF("a")
@@ -212,6 +212,11 @@ class DataFrameSelfJoinSuite extends QueryTest with SharedSparkSession {
       checkAnswer(
         df.select(df("a").alias("x"), sum(df("a")).over(w)),
         Seq((1, 2), (1, 2), (2, 4), (2, 4)).map(Row.fromTuple))
+
+      val joined = df.join(spark.range(1)).select($"a")
+      checkAnswer(
+        joined.select(joined("a").alias("x"), sum(joined("a")).over(w)),
+        Seq((1, 2), (1, 2), (2, 4), (2, 4)).map(Row.fromTuple))
     }
   }
 }