diff --git a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
index 71ab0ddf2d6f..5db732b19b9e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -1107,7 +1107,7 @@ class Dataset[T] private[sql](
    */
   @scala.annotation.varargs
   def sort(sortCol: String, sortCols: String*): Dataset[T] = {
-    sort((sortCol +: sortCols).map(apply) : _*)
+    sort((sortCol +: sortCols).map(Column(_)) : _*)
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index 73098cdb9247..40235e32d35d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -1304,6 +1304,19 @@ class DatasetSuite extends QueryTest with SharedSQLContext {
       assert(rlike3.count() == 0)
     }
   }
+
+  test("SPARK-21538: Attribute resolution inconsistency in Dataset API") {
+    val df = spark.range(3).withColumnRenamed("id", "x")
+    val expected = Row(0) :: Row(1) :: Row(2) :: Nil
+    checkAnswer(df.sort("id"), expected)
+    checkAnswer(df.sort(col("id")), expected)
+    checkAnswer(df.sort($"id"), expected)
+    checkAnswer(df.sort('id), expected)
+    checkAnswer(df.orderBy("id"), expected)
+    checkAnswer(df.orderBy(col("id")), expected)
+    checkAnswer(df.orderBy($"id"), expected)
+    checkAnswer(df.orderBy('id), expected)
+  }
 }
 
 case class WithImmutableMap(id: String, map_test: scala.collection.immutable.Map[Long, String])
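
The inconsistency the patch targets can also be seen in a small standalone program. The sketch below assumes a local SparkSession and an arbitrary object name, neither of which is part of the patch; it mirrors the scenario exercised by the new test.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.col

object Spark21538Repro {
  def main(args: Array[String]): Unit = {
    // Assumed setup: a local session purely for illustration.
    val spark = SparkSession.builder().master("local[*]").appName("SPARK-21538").getOrCreate()

    val df = spark.range(3).withColumnRenamed("id", "x")

    // Column-based sort: builds an unresolved Column("id"), which the analyzer
    // can still resolve against the child plan's output despite the rename.
    df.sort(col("id")).show()

    // String-based sort: before the patch this overload went through
    // Dataset.apply, which resolved "id" eagerly against the renamed schema
    // and failed; with map(Column(_)) both overloads take the same
    // analyzer-driven resolution path.
    df.sort("id").show()

    spark.stop()
  }
}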