diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 8cf46cb7aade1..8910ca86de477 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -1578,6 +1578,11 @@
     ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY" : {
+    "message" : [
+      "Unsupported data source type for direct query on files: <dataSourceType>"
+    ]
+  },
   "UNSUPPORTED_DATATYPE" : {
     "message" : [
       "Unsupported data type <typeName>."
@@ -5107,11 +5112,6 @@
       "failed to evaluate expression <expression>: <msg>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_2332" : {
-    "message" : [
-      "<msg>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2400" : {
     "message" : [
       "The <name> expression must evaluate to a constant value, but got <limitExpr>."
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
index dc4fed49c1cfd..9255aa2effc5d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/rules.scala
@@ -78,9 +78,10 @@ class ResolveSQLOnFile(sparkSession: SparkSession) extends Rule[LogicalPlan] {
         case e: Exception =>
           // the provider is valid, but failed to create a logical plan
           u.failAnalysis(
-            errorClass = "_LEGACY_ERROR_TEMP_2332",
-            messageParameters = Map("msg" -> e.getMessage),
-            cause = e)
+            errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+            messageParameters = Map("dataSourceType" -> u.multipartIdentifier.head),
+            cause = e
+          )
       }
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 20fa5fee3aa12..377be2e720aff 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -1634,15 +1634,23 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
     checkErrorTableNotFound(e, "`no_db`.`no_table`",
       ExpectedContext("no_db.no_table", 14, 13 + "no_db.no_table".length))
 
-    e = intercept[AnalysisException] {
-      sql("select * from json.invalid_file")
-    }
-    assert(e.message.contains("Path does not exist"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("select * from json.invalid_file")
+      },
+      errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+      parameters = Map("dataSourceType" -> "json"),
+      context = ExpectedContext("json.invalid_file", 14, 30)
+    )
 
-    e = intercept[AnalysisException] {
-      sql(s"select id from `org.apache.spark.sql.hive.orc`.`file_path`")
-    }
-    assert(e.message.contains("Hive built-in ORC data source must be used with Hive support"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql(s"select id from `org.apache.spark.sql.hive.orc`.`file_path`")
+      },
+      errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+      parameters = Map("dataSourceType" -> "org.apache.spark.sql.hive.orc"),
+      context = ExpectedContext("`org.apache.spark.sql.hive.orc`.`file_path`", 15, 57)
+    )
 
     e = intercept[AnalysisException] {
       sql(s"select id from `org.apache.spark.sql.sources.HadoopFsRelationProvider`.`file_path`")
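The sql/core suite above now pins the error class and message parameters instead of substring-matching the message text. For the same check outside `checkError`, a minimal sketch of reading the structured fields directly off the exception (assumes a ScalaTest suite with a `spark` session in scope; `getErrorClass` and `getMessageParameters` are the `SparkThrowable` accessors):

```scala
import org.apache.spark.sql.AnalysisException

// Sketch: the assertions checkError performs, written out by hand.
val e = intercept[AnalysisException] {
  spark.sql("select * from json.invalid_file")
}
assert(e.getErrorClass == "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY")
// Message parameters are exposed as a java.util.Map[String, String]:
assert(e.getMessageParameters.get("dataSourceType") == "json")
```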
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index a902cb3a69ec5..31dfbedbbb492 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -1401,16 +1401,24 @@ abstract class SQLQuerySuiteBase extends QueryTest with SQLTestUtils with TestHi
     withTempPath(f => {
       spark.range(100).toDF.write.parquet(f.getCanonicalPath)
 
-      var e = intercept[AnalysisException] {
-        sql(s"select id from hive.`${f.getCanonicalPath}`")
-      }
-      assert(e.message.contains("Unsupported data source type for direct query on files: hive"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(s"select id from hive.`${f.getCanonicalPath}`")
+        },
+        errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+        parameters = Map("dataSourceType" -> "hive"),
+        context = ExpectedContext(s"hive.`${f.getCanonicalPath}`", 15, 104)
+      )
 
       // data source type is case insensitive
-      e = intercept[AnalysisException] {
-        sql(s"select id from HIVE.`${f.getCanonicalPath}`")
-      }
-      assert(e.message.contains("Unsupported data source type for direct query on files: HIVE"))
+      checkError(
+        exception = intercept[AnalysisException] {
+          sql(s"select id from HIVE.`${f.getCanonicalPath}`")
+        },
+        errorClass = "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY",
+        parameters = Map("dataSourceType" -> "HIVE"),
+        context = ExpectedContext(s"HIVE.`${f.getCanonicalPath}`", 15, 104)
+      )
     })
   }
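A note on the `ExpectedContext` values used throughout these tests: `start` is the 0-based offset of the queried fragment within the SQL text, and `stop` is inclusive, so `stop = start + fragment.length - 1`. A small self-contained sketch of the arithmetic (`fragmentRange` is a hypothetical helper, not part of Spark's test API):

```scala
// Derive the (start, stop) pair that ExpectedContext expects.
object ExpectedContextOffsets extends App {
  // Hypothetical helper: locate a fragment and return 0-based inclusive offsets.
  def fragmentRange(sqlText: String, fragment: String): (Int, Int) = {
    val start = sqlText.indexOf(fragment)
    (start, start + fragment.length - 1)
  }

  // "select * from " is 14 characters, so json.invalid_file spans offsets 14..30,
  // matching ExpectedContext("json.invalid_file", 14, 30) in the sql/core test.
  assert(fragmentRange("select * from json.invalid_file", "json.invalid_file") == (14, 30))
}
```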