
Commit 4c4585b

itholic authored and Max Gekk committed
[SPARK-42318][SPARK-42319][SQL] Assign name to _LEGACY_ERROR_TEMP_(2123|2125)
### What changes were proposed in this pull request?

This PR proposes to assign the name CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE to _LEGACY_ERROR_TEMP_2123 and _LEGACY_ERROR_TEMP_2125.

### Why are the changes needed?

We should assign proper names to the _LEGACY_ERROR_TEMP_* error classes.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`

Closes #39891 from itholic/LEGACY_2125.

Authored-by: itholic <haejoon.lee@databricks.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
(cherry picked from commit b11fba0)
Signed-off-by: Max Gekk <max.gekk@gmail.com>
1 parent c3584dc commit 4c4585b
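To illustrate the user-facing effect, here is a minimal sketch (assuming a build of this branch; `StructType.merge` is package-private, so this mirrors the `DataTypeSuite` case updated below rather than public API usage):

```scala
import org.apache.spark.SparkException
import org.apache.spark.sql.types.StructType

val left = StructType.fromDDL("b FLOAT")
val right = StructType.fromDDL("b BIGINT")

try left.merge(right) catch {
  case e: SparkException =>
    // Previously: _LEGACY_ERROR_TEMP_2123 ("Failed to merge fields 'b' and 'b'. ...").
    // Now the named class, with quoted SQL type names as message parameters.
    assert(e.getErrorClass == "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE")
}
```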

File tree: 6 files changed, +36 -41 lines


core/src/main/resources/error/error-classes.json

Lines changed: 6 additions & 10 deletions
@@ -76,6 +76,12 @@
       "Could not load Protobuf class with name <protobufClassName>. <explanation>."
     ]
   },
+  "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
+    "message" : [
+      "Failed to merge incompatible data types <left> and <right>."
+    ],
+    "sqlState" : "42825"
+  },
   "CANNOT_MODIFY_CONFIG" : {
     "message" : [
       "Cannot modify the value of the Spark config: <key>.",
@@ -4183,21 +4189,11 @@
       "Failed parsing <simpleString>: <raw>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2123" : {
-    "message" : [
-      "Failed to merge fields '<leftName>' and '<rightName>'. <message>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2124" : {
     "message" : [
       "Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2125" : {
-    "message" : [
-      "Failed to merge incompatible data types <leftCatalogString> and <rightCatalogString>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2126" : {
     "message" : [
       "Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."

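With the parameters the updated tests below supply (left = `"FLOAT"`, right = `"BIGINT"`), the new template renders along these lines (the `[ERROR_CLASS]` prefix follows Spark's standard error format; shown as a sketch, not captured output):

```
[CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE] Failed to merge incompatible data types "FLOAT" and "BIGINT".
```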
sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala

Lines changed: 3 additions & 13 deletions
@@ -1291,16 +1291,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       messageParameters = Map("simpleString" -> StructType.simpleString, "raw" -> raw))
   }

-  def failedMergingFieldsError(leftName: String, rightName: String, e: Throwable): Throwable = {
-    new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2123",
-      messageParameters = Map(
-        "leftName" -> leftName,
-        "rightName" -> rightName,
-        "message" -> e.getMessage),
-      cause = null)
-  }
-
   def cannotMergeDecimalTypesWithIncompatibleScaleError(
       leftScale: Int, rightScale: Int): Throwable = {
     new SparkException(
@@ -1313,10 +1303,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {

   def cannotMergeIncompatibleDataTypesError(left: DataType, right: DataType): Throwable = {
     new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2125",
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
       messageParameters = Map(
-        "leftCatalogString" -> left.catalogString,
-        "rightCatalogString" -> right.catalogString),
+        "left" -> toSQLType(left),
+        "right" -> toSQLType(right)),
       cause = null)
   }
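Beyond the rename, the parameters change shape: the old message interpolated `DataType.catalogString`, while `toSQLType` (a `QueryErrorsBase` helper) yields the quoted uppercase SQL name. That behavior is inferred from the updated test expectations below; a sketch of the difference:

```scala
import org.apache.spark.sql.types.FloatType

FloatType.catalogString // "float", the old _LEGACY_ERROR_TEMP_2125 parameter style
FloatType.sql           // "FLOAT", which toSQLType renders as "\"FLOAT\"" in the message
```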

sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala

Lines changed: 2 additions & 1 deletion
@@ -612,7 +612,8 @@ object StructType extends AbstractDataType {
         nullable = leftNullable || rightNullable)
     } catch {
       case NonFatal(e) =>
-        throw QueryExecutionErrors.failedMergingFieldsError(leftName, rightName, e)
+        throw QueryExecutionErrors.cannotMergeIncompatibleDataTypesError(
+          leftType, rightType)
     }
   }
   .orElse {

sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala

Lines changed: 3 additions & 5 deletions
@@ -157,11 +157,9 @@ class DataTypeSuite extends SparkFunSuite {
       exception = intercept[SparkException] {
         left.merge(right)
       },
-      errorClass = "_LEGACY_ERROR_TEMP_2123",
-      parameters = Map(
-        "leftName" -> "b",
-        "rightName" -> "b",
-        "message" -> "Failed to merge incompatible data types float and bigint.")
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"FLOAT\"", "right" -> "\"BIGINT\""
+      )
     )
   }

sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala

Lines changed: 14 additions & 8 deletions
@@ -434,15 +434,21 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {

     // Invalid merge cases:

-    var e = intercept[SparkException] {
-      StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
-    }
-    assert(e.getMessage.contains("Failed to merge decimal types"))
+    checkError(
+      exception = intercept[SparkException] {
+        StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+      },
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"DECIMAL(10,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+    )

-    e = intercept[SparkException] {
-      StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
-    }
-    assert(e.getMessage.contains("Failed to merge decimal types"))
+    checkError(
+      exception = intercept[SparkException] {
+        StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+      },
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"DECIMAL(12,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+    )
   }

   test("SPARK-39143: Test parsing default column values out of struct types") {

sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala

Lines changed: 8 additions & 4 deletions
@@ -448,14 +448,18 @@ abstract class OrcSuite
       spark.read.orc(basePath).columns.length
     }.getCause

-    val innerMessage = orcImp match {
-      case "native" => exception.getMessage
-      case "hive" => exception.getCause.getMessage
+    val innerException = orcImp match {
+      case "native" => exception
+      case "hive" => exception.getCause
       case impl =>
         throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
     }

-    assert(innerMessage.contains("Failed to merge incompatible data types"))
+    checkError(
+      exception = innerException.asInstanceOf[SparkException],
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"BIGINT\"", "right" -> "\"STRING\"")
+    )
   }

   // it is ok if no schema merging
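For context, the scenario this ORC test exercises can be sketched as follows (hypothetical paths and session; the `mergeSchema` option is assumed to drive the conflicting-type merge):

```scala
import spark.implicits._

// One file writes c as BIGINT, the other as STRING (basePath is hypothetical).
spark.range(3).toDF("c").write.orc(s"$basePath/part=1")
Seq("x", "y", "z").toDF("c").write.orc(s"$basePath/part=2")

// Merging the two schemas on read should now surface the named error class,
// with left = "\"BIGINT\"" and right = "\"STRING\"" as checked above.
spark.read.option("mergeSchema", "true").orc(basePath).printSchema()
```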
