core/src/main/resources/error/error-classes.json (6 additions, 10 deletions)

@@ -76,6 +76,12 @@
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
]
},
"CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
"message" : [
"Failed to merge incompatible data types <left> and <right>."
],
"sqlState" : "42825"
},
"CANNOT_MODIFY_CONFIG" : {
"message" : [
"Cannot modify the value of the Spark config: <key>.",
@@ -4177,21 +4183,11 @@
       "Failed parsing <simpleString>: <raw>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2123" : {
-    "message" : [
-      "Failed to merge fields '<leftName>' and '<rightName>'. <message>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2124" : {
     "message" : [
       "Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2125" : {
-    "message" : [
-      "Failed to merge incompatible data types <leftCatalogString> and <rightCatalogString>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2126" : {
     "message" : [
       "Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."
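
The new entry replaces two legacy temp ids with a stable, documented error class carrying SQLSTATE 42825. As a minimal sketch (not part of the diff) of what callers gain, both fields are reachable through the SparkThrowable interface; describeError is a hypothetical helper:

  import org.apache.spark.SparkThrowable

  // Hypothetical helper: the named error class and SQLSTATE are stable,
  // unlike _LEGACY_ERROR_TEMP_2123 / _LEGACY_ERROR_TEMP_2125.
  def describeError(e: SparkThrowable): String =
    s"${e.getErrorClass} (SQLSTATE ${e.getSqlState})"

  // For a failed merge this yields:
  // "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE (SQLSTATE 42825)"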

QueryExecutionErrors.scala

@@ -1291,16 +1291,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       messageParameters = Map("simpleString" -> StructType.simpleString, "raw" -> raw))
   }
 
-  def failedMergingFieldsError(leftName: String, rightName: String, e: Throwable): Throwable = {
-    new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2123",
-      messageParameters = Map(
-        "leftName" -> leftName,
-        "rightName" -> rightName,
-        "message" -> e.getMessage),
-      cause = null)
-  }
-
   def cannotMergeDecimalTypesWithIncompatibleScaleError(
       leftScale: Int, rightScale: Int): Throwable = {
     new SparkException(
@@ -1313,10 +1303,10 @@
 
   def cannotMergeIncompatibleDataTypesError(left: DataType, right: DataType): Throwable = {
     new SparkException(
-      errorClass = "_LEGACY_ERROR_TEMP_2125",
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
       messageParameters = Map(
-        "leftCatalogString" -> left.catalogString,
-        "rightCatalogString" -> right.catalogString),
+        "left" -> toSQLType(left),
+        "right" -> toSQLType(right)),
       cause = null)
   }
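
Switching from catalogString to toSQLType also changes how the types render in the message: lowercase unquoted names become uppercase SQL types in double quotes. A sketch of the difference, with expected values shown as comments (toSQLType is a private[sql] helper from QueryErrorsBase, so it is not callable from user code):

  import org.apache.spark.sql.types.{FloatType, LongType}

  // Old parameters used DataType.catalogString:
  FloatType.catalogString  // "float"
  LongType.catalogString   // "bigint"
  // old message: ...incompatible data types float and bigint.

  // New parameters use toSQLType, which uppercases and quotes the type:
  // toSQLType(FloatType) == "\"FLOAT\""
  // toSQLType(LongType)  == "\"BIGINT\""
  // new message: ...incompatible data types "FLOAT" and "BIGINT".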

StructType.scala

@@ -612,7 +612,8 @@ object StructType extends AbstractDataType {
         nullable = leftNullable || rightNullable)
     } catch {
       case NonFatal(e) =>
-        throw QueryExecutionErrors.failedMergingFieldsError(leftName, rightName, e)
+        throw QueryExecutionErrors.cannotMergeIncompatibleDataTypesError(
+          leftType, rightType)
     }
   }
   .orElse {
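
One behavioral consequence of this catch-all: a decimal scale mismatch raised inside the try block by cannotMergeDecimalTypesWithIncompatibleScaleError is caught via NonFatal and rethrown as the general incompatible-type error, which is exactly what the updated StructTypeSuite cases below assert. An illustrative sketch (merge is private[sql], so this assumes code living under org.apache.spark.sql):

  import org.apache.spark.sql.types.StructType

  val left  = StructType.fromDDL("c1 DECIMAL(10, 5)")
  val right = StructType.fromDDL("c1 DECIMAL(12, 2)")

  // Previously failed with "Failed to merge decimal types ..."; now it
  // surfaces as CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE with the field types
  // rendered as "DECIMAL(10,5)" and "DECIMAL(12,2)".
  left.merge(right)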

DataTypeSuite.scala

@@ -157,11 +157,9 @@ class DataTypeSuite extends SparkFunSuite {
       exception = intercept[SparkException] {
         left.merge(right)
       },
-      errorClass = "_LEGACY_ERROR_TEMP_2123",
-      parameters = Map(
-        "leftName" -> "b",
-        "rightName" -> "b",
-        "message" -> "Failed to merge incompatible data types float and bigint.")
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"FLOAT\"", "right" -> "\"BIGINT\""
+      )
     )
   }
 

StructTypeSuite.scala

@@ -434,15 +434,21 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {

     // Invalid merge cases:
 
-    var e = intercept[SparkException] {
-      StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
-    }
-    assert(e.getMessage.contains("Failed to merge decimal types"))
+    checkError(
+      exception = intercept[SparkException] {
+        StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+      },
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"DECIMAL(10,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+    )
 
-    e = intercept[SparkException] {
-      StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
-    }
-    assert(e.getMessage.contains("Failed to merge decimal types"))
+    checkError(
+      exception = intercept[SparkException] {
+        StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+      },
+      errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+      parameters = Map("left" -> "\"DECIMAL(12,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+    )
   }
 
   test("SPARK-39143: Test parsing default column values out of struct types") {

@@ -448,14 +448,18 @@ abstract class OrcSuite
         spark.read.orc(basePath).columns.length
       }.getCause
 
-      val innerMessage = orcImp match {
-        case "native" => exception.getMessage
-        case "hive" => exception.getCause.getMessage
+      val innerException = orcImp match {
+        case "native" => exception
+        case "hive" => exception.getCause
        case impl =>
          throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
      }
 
-      assert(innerMessage.contains("Failed to merge incompatible data types"))
+      checkError(
+        exception = innerException.asInstanceOf[SparkException],
+        errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+        parameters = Map("left" -> "\"BIGINT\"", "right" -> "\"STRING\"")
+      )
     }
 
     // it is ok if no schema merging
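
For reference, a sketch of the user-facing scenario the test above exercises. The paths, column layout, and the ORC mergeSchema read option are illustrative assumptions (a SparkSession named spark and a temp directory basePath are presumed in scope), not details taken from the diff:

  // Two ORC files whose shared column disagrees on type (BIGINT vs STRING).
  spark.range(3).selectExpr("id AS a").write.orc(s"$basePath/part=1")
  spark.range(3).selectExpr("CAST(id AS STRING) AS a").write.orc(s"$basePath/part=2")

  // With schema merging enabled, reading both should now fail with the
  // named error class rather than a message-only legacy error:
  // CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE, left = "BIGINT", right = "STRING"
  spark.read.option("mergeSchema", "true").orc(basePath)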