@@ -62,7 +62,8 @@ class ErrorClassesJsonReader(jsonFileURLs: Seq[URL]) {
     }
     if (util.SparkEnvUtils.isTesting) {
       val placeHoldersNum = ErrorClassesJsonReader.TEMPLATE_REGEX.findAllIn(messageTemplate).length
-      if (placeHoldersNum < sanitizedParameters.size) {
+      if (placeHoldersNum < sanitizedParameters.size &&
+          !ErrorClassesJsonReader.MORE_PARAMS_ALLOWLIST.contains(errorClass)) {
         throw SparkException.internalError(
           s"Found unused message parameters of the error class '$errorClass'. " +
           s"Its error message format has $placeHoldersNum placeholders, " +
@@ -123,6 +124,8 @@ class ErrorClassesJsonReader(jsonFileURLs: Seq[URL]) {
 private object ErrorClassesJsonReader {
   private val TEMPLATE_REGEX = "<([a-zA-Z0-9_-]+)>".r
 
+  private val MORE_PARAMS_ALLOWLIST = Array("CAST_INVALID_INPUT", "CAST_OVERFLOW")
+
   private val mapper: JsonMapper = JsonMapper.builder()
     .addModule(DefaultScalaModule)
     .build()
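Review note: a minimal, self-contained sketch of the relaxed guard above, under simplified names; the real check lives in ErrorClassesJsonReader and throws SparkException.internalError, and the template text in the usage example below is invented for illustration.

object PlaceholderGuardSketch {
  // Same regex and allowlist as the diff above.
  private val TEMPLATE_REGEX = "<([a-zA-Z0-9_-]+)>".r
  private val MORE_PARAMS_ALLOWLIST = Array("CAST_INVALID_INPUT", "CAST_OVERFLOW")

  def check(errorClass: String, template: String, parameters: Map[String, String]): Unit = {
    val placeHoldersNum = TEMPLATE_REGEX.findAllIn(template).length
    // Surplus parameters are tolerated only for allowlisted error classes, so
    // a template without an <ansiConfig> placeholder can still be paired with
    // an "ansiConfig" parameter.
    if (placeHoldersNum < parameters.size &&
        !MORE_PARAMS_ALLOWLIST.contains(errorClass)) {
      throw new IllegalStateException(
        s"Found unused message parameters of the error class '$errorClass'.")
    }
  }
}

For example, check("CAST_OVERFLOW", "Casting <value> to <targetType> overflows.", Map("value" -> "100000", "targetType" -> "\"SMALLINT\"", "ansiConfig" -> "\"spark.sql.ansi.enabled\"")) passes (two placeholders, three parameters, allowlisted class), while the same surplus under a non-allowlisted class still throws.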
@@ -207,7 +207,8 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
       messageParameters = Map(
         "expression" -> convertedValueStr,
         "sourceType" -> toSQLType(StringType),
-        "targetType" -> toSQLType(to)),
+        "targetType" -> toSQLType(to),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = getQueryContext(context),
       summary = getSummary(context))
   }
@@ -225,8 +226,11 @@ private[sql] object DataTypeErrors extends DataTypeErrorsBase {
   def castingCauseOverflowError(t: String, from: DataType, to: DataType): ArithmeticException = {
     new SparkArithmeticException(
       errorClass = "CAST_OVERFLOW",
-      messageParameters =
-        Map("value" -> t, "sourceType" -> toSQLType(from), "targetType" -> toSQLType(to)),
+      messageParameters = Map(
+        "value" -> t,
+        "sourceType" -> toSQLType(from),
+        "targetType" -> toSQLType(to),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = Array.empty,
       summary = "")
   }
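Review note: the new entries rely on toSQLConf to render the conf key, and judging by the expected values in the suites and golden files below ("\"spark.sql.ansi.enabled\""), it wraps the key in double quotes. A one-line sketch of that assumed behavior, under a stand-in name:

// Assumed behavior of toSQLConf (toSQLConfSketch is a stand-in name; the
// real helper comes from DataTypeErrorsBase).
def toSQLConfSketch(conf: String): String = "\"" + conf + "\""

assert(toSQLConfSketch("spark.sql.ansi.enabled") == "\"spark.sql.ansi.enabled\"")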
@@ -109,7 +109,8 @@ private[sql] trait ExecutionErrors extends DataTypeErrorsBase {
       messageParameters = Map(
         "expression" -> sqlValue,
         "sourceType" -> toSQLType(from),
-        "targetType" -> toSQLType(to)),
+        "targetType" -> toSQLType(to),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = getQueryContext(context),
       summary = getSummary(context))
   }
@@ -79,7 +79,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionErrors {
       messageParameters = Map(
         "value" -> toSQLValue(t, from),
         "sourceType" -> toSQLType(from),
-        "targetType" -> toSQLType(to)),
+        "targetType" -> toSQLType(to),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = Array.empty,
       summary = "")
   }
@@ -123,7 +124,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionErrors {
       messageParameters = Map(
         "expression" -> toSQLValue(s, StringType),
         "sourceType" -> toSQLType(StringType),
-        "targetType" -> toSQLType(BooleanType)),
+        "targetType" -> toSQLType(BooleanType),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = getQueryContext(context),
       summary = getSummary(context))
   }
@@ -137,7 +139,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionErrors {
       messageParameters = Map(
         "expression" -> toSQLValue(s, StringType),
         "sourceType" -> toSQLType(StringType),
-        "targetType" -> toSQLType(to)),
+        "targetType" -> toSQLType(to),
+        "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")),
       context = getQueryContext(context),
       summary = getSummary(context))
   }
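Review note: this is the fifth call site that inlines the same "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled") pair. A hypothetical shared definition, not part of this change set, could keep the key and the conf name from drifting apart:

private[sql] trait AnsiCastErrorParams { self: DataTypeErrorsBase =>
  // Hypothetical helper: one definition of the shared parameter instead of
  // five inlined copies across the error builders.
  protected def ansiConfigParam: (String, String) =
    "ansiConfig" -> toSQLConf("spark.sql.ansi.enabled")
}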
@@ -1163,7 +1163,8 @@ class DateTimeUtilsSuite extends SparkFunSuite with Matchers with SQLHelper {
       parameters = Map(
         "expression" -> s"'$invalidTime'",
         "sourceType" -> "\"STRING\"",
-        "targetType" -> "\"TIME(6)\""))
+        "targetType" -> "\"TIME(6)\"",
+        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
   }
 }
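Review note: the suite updates follow from the expected parameter map being compared in full against the parameters carried by the exception, so every error that gains "ansiConfig" needs it asserted as well. A simplified sketch of that exact-map comparison (the real assertion is SparkFunSuite's checkError helper):

// Simplified: an exact comparison fails once the error starts carrying
// "ansiConfig" but the expected map omits it.
def checkParamsSketch(actual: Map[String, String], expected: Map[String, String]): Unit =
  assert(actual == expected, s"expected $expected, but got $actual")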
@@ -171,6 +171,7 @@ class TimeFormatterSuite extends SparkFunSuite with SQLHelper {
       parameters = Map(
         "expression" -> "'x123'",
         "sourceType" -> "\"STRING\"",
-        "targetType" -> "\"TIME(6)\""))
+        "targetType" -> "\"TIME(6)\"",
+        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
     }
   }
@@ -382,7 +382,8 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester with SQLHelper
       parameters = Map(
         "expression" -> "'str'",
         "sourceType" -> "\"STRING\"",
-        "targetType" -> "\"DECIMAL(10,0)\""))
+        "targetType" -> "\"DECIMAL(10,0)\"",
+        "ansiConfig" -> "\"spark.sql.ansi.enabled\""))
   }
 
   test("SPARK-35841: Casting string to decimal type doesn't work " +
@@ -471,6 +471,7 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'invalid_cast_error_expected'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -661,6 +662,7 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'name1'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
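Review note: in the golden-file hunks, "ansiConfig" sorts to the top of each messageParameters object, consistent with the keys being serialized in alphabetical order. A small sketch of that assumed ordering (plain Scala, not the real golden-file writer):

// Assumed: keys are emitted sorted, which is why "ansiConfig" precedes
// "expression", "sourceType" and "targetType" above.
val params = Map(
  "expression" -> "'hello'",
  "sourceType" -> "\"STRING\"",
  "targetType" -> "\"INT\"",
  "ansiConfig" -> "\"spark.sql.ansi.enabled\"")
params.toSeq.sortBy(_._1).foreach { case (key, value) =>
  println("\"" + key + "\" : \"" + value.replace("\"", "\\\"") + "\"")
}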
@@ -449,6 +449,7 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'NaN'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -842,6 +842,7 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""
@@ -884,6 +885,7 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"INT\"",
"targetType" : "\"SMALLINT\"",
"value" : "100000"
@@ -1000,6 +1002,7 @@ org.apache.spark.SparkArithmeticException
"errorClass" : "CAST_OVERFLOW",
"sqlState" : "22003",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"sourceType" : "\"DOUBLE\"",
"targetType" : "\"INT\"",
"value" : "1.0E10D"
@@ -1059,6 +1062,7 @@ org.apache.spark.SparkNumberFormatException
"errorClass" : "CAST_INVALID_INPUT",
"sqlState" : "22018",
"messageParameters" : {
"ansiConfig" : "\"spark.sql.ansi.enabled\"",
"expression" : "'hello'",
"sourceType" : "\"STRING\"",
"targetType" : "\"INT\""