diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 1162a5394221c..3e9a07feb8c62 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -505,6 +505,10 @@ object Cast extends QueryErrorsBase {
             "config" -> toSQLConf(fallbackConf.get._1),
             "configVal" -> toSQLValue(fallbackConf.get._2, StringType)))
 
+      case _ if fallbackConf.isEmpty && Cast.canTryCast(from, to) =>
+        // Suggest try_cast for valid casts that fail in ANSI mode
+        withFunSuggest("try_cast")
+
       case _ =>
         DataTypeMismatch(
           errorSubClass = "CAST_WITHOUT_SUGGESTION",
@@ -588,8 +592,19 @@ case class Cast(
             Some(SQLConf.STORE_ASSIGNMENT_POLICY.key ->
               SQLConf.StoreAssignmentPolicy.LEGACY.toString))
           } else {
-            Cast.typeCheckFailureMessage(child.dataType, dataType,
-              Some(SQLConf.ANSI_ENABLED.key -> "false"))
+            // Check if there's a config workaround for this cast failure:
+            // - If canTryCast supports this cast, pass None here and let typeCheckFailureMessage
+            //   suggest try_cast (which is more user-friendly than disabling ANSI mode)
+            // - If canTryCast doesn't support it BUT the cast works in non-ANSI mode,
+            //   suggest disabling ANSI mode as a migration path
+            // - Otherwise, pass None and let typeCheckFailureMessage decide
+            val fallbackConf = if (!Cast.canTryCast(child.dataType, dataType) &&
+                Cast.canCast(child.dataType, dataType)) {
+              Some(SQLConf.ANSI_ENABLED.key -> "false")
+            } else {
+              None
+            }
+            Cast.typeCheckFailureMessage(child.dataType, dataType, fallbackConf)
           }
         case EvalMode.TRY =>
           Cast.typeCheckFailureMessage(child.dataType, dataType, None)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
index b03d97abd9002..b76aec6d6ce0e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastWithAnsiOnSuite.scala
@@ -211,6 +211,30 @@ class CastWithAnsiOnSuite extends CastSuiteBase with QueryErrorsBase {
     }
   }
 
+  test("SPARK-49635: suggest try_cast for complex type casts") {
+    // Array[Int] to Array[Binary]: canTryCast=true (uses canCast), canAnsiCast=false
+    // Should suggest try_cast, not config
+    val arrayIntType = ArrayType(IntegerType, containsNull = false)
+    val arrayBinaryType = ArrayType(BinaryType, containsNull = false)
+    val arrayIntLiteral = Literal.create(Seq(1, 2, 3), arrayIntType)
+
+    val arrayResult = cast(arrayIntLiteral, arrayBinaryType).checkInputDataTypes()
+    evalMode match {
+      case EvalMode.ANSI =>
+        assert(arrayResult ==
+          DataTypeMismatch(
+            errorSubClass = "CAST_WITH_FUNC_SUGGESTION",
+            messageParameters = Map(
+              "srcType" -> toSQLType(arrayIntType),
+              "targetType" -> toSQLType(arrayBinaryType),
+              "functionNames" -> "`try_cast`"
+            )
+          )
+        )
+      case _ =>
+    }
+  }
+
   test("ANSI mode: disallow variant cast to non-nullable types") {
     // Array
     val variantVal = new VariantVal(Array[Byte](12, 3), Array[Byte](1, 0, 0))