diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala index 53be4bb651a46..5abf972f79fed 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/objects/objects.scala @@ -325,11 +325,30 @@ case class Invoke( @transient lazy val method = targetObject.dataType match { case ObjectType(cls) => - val m = cls.getMethods.find(_.getName == encodedFunctionName) - if (m.isEmpty) { - sys.error(s"Couldn't find $encodedFunctionName on $cls") - } else { - m + // Look up the method by function name + argument classes first. + try { + Some(cls.getMethod(encodedFunctionName, argClasses: _*)) + } catch { + case _: NoSuchMethodException => + // In some cases, e.g. when an arg class is Object, `getMethod` cannot find the method. + // Fall back to matching by function name + argument count. + val m = cls.getMethods.filter { m => + m.getName == encodedFunctionName && m.getParameterCount == arguments.length + } + if (m.isEmpty) { + sys.error(s"Couldn't find $encodedFunctionName on $cls") + } else if (m.length > 1) { + // More than one matching method signature. Exclude synthetic ones, e.g. generic bridge methods. + val realMethods = m.filter(!_.isSynthetic) + if (realMethods.length > 1) { + // Ambiguous case: we don't know which method to choose, so just fail.
+ sys.error(s"Found ${realMethods.length} $encodedFunctionName on $cls") + } else { + Some(realMethods.head) + } + } else { + Some(m.head) + } } case _ => None } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala index cbe37c4f94788..14b72f5132bfd 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ObjectExpressionsSuite.scala @@ -618,6 +618,29 @@ class ObjectExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { checkExceptionInExpression[ArithmeticException]( StaticInvoke(mathCls, IntegerType, "addExact", Seq(Literal(Int.MaxValue), Literal(1))), "") } + + test("SPARK-35278: invoke should find method with correct number of parameters") { + val strClsType = ObjectType(classOf[String]) + checkExceptionInExpression[StringIndexOutOfBoundsException]( + Invoke(Literal("a", strClsType), "substring", strClsType, Seq(Literal(3))), "") + + checkObjectExprEvaluation( + Invoke(Literal("a", strClsType), "substring", strClsType, Seq(Literal(0))), "a") + + checkExceptionInExpression[StringIndexOutOfBoundsException]( + Invoke(Literal("a", strClsType), "substring", strClsType, Seq(Literal(0), Literal(3))), "") + + checkObjectExprEvaluation( + Invoke(Literal("a", strClsType), "substring", strClsType, Seq(Literal(0), Literal(1))), "a") + } + + test("SPARK-35278: invoke should correctly invoke override method") { + val clsType = ObjectType(classOf[ConcreteClass]) + val obj = new ConcreteClass + + checkObjectExprEvaluation( + Invoke(Literal(obj, clsType), "testFunc", IntegerType, Seq(Literal(1))), 0) + } } class TestBean extends Serializable { @@ -628,3 +651,11 @@ class TestBean extends Serializable { def setNonPrimitive(i: AnyRef): Unit = assert(i != null, "this setter should not 
be called with null.") } + +abstract class BaseClass[T] { + def testFunc(param: T): T +} + +class ConcreteClass extends BaseClass[Int] with Serializable { + override def testFunc(param: Int): Int = param - 1 +}