diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 19817ced33568..2fa86de3daa32 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -3592,6 +3592,12 @@
     ],
     "sqlState" : "0A000"
   },
+  "UNSUPPORTED_CALL" : {
+    "message" : [
+      "Cannot call the method \"<methodName>\" of the class \"<className>\"."
+    ],
+    "sqlState" : "0A000"
+  },
   "UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
     "message" : [
       "The char/varchar type can't be used in the table schema.",
@@ -7133,16 +7139,6 @@
       "Cannot bind a V1 function."
     ]
   },
-  "_LEGACY_ERROR_TEMP_3111" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3112" : {
-    "message" : [
-      "Operation unsupported for <class>"
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3113" : {
     "message" : [
       "UnresolvedTableSpec doesn't have a data type"
     ]
   },
@@ -7153,76 +7149,11 @@
       "UnresolvedTableSpec doesn't have a data type"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3115" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3116" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3117" : {
-    "message" : [
-      "Cannot modify <class>"
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3118" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3119" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3120" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3121" : {
     "message" : [
       "A HllSketch instance cannot be updates with a Spark <dataType> type"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3122" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3123" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3124" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3125" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3126" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3127" : {
-    "message" : [
-      "Not supported on UnsafeArrayData."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3128" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3129" : {
     "message" : [
       "Cannot convert this array to unsafe format as it's too big."
@@ -7333,121 +7264,21 @@
       "Cannot create columnar reader."
     ]
   },
-  "_LEGACY_ERROR_TEMP_3151" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3152" : {
     "message" : [
       "Datatype not supported <dt>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3153" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3154" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3155" : {
     "message" : [
       "Datatype not supported <dt>"
     ]
   },
-  "_LEGACY_ERROR_TEMP_3156" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3157" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3158" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3159" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_TEMP_3160" : {
     "message" : [
       ""
     ]
   },
-  "_LEGACY_ERROR_TEMP_3161" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3162" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3163" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3164" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3165" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3166" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3167" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3168" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3169" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3170" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3171" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3172" : {
-    "message" : [
-      ""
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_3173" : {
-    "message" : [
-      ""
-    ]
-  },
   "_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
     "message" : [
       "<errorMessage>"
diff --git a/common/utils/src/main/scala/org/apache/spark/SparkException.scala b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
index 6e1f4b796518a..67bdc23b5f080 100644
--- a/common/utils/src/main/scala/org/apache/spark/SparkException.scala
+++ b/common/utils/src/main/scala/org/apache/spark/SparkException.scala
@@ -228,6 +228,19 @@ private[spark] class SparkUnsupportedOperationException private(
   override def getErrorClass: String = errorClass.orNull
 }
 
+private[spark] object SparkUnsupportedOperationException {
+  def apply(): SparkUnsupportedOperationException = {
+    val stackTrace = Thread.currentThread().getStackTrace
+    val messageParameters = if (stackTrace.length >= 4) {
+      val element = stackTrace(3)
+      Map("className" -> element.getClassName, "methodName" -> element.getMethodName)
+    } else {
+      Map("className" -> "?", "methodName" -> "?")
+    }
+    new SparkUnsupportedOperationException("UNSUPPORTED_CALL", messageParameters)
+  }
+}
+
 /**
  * Class not found exception thrown from Spark with an error class.
  */
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index ef12f6d03c060..35b50d6c6e4f8 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -2331,6 +2331,12 @@ For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add
 
 Unsupported arrow type `<typeName>`.
 
+### UNSUPPORTED_CALL
+
+[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+Cannot call the method "`<methodName>`" of the class "`<className>`".
+
 ### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING
 
 [SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
index 32188fdc01bb0..e612166fb2596 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
@@ -102,7 +102,7 @@ private void assertIndexIsValid(int ordinal) {
 
   @Override
   public Object[] array() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3127");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   /**
@@ -274,7 +274,7 @@ public UnsafeMapData getMap(int ordinal) {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3128");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index 203b0efd10341..6325ba68af5b7 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -192,7 +192,7 @@ public void setNullAt(int i) {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3126");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
index 653b520fc8fc7..fefc06c12df74 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/WriteBuilder.java
@@ -72,7 +72,7 @@ default BatchWrite buildForBatch() {
    */
   @Deprecated(since = "3.2.0")
   default StreamingWrite buildForStreaming() {
-    throw new SparkUnsupportedOperationException(getClass().getName() +
+    throw new SparkUnsupportedOperationException(
       "_LEGACY_ERROR_TEMP_3136", Map.of("class", getClass().getName()));
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
index 417cbdd129c90..7382d96e20baa 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
@@ -227,55 +227,55 @@ final void close() {
     }
 
     boolean getBoolean(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3161");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     byte getByte(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3162");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     short getShort(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3163");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     int getInt(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3164");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     long getLong(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3165");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     float getFloat(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3166");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     double getDouble(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3167");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     CalendarInterval getInterval(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3168");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     Decimal getDecimal(int rowId, int precision, int scale) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3169");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     UTF8String getUTF8String(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3170");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     byte[] getBinary(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3171");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     ColumnarArray getArray(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3172");
+      throw SparkUnsupportedOperationException.apply();
     }
 
     ColumnarMap getMap(int rowId) {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3173");
+      throw SparkUnsupportedOperationException.apply();
     }
   }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
index 7825be3e40e9f..4163af9bfda58 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarArray.java
@@ -189,11 +189,11 @@ public Object get(int ordinal, DataType dataType) {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3158");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3159");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
index faab4509c36e3..d05b3e2dc2d96 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
@@ -90,7 +90,7 @@ public InternalRow copy() {
 
   @Override
   public boolean anyNull() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3151");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
@@ -201,11 +201,11 @@ public Object get(int ordinal, DataType dataType) {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3153");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3153");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
index 3c1777b178b56..aaac980bb332a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
@@ -95,7 +95,7 @@ public InternalRow copy() {
 
   @Override
   public boolean anyNull() {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3154");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
@@ -203,11 +203,11 @@ public Object get(int ordinal, DataType dataType) {
 
   @Override
   public void update(int ordinal, Object value) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3156");
+    throw SparkUnsupportedOperationException.apply();
   }
 
   @Override
   public void setNullAt(int ordinal) {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3157");
+    throw SparkUnsupportedOperationException.apply();
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
index b15b7690b0082..408bd65333cac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ProjectingInternalRow.scala
@@ -37,13 +37,9 @@ case class ProjectingInternalRow(schema: StructType, colOrdinals: Seq[Int]) exte
     this.row = row
   }
 
-  override def setNullAt(i: Int): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3117")
-  }
+  override def setNullAt(i: Int): Unit = throw SparkUnsupportedOperationException()
 
-  override def update(i: Int, value: Any): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3117")
-  }
+  override def update(i: Int, value: Any): Unit = throw SparkUnsupportedOperationException()
 
   override def copy(): InternalRow = {
     val newRow = if (row != null) row.copy() else null
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 086c688cad32f..719780675273a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -81,7 +81,7 @@ object SimpleAnalyzer extends Analyzer(
 }
 
 object FakeV2SessionCatalog extends TableCatalog with FunctionCatalog {
-  private def fail() = throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3118")
+  private def fail() = throw SparkUnsupportedOperationException()
   override def listTables(namespace: Array[String]): Array[Identifier] = fail()
   override def loadTable(ident: Identifier): Table = {
     throw new NoSuchTableException(ident.asMultipartIdentifier)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index a9b1178a8dbaf..a77086c948e01 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -266,31 +266,31 @@ trait SimpleFunctionRegistryBase[T] extends FunctionRegistryBase[T] with Logging
 trait EmptyFunctionRegistryBase[T] extends FunctionRegistryBase[T] {
   override def registerFunction(
     name: FunctionIdentifier, info: ExpressionInfo, builder: FunctionBuilder): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunction(name: FunctionIdentifier, children: Seq[Expression]): T = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def listFunction(): Seq[FunctionIdentifier] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunction(name: FunctionIdentifier): Option[ExpressionInfo] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def lookupFunctionBuilder(name: FunctionIdentifier): Option[FunctionBuilder] = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def dropFunction(name: FunctionIdentifier): Boolean = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 
   override def clear(): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3124")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
index 0bfb45579cbf1..08971dedad288 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/FunctionExpressionBuilder.scala
@@ -27,6 +27,6 @@ trait FunctionExpressionBuilder {
 
 object DummyFunctionExpressionBuilder extends FunctionExpressionBuilder {
   override def makeExpression(name: String, clazz: Class[_], input: Seq[Expression]): Expression = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3119")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
index 0d779d9d779f2..efd9130b4649d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/functionResources.scala
@@ -59,6 +59,6 @@ trait FunctionResourceLoader {
 
 object DummyFunctionResourceLoader extends FunctionResourceLoader {
   override def loadResource(resource: FunctionResource): Unit = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3120")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
index 3baf6d9d1b737..4350c4da932fb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/grouping.scala
@@ -42,12 +42,10 @@ trait BaseGroupingSets extends Expression with CodegenFallback {
 
   // this should be replaced first
   override lazy val resolved: Boolean = false
 
-  override def dataType: DataType =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3122")
+  override def dataType: DataType = throw SparkUnsupportedOperationException()
   override def foldable: Boolean = false
   override def nullable: Boolean = true
-  override def eval(input: InternalRow): Any =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3123")
+  override def eval(input: InternalRow): Any = throw SparkUnsupportedOperationException()
 
   final override val nodePatterns: Seq[TreePattern] = Seq(GROUPING_ANALYTICS)
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index e25509ef5bc1e..6044945175187 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -395,23 +395,23 @@ case class PrettyAttribute(
   override def sql: String = toString
 
   override def withNullability(newNullability: Boolean): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def newInstance(): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withQualifier(newQualifier: Seq[String]): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withName(newName: String): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withMetadata(newMetadata: Metadata): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def qualifier: Seq[String] =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def exprId: ExprId =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withExprId(newExprId: ExprId): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def withDataType(newType: DataType): Attribute =
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3125")
+    throw SparkUnsupportedOperationException()
   override def nullable: Boolean = true
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
index 5f9f07e299b7f..37a3b3a34e49c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ordering.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.types._
  */
 class BaseOrdering extends Ordering[InternalRow] {
   def compare(a: InternalRow, b: InternalRow): Int = {
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3116")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
index d047051e8cdca..3744613dacc00 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/joinTypes.scala
@@ -92,7 +92,7 @@ case class ExistenceJoin(exists: Attribute) extends JoinType {
   override def sql: String = {
     // This join type is only used in the end of optimizer and physical plans, we will not
     // generate SQL for this join type
-    throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3115")
+    throw SparkUnsupportedOperationException()
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
index 8199b358dc1fe..c98a2a92a3abb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/physical/partitioning.scala
@@ -634,9 +634,7 @@ trait ShuffleSpec {
    *   - [[isCompatibleWith]] returns false on the side where the `clustering` is from.
    */
   def createPartitioning(clustering: Seq[Expression]): Partitioning =
-    throw new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_3112",
-      messageParameters = Map("class" -> getClass.getCanonicalName))
+    throw SparkUnsupportedOperationException()
 }
 
 case object SinglePartitionShuffleSpec extends ShuffleSpec {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
index 8f10ba079dd9b..bf64399c56593 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala
@@ -451,7 +451,7 @@ object ResolveDefaultColumns extends QueryErrorsBase with ResolveDefaultColumnsU
     override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = {}
     override def name(): String = CatalogManager.SESSION_CATALOG_NAME
     override def listFunctions(namespace: Array[String]): Array[Identifier] = {
-      throw new SparkUnsupportedOperationException("_LEGACY_ERROR_TEMP_3111")
+      throw SparkUnsupportedOperationException()
     }
     override def loadFunction(ident: Identifier): UnboundFunction = {
       V1Function(v1Catalog.lookupPersistentFunction(ident.asFunctionIdentifier))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
index 6b069d1c97363..95208553a3a59 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ShuffleSpecSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst
 
-import org.apache.spark.SparkFunSuite
+import org.apache.spark.{SparkFunSuite, SparkUnsupportedOperationException}
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.plans.physical._
@@ -470,8 +470,13 @@ class ShuffleSpecSuite extends SparkFunSuite with SQLHelper {
 
     // unsupported cases
 
-    val msg = intercept[Exception](RangeShuffleSpec(10, distribution)
-      .createPartitioning(distribution.clustering))
-    assert(msg.getMessage.contains("Operation unsupported"))
+    checkError(
+      exception = intercept[SparkUnsupportedOperationException] {
+        RangeShuffleSpec(10, distribution).createPartitioning(distribution.clustering)
+      },
+      errorClass = "UNSUPPORTED_CALL",
+      parameters = Map(
+        "methodName" -> "createPartitioning$",
+        "className" -> "org.apache.spark.sql.catalyst.plans.physical.ShuffleSpec"))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 30a5bf709066d..0ed58626b0996 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -17,9 +17,10 @@
 
 package org.apache.spark.sql.errors
 
-import org.apache.spark.SPARK_DOC_ROOT
+import org.apache.spark.{SPARK_DOC_ROOT, SparkUnsupportedOperationException}
 import org.apache.spark.sql.{AnalysisException, ClassData, IntegratedUDFTestUtils, QueryTest, Row}
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
+import org.apache.spark.sql.catalyst.expressions.UnsafeRow
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.expressions.SparkUserDefinedFunction
@@ -918,6 +919,17 @@ class QueryCompilationErrorsSuite
       )
     }
   }
+
+  test("UNSUPPORTED_CALL: call the unsupported method update()") {
+    checkError(
+      exception = intercept[SparkUnsupportedOperationException] {
+        new UnsafeRow(1).update(0, 1)
+      },
+      errorClass = "UNSUPPORTED_CALL",
+      parameters = Map(
+        "methodName" -> "update",
+        "className" -> "org.apache.spark.sql.catalyst.expressions.UnsafeRow"))
+  }
 }
 
 class MyCastToString extends SparkUserDefinedFunction(