Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
181 changes: 6 additions & 175 deletions common/utils/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -3592,6 +3592,12 @@
],
"sqlState" : "0A000"
},
"UNSUPPORTED_CALL" : {
"message" : [
"Cannot call the method \"<methodName>\" of the class \"<className>\"."
],
"sqlState" : "0A000"
},
"UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : {
"message" : [
"The char/varchar type can't be used in the table schema.",
Expand Down Expand Up @@ -7133,16 +7139,6 @@
"Cannot bind a V1 function."
]
},
"_LEGACY_ERROR_TEMP_3111" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3112" : {
"message" : [
"Operation unsupported for <class>"
]
},
"_LEGACY_ERROR_TEMP_3113" : {
"message" : [
"UnresolvedTableSpec doesn't have a data type"
Expand All @@ -7153,76 +7149,11 @@
"UnresolvedTableSpec doesn't have a data type"
]
},
"_LEGACY_ERROR_TEMP_3115" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3116" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3117" : {
"message" : [
"Cannot modify <class>"
]
},
"_LEGACY_ERROR_TEMP_3118" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3119" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3120" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3121" : {
"message" : [
"A HllSketch instance cannot be updated with a Spark <dataType> type"
]
},
"_LEGACY_ERROR_TEMP_3122" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3123" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3124" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3125" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3126" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3127" : {
"message" : [
"Not supported on UnsafeArrayData."
]
},
"_LEGACY_ERROR_TEMP_3128" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3129" : {
"message" : [
"Cannot convert this array to unsafe format as it's too big."
Expand Down Expand Up @@ -7333,121 +7264,21 @@
"Cannot create columnar reader."
]
},
"_LEGACY_ERROR_TEMP_3151" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3152" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3153" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3154" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3155" : {
"message" : [
"Datatype not supported <dataType>"
]
},
"_LEGACY_ERROR_TEMP_3156" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3157" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3158" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3159" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3160" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3161" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3162" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3163" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3164" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3165" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3166" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3167" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3168" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3169" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3170" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3171" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3172" : {
"message" : [
""
]
},
"_LEGACY_ERROR_TEMP_3173" : {
"message" : [
""
]
},
"_LEGACY_ERROR_USER_RAISED_EXCEPTION" : {
"message" : [
"<errorMessage>"
Expand Down
13 changes: 13 additions & 0 deletions common/utils/src/main/scala/org/apache/spark/SparkException.scala
Original file line number Diff line number Diff line change
Expand Up @@ -228,6 +228,19 @@ private[spark] class SparkUnsupportedOperationException private(
override def getErrorClass: String = errorClass.orNull
}

private[spark] object SparkUnsupportedOperationException {
  /**
   * Builds a `SparkUnsupportedOperationException` with the `UNSUPPORTED_CALL` error class,
   * filling in the `className` and `methodName` message parameters from the caller's
   * stack frame so call sites don't have to pass them explicitly.
   *
   * Frame layout assumption (per the author's observed trace — TODO confirm on all JVMs):
   * 0 = Thread.getStackTrace, 1/2 = this factory and its forwarder, 3 = the original caller.
   */
  def apply(): SparkUnsupportedOperationException = {
    val stackTrace = Thread.currentThread().getStackTrace
    val messageParameters = if (stackTrace.length >= 4) {
      // Frame 3 is the user code that invoked this factory.
      val element = stackTrace(3)
      Map("className" -> element.getClassName, "methodName" -> element.getMethodName)
    } else {
      // Defensive fallback for an unexpectedly shallow stack.
      Map("className" -> "?", "methodName" -> "?")
    }
    new SparkUnsupportedOperationException("UNSUPPORTED_CALL", messageParameters)
  }
}

/**
* Class not found exception thrown from Spark with an error class.
*/
Expand Down
6 changes: 6 additions & 0 deletions docs/sql-error-conditions.md
Original file line number Diff line number Diff line change
Expand Up @@ -2331,6 +2331,12 @@ For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add

Unsupported arrow type `<typeName>`.

### UNSUPPORTED_CALL

[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)

Cannot call the method "`<methodName>`" of the class "`<className>`".

### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING

[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ private void assertIndexIsValid(int ordinal) {

@Override
public Object[] array() {
  // Not supported for this representation; the thrown exception reports
  // this class/method via UNSUPPORTED_CALL (names derived from the stack trace).
  throw SparkUnsupportedOperationException.apply();
}

/**
Expand Down Expand Up @@ -274,7 +274,7 @@ public UnsafeMapData getMap(int ordinal) {

@Override
public void update(int ordinal, Object value) {
  // Read-only data structure: mutation is unsupported; reports caller via UNSUPPORTED_CALL.
  throw SparkUnsupportedOperationException.apply();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ public void setNullAt(int i) {

@Override
public void update(int ordinal, Object value) {
  // Mutation unsupported here; exception identifies this method via UNSUPPORTED_CALL.
  throw SparkUnsupportedOperationException.apply();
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ default BatchWrite buildForBatch() {
*/
@Deprecated(since = "3.2.0")
default StreamingWrite buildForStreaming() {
throw new SparkUnsupportedOperationException(getClass().getName() +
throw new SparkUnsupportedOperationException(
"_LEGACY_ERROR_TEMP_3136", Map.of("class", getClass().getName()));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -227,55 +227,55 @@ final void close() {
}

boolean getBoolean(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

byte getByte(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

short getShort(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

int getInt(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

long getLong(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

float getFloat(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

double getDouble(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

CalendarInterval getInterval(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

Decimal getDecimal(int rowId, int precision, int scale) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

UTF8String getUTF8String(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

byte[] getBinary(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

ColumnarArray getArray(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}

ColumnarMap getMap(int rowId) {
  // Unsupported by default; overriding types provide the real accessor.
  throw SparkUnsupportedOperationException.apply();
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -189,11 +189,11 @@ public Object get(int ordinal, DataType dataType) {

@Override
public void update(int ordinal, Object value) {
  // Immutable row view: mutation is unsupported; reports caller via UNSUPPORTED_CALL.
  throw SparkUnsupportedOperationException.apply();
}

@Override
public void setNullAt(int ordinal) {
  // Immutable row view: mutation is unsupported; reports caller via UNSUPPORTED_CALL.
  throw SparkUnsupportedOperationException.apply();
}
}
Loading