From 42ff879d1cf128742cd56245216b42253ca755ca Mon Sep 17 00:00:00 2001
From: Rui Wang
Date: Fri, 24 Feb 2023 14:08:57 -0800
Subject: [PATCH 1/2] [SPARK-42569][CONNECT] Throw unsupported exceptions for non-supported API.

---
 .../scala/org/apache/spark/sql/Dataset.scala | 54 +++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index 87aadfe437be1..c2d365aeda09c 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2461,6 +2461,60 @@ class Dataset[T] private[sql] (val session: SparkSession, private[sql] val plan:
     new DataFrameWriterV2[T](table, this)
   }
 
+  def unpersist(blocking: Boolean): this.type = {
+    throw new UnsupportedOperationException("unpersist() is not implemented.")
+  }
+
+  def unpersist(): this.type = unpersist(blocking = false)
+
+  def cache(): this.type = {
+    throw new UnsupportedOperationException("cache() is not implemented.")
+  }
+
+  def withWatermark(eventTime: String, delayThreshold: String): Dataset[T] = {
+    throw new UnsupportedOperationException("withWatermark is not implemented.")
+  }
+
+  def observe(name: String, expr: Column, exprs: Column*): Dataset[T] = {
+    throw new UnsupportedOperationException("observe is not implemented.")
+  }
+
+  def foreach(f: T => Unit): Unit = {
+    throw new UnsupportedOperationException("foreach is not implemented.")
+  }
+
+  def foreachPartition(f: Iterator[T] => Unit): Unit = {
+    throw new UnsupportedOperationException("foreach is not implemented.")
+  }
+
+  def checkpoint(): Dataset[T] = {
+    throw new UnsupportedOperationException("checkpoint is not implemented.")
+  }
+
+  def checkpoint(eager: Boolean): Dataset[T] = {
+    throw new UnsupportedOperationException("checkpoint is not implemented.")
+  }
+
+  def localCheckpoint(): Dataset[T] = {
+    throw new UnsupportedOperationException("localCheckpoint is not implemented.")
+  }
+
+  def localCheckpoint(eager: Boolean): Dataset[T] = {
+    throw new UnsupportedOperationException("localCheckpoint is not implemented.")
+  }
+
+  def sameSemantics(other: Dataset[T]): Boolean = {
+    throw new UnsupportedOperationException("sameSemantics is not implemented.")
+  }
+
+  def semanticHash(): Int = {
+    throw new UnsupportedOperationException("sameSemantics is not implemented.")
+  }
+
+  def toJSON: Dataset[String] = {
+    throw new UnsupportedOperationException("toJSON is not implemented.")
+  }
+
   private[sql] def analyze: proto.AnalyzePlanResponse = {
     session.analyze(plan, proto.Explain.ExplainMode.SIMPLE)
   }

From ca25aa5a5f51fa8a82eda94494c467aa11ebe3eb Mon Sep 17 00:00:00 2001
From: Rui Wang
Date: Fri, 24 Feb 2023 16:27:09 -0800
Subject: [PATCH 2/2] update

---
 .../jvm/src/main/scala/org/apache/spark/sql/Dataset.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index c2d365aeda09c..e398f3cc4b299 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -2508,7 +2508,7 @@ class Dataset[T] private[sql] (val session: SparkSession, private[sql] val plan:
   }
 
   def semanticHash(): Int = {
-    throw new UnsupportedOperationException("sameSemantics is not implemented.")
+    throw new UnsupportedOperationException("semanticHash is not implemented.")
  }
 
   def toJSON: Dataset[String] = {
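Usage sketch (not part of the patch series): after PATCH 1/2, these client-side Dataset APIs fail fast with UnsupportedOperationException rather than being absent from the Spark Connect Scala client. A minimal illustration, assuming `spark` is an already-constructed Spark Connect client SparkSession (session construction is elided here as an assumption):

    // Sketch only: assumes `spark` is a Spark Connect client SparkSession.
    val df = spark.range(10)

    try {
      df.cache() // stubbed by PATCH 1/2; throws immediately
    } catch {
      case e: UnsupportedOperationException =>
        println(e.getMessage) // prints: cache() is not implemented.
    }

Note that PATCH 2/2 corrects only the copy-pasted message in semanticHash; as of this series, foreachPartition still reports "foreach is not implemented."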