Commit 409a0bc

Fix DataFrameWriterV2 javadoc.
1 parent eca53b2

1 file changed

sql/core/src/main/scala/org/apache/spark/sql/DataFrameWriterV2.scala

Lines changed: 32 additions & 35 deletions
@@ -22,15 +22,15 @@ import scala.collection.mutable
 
 import org.apache.spark.annotation.Experimental
 import org.apache.spark.sql.catalog.v2.expressions.{LogicalExpressions, Transform}
-import org.apache.spark.sql.catalyst.analysis.{NoSuchTableException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{CannotReplaceMissingTableException, NoSuchTableException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.expressions.{Attribute, Bucket, Days, Hours, Literal, Months, Years}
 import org.apache.spark.sql.catalyst.plans.logical.{AppendData, CreateTableAsSelect, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, ReplaceTableAsSelect}
 import org.apache.spark.sql.execution.SQLExecution
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
 import org.apache.spark.sql.types.IntegerType
 
 /**
- * Interface used to write a [[Dataset]] to external storage using the v2 API.
+ * Interface used to write a [[org.apache.spark.sql.Dataset]] to external storage using the v2 API.
  *
  * @since 3.0.0
  */
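
The reason fully qualified `[[...]]` targets help: Spark publishes aggregated Java/Scala API docs, and a doc link that relies on a short name imported in the source file often fails to resolve in the aggregated output, while a fully qualified target always does. A minimal illustration with a hypothetical class (not part of the diff):

```scala
/** Hypothetical class illustrating the linking convention this diff applies. */
class Example {
  /**
   * A fully qualified target such as [[org.apache.spark.sql.Dataset]] resolves
   * regardless of this file's imports; a bare short name only resolves when the
   * target is in lexical scope, which breaks in aggregated Java/Scala docs.
   */
  def describe(): Unit = ()
}
```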
@@ -78,14 +78,6 @@ final class DataFrameWriterV2[T] private[sql](table: String, ds: Dataset[T])
     this
   }
 
-  override def option(key: String, value: Boolean): DataFrameWriterV2[T] =
-    option(key, value.toString)
-
-  override def option(key: String, value: Long): DataFrameWriterV2[T] = option(key, value.toString)
-
-  override def option(key: String, value: Double): DataFrameWriterV2[T] =
-    option(key, value.toString)
-
   override def options(options: scala.collection.Map[String, String]): DataFrameWriterV2[T] = {
     options.foreach {
       case (key, value) =>
@@ -155,12 +147,13 @@ final class DataFrameWriterV2[T] private[sql](table: String, ds: Dataset[T])
   /**
    * Append the contents of the data frame to the output table.
    *
-   * If the output table does not exist, this operation will fail with [[NoSuchTableException]]. The
-   * data frame will be validated to ensure it is compatible with the existing table.
-   *
+   * If the output table does not exist, this operation will fail with
+   * [[org.apache.spark.sql.catalyst.analysis.NoSuchTableException]]. The data frame will be
+   * validated to ensure it is compatible with the existing table.
    *
-   * @throws NoSuchTableException If the table does not exist.
+   * @throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException If the table does not exist
    */
+  @throws(classOf[NoSuchTableException])
   def append(): Unit = {
     val append = loadTable(catalog, identifier) match {
       case Some(t) =>
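
Note the two different `@throws` forms now present: the Scaladoc `@throws` tag only affects generated documentation, while the `@throws(classOf[...])` annotation is recorded in the compiled method signature, so Java callers see a declared (checked) exception. A minimal standalone sketch with hypothetical names:

```scala
// Hypothetical names, not Spark code.
class MissingTableException(msg: String) extends Exception(msg)

trait Writer {
  /**
   * Scaladoc tag: documentation only.
   * @throws MissingTableException If the target table is absent.
   */
  @throws(classOf[MissingTableException]) // annotation: lands in bytecode, so
  def append(): Unit                      // Java sees `throws MissingTableException`
}
```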
@@ -176,11 +169,13 @@ final class DataFrameWriterV2[T] private[sql](table: String, ds: Dataset[T])
    * Overwrite rows matching the given filter condition with the contents of the data frame in
    * the output table.
    *
-   * If the output table does not exist, this operation will fail with [[NoSuchTableException]]. The
-   * data frame will be validated to ensure it is compatible with the existing table.
+   * If the output table does not exist, this operation will fail with
+   * [[org.apache.spark.sql.catalyst.analysis.NoSuchTableException]].
+   * The data frame will be validated to ensure it is compatible with the existing table.
    *
-   * @throws NoSuchTableException If the table does not exist.
+   * @throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException If the table does not exist
    */
+  @throws(classOf[NoSuchTableException])
   def overwrite(condition: Column): Unit = {
     val overwrite = loadTable(catalog, identifier) match {
       case Some(t) =>
@@ -200,11 +195,13 @@ final class DataFrameWriterV2[T] private[sql](table: String, ds: Dataset[T])
    * This operation is equivalent to Hive's `INSERT OVERWRITE ... PARTITION`, which replaces
    * partitions dynamically depending on the contents of the data frame.
    *
-   * If the output table does not exist, this operation will fail with [[NoSuchTableException]]. The
-   * data frame will be validated to ensure it is compatible with the existing table.
+   * If the output table does not exist, this operation will fail with
+   * [[org.apache.spark.sql.catalyst.analysis.NoSuchTableException]]. The data frame will be
+   * validated to ensure it is compatible with the existing table.
    *
-   * @throws NoSuchTableException If the table does not exist.
+   * @throws org.apache.spark.sql.catalyst.analysis.NoSuchTableException If the table does not exist
    */
+  @throws(classOf[NoSuchTableException])
   def overwritePartitions(): Unit = {
     val dynamicOverwrite = loadTable(catalog, identifier) match {
       case Some(t) =>
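
To make the dynamic-overwrite semantics concrete: only the partitions present in the incoming data frame are replaced; all other partitions keep their data. A hedged usage sketch (table name and layout are invented; assumes a Spark 3.x session where `Dataset.writeTo` returns this writer and the table exists, partitioned by `day`):

```scala
// Hypothetical: "catalog.db.events" is partitioned by `day`, and `updates`
// holds rows only for day = '2019-08-01'. Dynamic overwrite replaces just
// that partition; every other day's rows are left untouched.
val updates = spark.read.parquet("/tmp/events_2019_08_01")
updates.writeTo("catalog.db.events").overwritePartitions()
```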
@@ -220,10 +217,8 @@ final class DataFrameWriterV2[T] private[sql](table: String, ds: Dataset[T])
   /**
    * Wrap an action to track the QueryExecution and time cost, then report to the user-registered
    * callback functions.
-   *
-   * Visible for testing.
    */
-  private[sql] def runCommand(name: String)(command: LogicalPlan): Unit = {
+  private def runCommand(name: String)(command: LogicalPlan): Unit = {
     val qe = sparkSession.sessionState.executePlan(command)
     // call `QueryExecution.toRDD` to trigger the execution of commands.
     SQLExecution.withNewExecutionId(sparkSession, qe, Some(name))(qe.toRdd)
@@ -260,21 +255,21 @@ trait WriteConfigMethods[R] {
    *
    * @since 3.0.0
    */
-  def option(key: String, value: Boolean): R
+  def option(key: String, value: Boolean): R = option(key, value.toString)
 
   /**
    * Add a long output option.
    *
    * @since 3.0.0
    */
-  def option(key: String, value: Long): R
+  def option(key: String, value: Long): R = option(key, value.toString)
 
   /**
    * Add a double output option.
    *
    * @since 3.0.0
    */
-  def option(key: String, value: Double): R
+  def option(key: String, value: Double): R = option(key, value.toString)
 
   /**
    * Add write options from a Scala Map.
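
The Boolean/Long/Double `option` overrides deleted from `DataFrameWriterV2` earlier in this diff reappear here as concrete trait methods, so implementors only have to supply the abstract `option(String, String)` overload. A self-contained sketch of the pattern, using hypothetical names:

```scala
// Hypothetical sketch of the refactor: concrete trait overloads delegating
// to a single abstract String-based method.
trait ConfigMethods[R] {
  def option(key: String, value: String): R // the only abstract overload
  def option(key: String, value: Boolean): R = option(key, value.toString)
  def option(key: String, value: Long): R = option(key, value.toString)
  def option(key: String, value: Double): R = option(key, value.toString)
}

class Builder extends ConfigMethods[Builder] {
  private val opts = scala.collection.mutable.Map.empty[String, String]
  override def option(key: String, value: String): Builder = {
    opts += key -> value
    this // return this for chaining, as DataFrameWriterV2 does
  }
}
```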
@@ -301,10 +296,13 @@ trait CreateTableWriter[T] extends WriteConfigMethods[CreateTableWriter[T]] {
    * The new table's schema, partition layout, properties, and other configuration will be
    * based on the configuration set on this writer.
    *
-   * If the output table exists, this operation will fail with [[TableAlreadyExistsException]].
+   * If the output table exists, this operation will fail with
+   * [[org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException]].
    *
-   * @throws TableAlreadyExistsException If the table already exists.
+   * @throws org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
+   *         If the table already exists
    */
+  @throws(classOf[TableAlreadyExistsException])
   def create(): Unit
 
   /**
@@ -313,10 +311,13 @@ trait CreateTableWriter[T] extends WriteConfigMethods[CreateTableWriter[T]] {
    * The existing table's schema, partition layout, properties, and other configuration will be
    * replaced with the contents of the data frame and the configuration set on this writer.
    *
-   * If the output table exists, this operation will fail with [[TableAlreadyExistsException]].
+   * If the output table does not exist, this operation will fail with
+   * [[org.apache.spark.sql.catalyst.analysis.CannotReplaceMissingTableException]].
    *
-   * @throws TableAlreadyExistsException If the table already exists.
+   * @throws org.apache.spark.sql.catalyst.analysis.CannotReplaceMissingTableException
+   *         If the table does not exist
    */
+  @throws(classOf[CannotReplaceMissingTableException])
   def replace(): Unit
 
   /**
@@ -325,15 +326,11 @@ trait CreateTableWriter[T] extends WriteConfigMethods[CreateTableWriter[T]] {
    * The output table's schema, partition layout, properties, and other configuration will be based
    * on the contents of the data frame and the configuration set on this writer. If the table
    * exists, its configuration and data will be replaced.
-   *
-   * If the output table exists, this operation will fail with [[TableAlreadyExistsException]].
-   *
-   * @throws TableAlreadyExistsException If the table already exists.
    */
   def createOrReplace(): Unit
 
   /**
-   * Partition the output table created by [[create]], [[createOrReplace]], or [[replace]] using
+   * Partition the output table created by `create`, `createOrReplace`, or `replace` using
    * the given columns or transforms.
    *
    * When specified, the table data will be stored by these values for efficient reads.
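
Read together, the corrected docs describe the writer's full call pattern. A hedged end-to-end sketch (all identifiers invented; assumes a Spark 3.x session whose catalog supports these operations, with `spark.implicits._` in scope for `$` and `toDF`):

```scala
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import spark.implicits._

val df = Seq((1L, "2019-08-01"), (2L, "2019-08-02")).toDF("id", "day")

// create() throws TableAlreadyExistsException if the table exists;
// createOrReplace() succeeds either way, per the fixed docs above.
df.writeTo("catalog.db.events")
  .partitionedBy($"day") // partitioning applies only when creating/replacing
  .createOrReplace()

try {
  df.writeTo("catalog.db.events").append() // validated against the table schema
  df.writeTo("catalog.db.events").overwrite($"day" === "2019-08-01")
} catch {
  case _: NoSuchTableException => // the case the new @throws annotations declare
    sys.error("table catalog.db.events does not exist")
}
```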
