Default args removed in methods having equivalent signature
ferdonline committed Nov 29, 2017
1 parent 59b5562 commit c5f1b2c
Showing 1 changed file with 3 additions and 2 deletions.
sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala (3 additions, 2 deletions)
@@ -518,6 +518,7 @@ class Dataset[T] private[sql](
    * the logical plan of this Dataset, which is especially useful in iterative algorithms where the
    * plan may grow exponentially. It will be saved to files inside the checkpoint
    * directory set with `SparkContext#setCheckpointDir`.
+   *
    * @group basic
    * @since 2.1.0
    */
@@ -536,7 +537,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def checkpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager)
+  def checkpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager)

   /**
    * Eagerly locally checkpoints a Dataset and return the new Dataset. Checkpointing can be
@@ -562,7 +563,7 @@ class Dataset[T] private[sql](
    */
   @Experimental
   @InterfaceStability.Evolving
-  def localCheckpoint(eager: Boolean = true): Dataset[T] = _checkpoint(eager = eager, local = true)
+  def localCheckpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager, local = true)

   /**
    * Returns a checkpointed version of this Dataset. Checkpointing can be used to truncate the
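Why the defaults could be dropped (a reading of the change, not stated in the commit itself): Dataset already exposes parameterless checkpoint() and localCheckpoint() overloads that delegate to the eager variant, so a `= true` default on the Boolean overloads would duplicate that behaviour. Below is a minimal, self-contained sketch of the assumed overload pattern; the signature of the private `_checkpoint` helper is inferred from the call sites visible in the diff and is not the actual Spark implementation.

class Dataset[T] {
  // Hypothetical helper; signature inferred from the diff's call sites
  // (`_checkpoint(eager = eager)` and `_checkpoint(eager = eager, local = true)`).
  private def _checkpoint(eager: Boolean, local: Boolean = false): Dataset[T] = {
    // The real implementation would truncate the lineage and, if `eager`, materialize now.
    this
  }

  // Parameterless overloads preserve the old default behaviour (eager = true).
  def checkpoint(): Dataset[T] = checkpoint(eager = true)
  def localCheckpoint(): Dataset[T] = localCheckpoint(eager = true)

  // Boolean overloads carry no default: `ds.checkpoint()` already resolves to the
  // parameterless overloads above, so a default argument here would be redundant.
  def checkpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager)
  def localCheckpoint(eager: Boolean): Dataset[T] = _checkpoint(eager = eager, local = true)
}

With this shape, callers that want the default simply write ds.checkpoint(), while callers that want a lazy checkpoint write ds.checkpoint(eager = false), and no overload carries a default argument alongside an equivalent-signature alternative.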
