16 changes: 16 additions & 0 deletions project/MimaExcludes.scala
@@ -88,6 +88,22 @@ object MimaExcludes {
"org.apache.spark.mllib.linalg.Vector.toSparse"),
ProblemFilters.exclude[MissingMethodProblem](
"org.apache.spark.mllib.linalg.Vector.numActives")
+    ) ++ Seq(
+      // This `protected[sql]` method was removed in 1.3.1
+      ProblemFilters.exclude[MissingMethodProblem](
+        "org.apache.spark.sql.SQLContext.checkAnalysis"),
+      // This `private[sql]` class was removed in 1.4.0:
+      ProblemFilters.exclude[MissingClassProblem](
+        "org.apache.spark.sql.execution.AddExchange"),
+      ProblemFilters.exclude[MissingClassProblem](
+        "org.apache.spark.sql.execution.AddExchange$"),
+      // These test support classes were moved out of src/main and into src/test:
+      ProblemFilters.exclude[MissingClassProblem](
+        "org.apache.spark.sql.parquet.ParquetTestData"),
+      ProblemFilters.exclude[MissingClassProblem](
+        "org.apache.spark.sql.parquet.ParquetTestData$"),
+      ProblemFilters.exclude[MissingClassProblem](
+        "org.apache.spark.sql.parquet.TestGroupWriteSupport")
)
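
For readers skimming the exclusion list: each ProblemFilters.exclude[SomeProblem]("fully.qualified.Name") entry suppresses exactly one incompatibility that MiMa would otherwise report, and the per-release groups are concatenated with ++ inside a match on the upcoming version string (the `case v if v.startsWith("1.3")` arm below holds the previous release's list). A condensed, self-contained sketch of that structure, assuming the mima-core API of this era; the object name and arms are illustrative, not the real file's full contents:

import com.typesafe.tools.mima.core._

object ExcludesSketch {
  // Returns the filters to apply when comparing against the previous
  // release's binaries, keyed on the version being built.
  def excludes(version: String): Seq[ProblemFilter] = version match {
    case v if v.startsWith("1.4") =>
      Seq(
        // Suppress the report for one deliberately removed private class:
        ProblemFilters.exclude[MissingClassProblem](
          "org.apache.spark.sql.execution.AddExchange")
      )
    case _ => Seq.empty
  }
}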

case v if v.startsWith("1.3") =>
5 changes: 2 additions & 3 deletions project/SparkBuild.scala
@@ -156,9 +156,8 @@ object SparkBuild extends PomBuild {
/* Enable tests settings for all projects except examples, assembly and tools */
(allProjects ++ optionallyEnabledProjects).foreach(enable(TestSettings.settings))

-  // TODO: Add Sql to mima checks
-  // TODO: remove launcher from this list after 1.3.
-  allProjects.filterNot(x => Seq(spark, sql, hive, hiveThriftServer, catalyst, repl,
+  // TODO: remove launcher from this list after 1.4.0
+  allProjects.filterNot(x => Seq(spark, hive, hiveThriftServer, catalyst, repl,
networkCommon, networkShuffle, networkYarn, launcher, unsafe).contains(x)).foreach {
x => enable(MimaBuild.mimaSettings(sparkHome, x))(x)
}
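
Removing sql from the filterNot list is what actually turns MiMa on for the SQL project: every project not named in the excluded Seq gets MimaBuild.mimaSettings applied. A minimal sketch of this enable-by-exclusion pattern using only the sbt API (enableMima and its parameter names are mine, standing in for SparkBuild's enable helper):

import sbt._

// Apply MiMa settings to every subproject that is not explicitly excluded.
def enableMima(allProjects: Seq[Project],
               excluded: Set[Project],
               mimaSettings: Seq[Setting[_]]): Seq[Project] =
  allProjects.filterNot(excluded.contains).map(_.settings(mimaSettings: _*))

With sql no longer excluded, a binary break in the sql module now fails the MiMa check unless a matching entry is added to MimaExcludes.scala, as in the first file above.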
@@ -84,7 +84,7 @@ object RDDConversions {
}

/** Logical plan node for scanning data from an RDD. */
-case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
+private[sql] case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
extends LogicalPlan with MultiInstanceRelation {

override def children: Seq[LogicalPlan] = Nil
@@ -105,11 +105,12 @@ case class LogicalRDD(output: Seq[Attribute], rdd: RDD[Row])(sqlContext: SQLContext)
}

/** Physical plan node for scanning data from an RDD. */
-case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
+private[sql] case class PhysicalRDD(output: Seq[Attribute], rdd: RDD[Row]) extends LeafNode {
override def execute(): RDD[Row] = rdd
}

/** Logical plan node for scanning data from a local collection. */
+private[sql]
case class LogicalLocalTable(output: Seq[Attribute], rows: Seq[Row])(sqlContext: SQLContext)
extends LogicalPlan with MultiInstanceRelation {

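
The recurring change in this file (and the two below) is prefixing execution-plan case classes with private[sql]. A qualified-private class can only be referenced from inside the named enclosing package, so MiMa treats it as non-public and stops tracking it, which is what makes the corresponding exclusions above one-time entries rather than an ongoing burden. A self-contained sketch with hypothetical classes:

package org.apache.spark.sql.execution {
  // Visible anywhere under org.apache.spark.sql, invisible outside it.
  private[sql] case class InternalNode(id: Int)
}

package org.apache.spark.sql {
  // Compiles: this code lives inside the sql package.
  object Inside { val node = execution.InternalNode(1) }
}

package org.apache.spark.thirdparty {
  // Would not compile if uncommented: InternalNode is private to sql.
  // object Outside { val node = org.apache.spark.sql.execution.InternalNode(1) }
}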
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
/**
* Physical plan node for scanning data from a local collection.
*/
-case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {
+private[sql] case class LocalTableScan(output: Seq[Attribute], rows: Seq[Row]) extends LeafNode {

private lazy val rdd = sqlContext.sparkContext.parallelize(rows)

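
LocalTableScan keeps its rows on the driver and only distributes them when the plan actually executes, via the private lazy val rdd shown above. A runnable sketch of that defer-until-execute pattern, using a bare SparkContext instead of the SQL internals:

import org.apache.spark.{SparkConf, SparkContext}

object LazyLocalScanSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[*]").setAppName("lazy-local-scan"))
    val rows = Seq("a", "b", "c")
    // The local collection is not turned into an RDD until first access.
    lazy val rdd = sc.parallelize(rows)
    println(rdd.count()) // forces the parallelize; prints 3
    sc.stop()
  }
}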
@@ -42,7 +42,7 @@ trait RunnableCommand extends logical.Command {
* A physical operator that executes the run method of a `RunnableCommand` and
* saves the result to prevent multiple executions.
*/
-case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
+private[sql] case class ExecutedCommand(cmd: RunnableCommand) extends SparkPlan {
/**
* A concrete command should override this lazy field to wrap up any side effects caused by the
* command or any other computation that should be evaluated exactly once. The value of this field
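
The scaladoc above spells out the execute-once contract: the command's side effects live behind a lazy field, so invoking the operator repeatedly cannot rerun them. A stripped-down sketch of that memoization pattern (RunOnce and its members are hypothetical; the real ExecutedCommand wraps cmd.run):

// `lazy val` evaluates its right-hand side once, on first access, and
// caches the result, which is exactly the execute-once guarantee above.
class RunOnce[T](body: => T) {
  protected lazy val sideEffectResult: T = body
  def execute(): T = sideEffectResult
}

object RunOnceDemo extends App {
  val once = new RunOnce({ println("ran"); 42 })
  println(once.execute()) // prints "ran", then 42
  println(once.execute()) // prints 42 only; the body is not rerun
}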