Commit c1423d7

[SPARK][MIRROR] Removed the unused imports for scala
1 parent 7f41113 commit c1423d7

File tree

12 files changed, +8 -25 lines changed


spark/v3.4/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/optimizer/ReplaceStaticInvoke.scala

Lines changed: 0 additions & 1 deletion
@@ -28,7 +28,6 @@ import org.apache.spark.sql.catalyst.expressions.objects.StaticInvoke
 import org.apache.spark.sql.catalyst.plans.logical.Filter
 import org.apache.spark.sql.catalyst.plans.logical.Join
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.catalyst.plans.logical.ReplaceData
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.TreePattern.BINARY_COMPARISON
 import org.apache.spark.sql.catalyst.trees.TreePattern.COMMAND
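
A note on how such leftovers are usually caught: unused imports like the ReplaceData one above are normally flagged by the compiler rather than found by hand. A minimal sbt sketch, assuming a Scala 2.13 build (the actual Iceberg build configuration may differ):

// build.sbt -- minimal sketch, assuming Scala 2.13; these are the stock
// scalac options, not something specific to this repo.
scalacOptions ++= Seq(
  "-Wunused:imports",  // warn when an import is never referenced
  "-Xfatal-warnings"   // optionally turn those warnings into errors
)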

spark/v3.4/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala

Lines changed: 1 addition & 1 deletion
@@ -386,7 +386,7 @@ class IcebergParseException(
     builder ++= "\n" ++= message
     start match {
       case Origin(
-          Some(l), Some(p), Some(startIndex), Some(stopIndex), Some(sqlText), Some(objectType), Some(objectName)) =>
+          Some(l), Some(p), Some(_), Some(_), Some(_), Some(_), Some(_)) =>
         builder ++= s"(line $l, pos $p)\n"
         command.foreach { cmd =>
           val (above, below) = cmd.split("\n").splitAt(l)

spark/v3.4/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/SetIdentifierFields.scala

Lines changed: 0 additions & 1 deletion
@@ -20,7 +20,6 @@
 package org.apache.spark.sql.catalyst.plans.logical
 
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.expressions.Transform
 
 case class SetIdentifierFields(
     table: Seq[String],

spark/v3.4/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkExpressionConverter.scala

Lines changed: 2 additions & 2 deletions
@@ -57,8 +57,8 @@ object SparkExpressionConverter {
     val optimizedLogicalPlan = session.sessionState.executePlan(filter).optimizedPlan
     optimizedLogicalPlan.collectFirst {
       case filter: Filter => filter.condition
-      case dummyRelation: DummyRelation => Literal.TrueLiteral
-      case localRelation: LocalRelation => Literal.FalseLiteral
+      case _: DummyRelation => Literal.TrueLiteral
+      case _: LocalRelation => Literal.FalseLiteral
     }.getOrElse(throw new AnalysisException("Failed to find filter expression"))
   }
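
Here the binders were used purely as type tests, so the typed wildcard `_: DummyRelation` matches exactly the same plans without introducing an unused variable. A standalone sketch with simplified stand-ins for the plan nodes:

// Simplified stand-ins for Spark's logical plan nodes; not the real API.
sealed trait Plan
final case class Filter(condition: String) extends Plan
final case class DummyRelation() extends Plan
final case class LocalRelation() extends Plan

def toCondition(plan: Plan): String = plan match {
  case Filter(cond)     => cond     // the binder is used, so it keeps a name
  case _: DummyRelation => "true"   // type test only: no binder needed
  case _: LocalRelation => "false"
}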

spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala

Lines changed: 1 addition & 8 deletions
@@ -28,30 +28,23 @@ import org.antlr.v4.runtime.tree.TerminalNodeImpl
 import org.apache.iceberg.common.DynConstructors
 import org.apache.iceberg.spark.ExtendedParser
 import org.apache.iceberg.spark.ExtendedParser.RawOrderField
-import org.apache.iceberg.spark.Spark3Util
 import org.apache.iceberg.spark.procedures.SparkProcedures
-import org.apache.iceberg.spark.source.SparkTable
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.FunctionIdentifier
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
 import org.apache.spark.sql.catalyst.analysis.RewriteViewCommands
-import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.parser.extensions.IcebergSqlExtensionsParser.NonReservedContext
 import org.apache.spark.sql.catalyst.parser.extensions.IcebergSqlExtensionsParser.QuotedIdentifierContext
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.trees.Origin
-import org.apache.spark.sql.connector.catalog.Table
-import org.apache.spark.sql.connector.catalog.TableCatalog
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.VariableSubstitution
 import org.apache.spark.sql.types.DataType
 import org.apache.spark.sql.types.StructType
 import scala.jdk.CollectionConverters._
-import scala.util.Try
 
 class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserInterface with ExtendedParser {

@@ -329,7 +322,7 @@ class IcebergParseException(
     builder ++= "\n" ++= message
     start match {
       case Origin(
-          Some(l), Some(p), Some(startIndex), Some(stopIndex), Some(sqlText), Some(objectType), Some(objectName)) =>
+          Some(l), Some(p), Some(_), Some(_), Some(_), Some(_), Some(_)) =>
         builder ++= s"(line $l, pos $p)\n"
         command.foreach { cmd =>
           val (above, below) = cmd.split("\n").splitAt(l)

spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/SetIdentifierFields.scala

Lines changed: 0 additions & 1 deletion
@@ -20,7 +20,6 @@
 package org.apache.spark.sql.catalyst.plans.logical
 
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.expressions.Transform
 
 case class SetIdentifierFields(
     table: Seq[String],

spark/v3.5/spark/src/main/scala/org/apache/spark/sql/execution/datasources/SparkExpressionConverter.scala

Lines changed: 2 additions & 2 deletions
@@ -57,8 +57,8 @@ object SparkExpressionConverter {
     val optimizedLogicalPlan = session.sessionState.executePlan(filter).optimizedPlan
     optimizedLogicalPlan.collectFirst {
       case filter: Filter => filter.condition
-      case dummyRelation: DummyRelation => Literal.TrueLiteral
-      case localRelation: LocalRelation => Literal.FalseLiteral
+      case _: DummyRelation => Literal.TrueLiteral
+      case _: LocalRelation => Literal.FalseLiteral
     }.getOrElse(throw new AnalysisException("Failed to find filter expression"))
   }

spark/v4.0/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala

Lines changed: 2 additions & 3 deletions
@@ -28,7 +28,6 @@ import org.antlr.v4.runtime.tree.TerminalNodeImpl
 import org.apache.iceberg.common.DynConstructors
 import org.apache.iceberg.spark.ExtendedParser
 import org.apache.iceberg.spark.ExtendedParser.RawOrderField
-import org.apache.iceberg.spark.procedures.SparkProcedures
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.catalyst.FunctionIdentifier
@@ -316,8 +315,8 @@ class IcebergParseException(
     val builder = new StringBuilder
     builder ++= "\n" ++= message
     start match {
-      case Origin(Some(l), Some(p), Some(startIndex), Some(stopIndex), Some(sqlText), Some(objectType),
-        Some(objectName), _, _) =>
+      case Origin(Some(l), Some(p), Some(_), Some(_), Some(_), Some(_),
+        Some(_), _, _) =>
         builder ++= s"(line $l, pos $p)\n"
         command.foreach { cmd =>
           val (above, below) = cmd.split("\n").splitAt(l)

spark/v4.0/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/SetIdentifierFields.scala

Lines changed: 0 additions & 1 deletion
@@ -20,7 +20,6 @@
 package org.apache.spark.sql.catalyst.plans.logical
 
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.expressions.Transform
 
 case class SetIdentifierFields(
     table: Seq[String],

spark/v4.0/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AlterV2ViewUnsetPropertiesExec.scala

Lines changed: 0 additions & 1 deletion
@@ -19,7 +19,6 @@
 
 package org.apache.spark.sql.execution.datasources.v2
 
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.IcebergAnalysisException
 import org.apache.spark.sql.catalyst.expressions.Attribute
