Skip to content

Commit dfeeb13

Browse files
committed
remove unused imports in core module
1 parent 6d31dae commit dfeeb13

23 files changed

+11
-40
lines changed

core/src/main/scala/org/apache/spark/MapOutputTracker.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config._
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.rpc.{RpcCallContext, RpcEndpoint, RpcEndpointRef, RpcEnv}
-import org.apache.spark.scheduler.{ExecutorCacheTaskLocation, MapStatus}
+import org.apache.spark.scheduler.MapStatus
 import org.apache.spark.shuffle.MetadataFetchFailedException
 import org.apache.spark.storage.{BlockId, BlockManagerId, ShuffleBlockId}
 import org.apache.spark.util._

core/src/main/scala/org/apache/spark/api/python/PythonRunner.scala

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,8 @@ import java.nio.charset.StandardCharsets.UTF_8
 import java.util.concurrent.atomic.AtomicBoolean

 import scala.collection.JavaConverters._
-import scala.collection.mutable.ArrayBuffer
 import scala.util.control.NonFatal

-import org.json4s.JsonAST._
-import org.json4s.JsonDSL._
-import org.json4s.jackson.JsonMethods.{compact, render}
-
 import org.apache.spark._
 import org.apache.spark.internal.Logging
 import org.apache.spark.internal.config.{BUFFER_SIZE, EXECUTOR_CORES}

core/src/main/scala/org/apache/spark/api/python/SerDeUtil.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@

 package org.apache.spark.api.python

-import java.nio.ByteOrder
-import java.nio.charset.StandardCharsets
 import java.util.{ArrayList => JArrayList}

 import scala.collection.JavaConverters._

core/src/main/scala/org/apache/spark/api/r/RRunner.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.api.r

 import java.io._

-import org.apache.spark._
 import org.apache.spark.broadcast.Broadcast

 /**

core/src/main/scala/org/apache/spark/deploy/history/BasicEventFilterBuilder.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@ package org.apache.spark.deploy.history

 import scala.collection.mutable

-import org.apache.spark.SparkContext
 import org.apache.spark.deploy.history.EventFilter.FilterStatistics
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler._

core/src/main/scala/org/apache/spark/deploy/history/HybridStore.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@

 package org.apache.spark.deploy.history

-import java.io.IOException
 import java.util.Collection
 import java.util.concurrent.ConcurrentHashMap
 import java.util.concurrent.atomic.AtomicBoolean

core/src/main/scala/org/apache/spark/deploy/master/Master.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,7 @@ import java.util.{Date, Locale}
 import java.util.concurrent.{ScheduledFuture, TimeUnit}

 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
-import scala.collection.mutable
 import scala.util.Random
-import scala.util.control.NonFatal

 import org.apache.spark.{SecurityManager, SparkConf, SparkException}
 import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState, SparkHadoopUtil}

core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,6 @@
 package org.apache.spark.deploy.master.ui

 import java.net.{InetAddress, NetworkInterface, SocketException}
-import java.util.Locale
 import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}

 import org.apache.spark.deploy.DeployMessages.{DecommissionWorkersOnHosts, MasterStateResponse, RequestMasterState}

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@

 package org.apache.spark.executor

-import java.io.File
 import java.net.URL
 import java.nio.ByteBuffer
 import java.util.Locale

core/src/main/scala/org/apache/spark/network/BlockDataManager.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ import scala.reflect.ClassTag
 import org.apache.spark.TaskContext
 import org.apache.spark.network.buffer.ManagedBuffer
 import org.apache.spark.network.client.StreamCallbackWithID
-import org.apache.spark.storage.{BlockId, ShuffleBlockId, StorageLevel}
+import org.apache.spark.storage.{BlockId, StorageLevel}

 private[spark]
 trait BlockDataManager {

0 commit comments

Comments
 (0)