diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java index 6af45aec3c7b..b33c53871c32 100644 --- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java +++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/InMemoryStore.java @@ -252,7 +252,7 @@ private static Predicate getPredicate( return (value) -> set.contains(indexValueForEntity(getter, value)); } else { - HashSet set = new HashSet<>(values.size()); + HashSet<Comparable<Object>> set = new HashSet<>(values.size()); for (Object key : values) { set.add(asKey(key)); } diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java index b8c5fab8709e..d2a26982d870 100644 --- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java +++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVTypeInfo.java @@ -124,7 +124,7 @@ interface Accessor { Object get(Object instance) throws ReflectiveOperationException; - Class getType(); + Class<?> getType(); } private class FieldAccessor implements Accessor { @@ -141,7 +141,7 @@ public Object get(Object instance) throws ReflectiveOperationException { } @Override - public Class getType() { + public Class<?> getType() { return field.getType(); } } @@ -160,7 +160,7 @@ public Object get(Object instance) throws ReflectiveOperationException { } @Override - public Class getType() { + public Class<?> getType() { return method.getReturnType(); } } diff --git a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java index 1022111897a4..4f660a13f740 100644 --- a/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java +++ b/core/src/test/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriterSuite.java @@ -172,9 
+172,9 @@ public void setUp() throws IOException { when(taskContext.taskMemoryManager()).thenReturn(taskMemoryManager); } - private UnsafeShuffleWriter createWriter(boolean transferToEnabled) { + private UnsafeShuffleWriter<Object, Object> createWriter(boolean transferToEnabled) { conf.set("spark.file.transferTo", String.valueOf(transferToEnabled)); - return new UnsafeShuffleWriter( + return new UnsafeShuffleWriter<>( blockManager, taskMemoryManager, new SerializedShuffleHandle<>(0, 1, shuffleDep), @@ -533,7 +533,7 @@ public void testPeakMemoryUsed() throws Exception { final long numRecordsPerPage = pageSizeBytes / recordLengthBytes; taskMemoryManager = spy(taskMemoryManager); when(taskMemoryManager.pageSizeBytes()).thenReturn(pageSizeBytes); - final UnsafeShuffleWriter writer = new UnsafeShuffleWriter( + final UnsafeShuffleWriter<Object, Object> writer = new UnsafeShuffleWriter<>( blockManager, taskMemoryManager, new SerializedShuffleHandle<>(0, 1, shuffleDep), diff --git a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala index 71a127bd4b9f..e3cfec90ee81 100644 --- a/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala +++ b/core/src/test/scala/org/apache/spark/deploy/history/HistoryServerSuite.scala @@ -26,7 +26,6 @@ import javax.servlet.http.{HttpServletRequest, HttpServletRequestWrapper, HttpSe import scala.collection.JavaConverters._ import scala.concurrent.duration._ -import com.gargoylesoftware.htmlunit.BrowserVersion import com.google.common.io.{ByteStreams, Files} import org.apache.commons.io.{FileUtils, IOUtils} import org.apache.hadoop.fs.{FileStatus, FileSystem, Path} @@ -365,8 +364,7 @@ class HistoryServerSuite extends SparkFunSuite with BeforeAndAfter with Matchers contextHandler.addServlet(holder, "/") server.attachHandler(contextHandler) - implicit val webDriver: WebDriver = - new HtmlUnitDriver(BrowserVersion.INTERNET_EXPLORER_11, true) + implicit val 
webDriver: WebDriver = new HtmlUnitDriver(true) try { val url = s"http://localhost:$port" diff --git a/core/src/test/scala/org/apache/spark/util/KeyLockSuite.scala b/core/src/test/scala/org/apache/spark/util/KeyLockSuite.scala index 2169a0e4d442..6888e492a8d3 100644 --- a/core/src/test/scala/org/apache/spark/util/KeyLockSuite.scala +++ b/core/src/test/scala/org/apache/spark/util/KeyLockSuite.scala @@ -49,7 +49,7 @@ class KeyLockSuite extends SparkFunSuite with TimeLimits { @volatile var e: Throwable = null val threads = (0 until numThreads).map { i => new Thread() { - override def run(): Unit = try { + override def run(): Unit = { latch.await(foreverMs, TimeUnit.MILLISECONDS) keyLock.withLock(keys(i)) { var cur = numThreadsHoldingLock.get() diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala index 1d2e8acff9a3..6c694804e451 100644 --- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala +++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala @@ -272,7 +272,8 @@ class ExecutorClassLoaderSuite assert(e.getMessage.contains("ThisIsAClassName")) // RemoteClassLoaderError must not be LinkageError nor ClassNotFoundException. Otherwise, // JVM will cache it and doesn't retry to load a class. 
- assert(!e.isInstanceOf[LinkageError] && !e.isInstanceOf[ClassNotFoundException]) + assert(!(classOf[LinkageError].isAssignableFrom(e.getClass))) + assert(!(classOf[ClassNotFoundException].isAssignableFrom(e.getClass))) } finally { rpcEnv.shutdown() rpcEnv.awaitTermination() diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala index fbaa5527a705..828706743c5b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala @@ -680,12 +680,12 @@ class TreeNodeSuite extends SparkFunSuite with SQLHelper { } test("clone") { - def assertDifferentInstance(before: AnyRef, after: AnyRef): Unit = { + def assertDifferentInstance[T <: TreeNode[T]](before: TreeNode[T], after: TreeNode[T]): Unit = { assert(before.ne(after) && before == after) - before.asInstanceOf[TreeNode[_]].children.zip( - after.asInstanceOf[TreeNode[_]].children).foreach { - case (beforeChild: AnyRef, afterChild: AnyRef) => - assertDifferentInstance(beforeChild, afterChild) + before.children.zip(after.children).foreach { case (beforeChild, afterChild) => + assertDifferentInstance( + beforeChild.asInstanceOf[TreeNode[T]], + afterChild.asInstanceOf[TreeNode[T]]) } }