diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index ea49991493fd..241025e828cc 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -23,7 +23,7 @@ import java.net._
 import java.nio.ByteBuffer
 import java.nio.channels.Channels
 import java.nio.charset.StandardCharsets
-import java.nio.file.Files
+import java.nio.file.{Files, Paths}
 import java.util.{Locale, Properties, Random, UUID}
 import java.util.concurrent._
 import javax.net.ssl.HttpsURLConnection
@@ -946,15 +946,7 @@ private[spark] object Utils extends Logging {
    * Check to see if file is a symbolic link.
    */
   def isSymlink(file: File): Boolean = {
-    if (file == null) throw new NullPointerException("File must not be null")
-    if (isWindows) return false
-    val fileInCanonicalDir = if (file.getParent() == null) {
-      file
-    } else {
-      new File(file.getParentFile().getCanonicalFile(), file.getName())
-    }
-
-    !fileInCanonicalDir.getCanonicalFile().equals(fileInCanonicalDir.getAbsoluteFile())
+    Files.isSymbolicLink(Paths.get(file.toURI))
   }
 
   /**
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 271897699201..26e047f6b368 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -575,8 +575,13 @@ class SparkSubmitSuite
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   private def runSparkSubmit(args: Seq[String]): Unit = {
     val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
+    val sparkSubmitFile = if (Utils.isWindows) {
+      new File("..\\bin\\spark-submit.cmd")
+    } else {
+      new File("../bin/spark-submit")
+    }
     val process = Utils.executeCommand(
-      Seq("./bin/spark-submit") ++ args,
+      Seq(sparkSubmitFile.getCanonicalPath) ++ args,
       new File(sparkHome),
       Map("SPARK_TESTING" -> "1", "SPARK_HOME" -> sparkHome))
 
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 39c5857b1345..8e2e9c10419b 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -127,6 +127,8 @@ class FsHistoryProviderSuite extends SparkFunSuite with BeforeAndAfter with Matc
   }
 
   test("SPARK-3697: ignore directories that cannot be read.") {
+    // setReadable doesn't work on Windows for directories
+    assume(!Utils.isWindows)
     val logFile1 = newLogFile("new1", None, inProgress = false)
     writeFile(logFile1, true, None,
       SparkListenerApplicationStart("app1-1", Some("app1-1"), 1L, "test", None),
diff --git a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
index 9ed5016510d5..9e6b02b9eac4 100644
--- a/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/DiskStoreSuite.scala
@@ -22,10 +22,14 @@ import java.util.Arrays
 
 import org.apache.spark.{SparkConf, SparkFunSuite}
 import org.apache.spark.util.io.ChunkedByteBuffer
+import org.apache.spark.util.Utils
 
 class DiskStoreSuite extends SparkFunSuite {
 
   test("reads of memory-mapped and non memory-mapped files are equivalent") {
+    // On Windows, re-opening the file store fails while the memory-mapped
+    // byte buffer for the file has not yet been garbage-collected.
+    assume(!Utils.isWindows)
     val confKey = "spark.storage.memoryMapThreshold"
 
     // Create a non-trivial (not all zeros) byte array
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 4aa4854c36f3..0ed866a74b58 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -686,7 +686,9 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     val stream = new java.io.PrintStream(buffer, true, "UTF-8")
 
     // scalastyle:off println
-    stream.println("test circular test circular test circular test circular test circular")
+    // Note: println appends '\r\n' on Windows, which would make the output
+    // one byte longer than on Unix/Linux, so write an explicit '\n' instead.
+    stream.print("test circular test circular test circular test circular test circular\n")
     // scalastyle:on println
     assert(buffer.toString === "t circular test circular\n")
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index cb88a1c83c99..24b0014188bc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -67,6 +67,8 @@ class CreateTableAsSelectSuite extends DataSourceTest with SharedSQLContext with
   }
 
   test("CREATE TEMPORARY TABLE AS SELECT based on the file without write permission") {
+    // setWritable(boolean) doesn't work on Windows
+    assume(!Utils.isWindows)
     val childPath = new File(path.toString, "child")
     path.mkdir()
     childPath.createNewFile()
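The Utils.isSymlink rewrite above replaces a canonical-path comparison (which was
short-circuited to false on Windows) with NIO's direct link-status check, which
behaves the same on every platform. A minimal, self-contained sketch of the new
behavior; the object name and file names are illustrative only, and creating a
symlink may require elevated privileges on Windows:

  import java.io.File
  import java.nio.file.{Files, Paths}

  object IsSymlinkSketch {
    // Same one-liner as the patched Utils.isSymlink: NIO inspects the link
    // status directly, with no canonical-path comparison and no OS check.
    def isSymlink(file: File): Boolean = Files.isSymbolicLink(Paths.get(file.toURI))

    def main(args: Array[String]): Unit = {
      val target = Files.createTempFile("target", ".txt")
      val link = target.resolveSibling("is-symlink-sketch-link.txt")
      Files.createSymbolicLink(link, target)
      assert(!isSymlink(target.toFile))  // a regular file is not a symlink
      assert(isSymlink(link.toFile))     // the link itself is
      Files.delete(link)
      Files.delete(target)
    }
  }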
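The test-side changes all follow one pattern: assume(!Utils.isWindows) at the top
of the test body. In ScalaTest, a false assume throws TestCanceledException, so
the test is reported as canceled on Windows rather than failed. A standalone
sketch of the pattern, assuming ScalaTest 3.x; the suite name and the os.name
probe are illustrative (the patch uses Spark's own Utils.isWindows):

  import java.io.File
  import java.util.Locale

  import org.scalatest.funsuite.AnyFunSuite

  class PosixPermissionSuite extends AnyFunSuite {
    private val isWindows =
      sys.props("os.name").toLowerCase(Locale.ROOT).contains("windows")

    test("setWritable(false) removes write permission") {
      // Cancel (not fail) on Windows, where java.io.File.setWritable and
      // setReadable are unreliable, notably for directories.
      assume(!isWindows)
      val f = File.createTempFile("perm-sketch", ".txt")
      try {
        assert(f.setWritable(false))
        assert(!f.canWrite)
      } finally {
        f.setWritable(true)
        f.delete()
      }
    }
  }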
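The UtilsSuite change works because PrintStream.println terminates lines with
System.lineSeparator(), which is "\r\n" on Windows, so the circular buffer would
hold one extra byte there; stream.print with an explicit '\n' makes the byte
count identical everywhere. A quick JDK-only illustration (the object name is
hypothetical):

  import java.io.{ByteArrayOutputStream, PrintStream}

  object LineSeparatorSketch {
    def main(args: Array[String]): Unit = {
      val buf = new ByteArrayOutputStream()
      val out = new PrintStream(buf, true, "UTF-8")

      out.println("abc")             // appends System.lineSeparator(): "\n" or "\r\n"
      val platformBytes = buf.size() // 4 on Unix/Linux, 5 on Windows

      buf.reset()
      out.print("abc\n")             // always exactly one '\n'
      val pinnedBytes = buf.size()   // 4 on every platform

      println(s"println: $platformBytes bytes, explicit \\n: $pinnedBytes bytes")
    }
  }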