diff --git a/common/utils/src/main/scala/org/apache/spark/util/MavenUtils.scala b/common/utils/src/main/scala/org/apache/spark/util/MavenUtils.scala
index 65530b7fa473..08291859a32c 100644
--- a/common/utils/src/main/scala/org/apache/spark/util/MavenUtils.scala
+++ b/common/utils/src/main/scala/org/apache/spark/util/MavenUtils.scala
@@ -324,6 +324,14 @@ private[spark] object MavenUtils extends Logging {
val ivySettings: IvySettings = new IvySettings
try {
ivySettings.load(file)
+ if (ivySettings.getDefaultIvyUserDir == null && ivySettings.getDefaultCache == null) {
+ // To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
+ // `processIvyPathArg` can overwrite these later.
+ val alternateIvyDir = System.getProperty("ivy.home",
+ System.getProperty("user.home") + File.separator + ".ivy2.5.2")
+ ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
+ ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
+ }
} catch {
case e @ (_: IOException | _: ParseException) =>
throw new SparkException(s"Failed when loading Ivy settings from $settingsFile", e)
@@ -335,10 +343,13 @@ private[spark] object MavenUtils extends Logging {
/* Set ivy settings for location of cache, if option is supplied */
private def processIvyPathArg(ivySettings: IvySettings, ivyPath: Option[String]): Unit = {
- ivyPath.filterNot(_.trim.isEmpty).foreach { alternateIvyDir =>
- ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
- ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
+ val alternateIvyDir = ivyPath.filterNot(_.trim.isEmpty).getOrElse {
+ // To protect old Ivy-based systems like old Spark from Apache Ivy 2.5.2's incompatibility.
+ System.getProperty("ivy.home",
+ System.getProperty("user.home") + File.separator + ".ivy2.5.2")
}
+ ivySettings.setDefaultIvyUserDir(new File(alternateIvyDir))
+ ivySettings.setDefaultCache(new File(alternateIvyDir, "cache"))
}
/* Add any optional additional remote repositories */
diff --git a/common/utils/src/test/scala/org/apache/spark/util/IvyTestUtils.scala b/common/utils/src/test/scala/org/apache/spark/util/IvyTestUtils.scala
index 50312646bdb7..76062074edca 100644
--- a/common/utils/src/test/scala/org/apache/spark/util/IvyTestUtils.scala
+++ b/common/utils/src/test/scala/org/apache/spark/util/IvyTestUtils.scala
@@ -374,7 +374,8 @@ private[spark] object IvyTestUtils {
f(repo.toURI.toString)
} finally {
// Clean up
- if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2")) {
+ if (repo.toString.contains(".m2") || repo.toString.contains(".ivy2") ||
+ repo.toString.contains(".ivy2.5.2")) {
val groupDir = getBaseGroupDirectory(artifact, useIvyLayout)
FileUtils.deleteDirectory(new File(repo, groupDir + File.separator + artifact.artifactId))
deps.foreach { _.foreach { dep =>
diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index 7c8cfc9f208f..0b026a888e88 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -2491,10 +2491,10 @@ package object config {
.doc("Path to specify the Ivy user directory, used for the local Ivy cache and " +
"package files from spark.jars.packages. " +
"This will override the Ivy property ivy.default.ivy.user.dir " +
- "which defaults to ~/.ivy2.")
+ "which defaults to ~/.ivy2.5.2")
.version("1.3.0")
.stringConf
- .createOptional
+ .createWithDefault("~/.ivy2.5.2")
private[spark] val JAR_IVY_SETTING_PATH =
ConfigBuilder(MavenUtils.JAR_IVY_SETTING_PATH_KEY)
diff --git a/dev/deps/spark-deps-hadoop-3-hive-2.3 b/dev/deps/spark-deps-hadoop-3-hive-2.3
index 97205011e265..bac74d4214d4 100644
--- a/dev/deps/spark-deps-hadoop-3-hive-2.3
+++ b/dev/deps/spark-deps-hadoop-3-hive-2.3
@@ -102,7 +102,7 @@ httpcore/4.4.16//httpcore-4.4.16.jar
icu4j/72.1//icu4j-72.1.jar
ini4j/0.5.4//ini4j-0.5.4.jar
istack-commons-runtime/3.0.8//istack-commons-runtime-3.0.8.jar
-ivy/2.5.1//ivy-2.5.1.jar
+ivy/2.5.2//ivy-2.5.2.jar
jackson-annotations/2.16.1//jackson-annotations-2.16.1.jar
jackson-core-asl/1.9.13//jackson-core-asl-1.9.13.jar
jackson-core/2.16.1//jackson-core-2.16.1.jar
diff --git a/dev/run-tests.py b/dev/run-tests.py
index 3fe79af58d71..eb760139f9b6 100755
--- a/dev/run-tests.py
+++ b/dev/run-tests.py
@@ -478,6 +478,8 @@ def main():
rm_r(os.path.join(SPARK_HOME, "work"))
rm_r(os.path.join(USER_HOME, ".ivy2", "local", "org.apache.spark"))
rm_r(os.path.join(USER_HOME, ".ivy2", "cache", "org.apache.spark"))
+ rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "local", "org.apache.spark"))
+ rm_r(os.path.join(USER_HOME, ".ivy2.5.2", "cache", "org.apache.spark"))
os.environ["CURRENT_BLOCK"] = str(ERROR_CODES["BLOCK_GENERAL"])
diff --git a/docs/core-migration-guide.md b/docs/core-migration-guide.md
index 26e6b0f1f444..3adfbeca8fd9 100644
--- a/docs/core-migration-guide.md
+++ b/docs/core-migration-guide.md
@@ -36,6 +36,8 @@ license: |
- Since Spark 4.0, Spark uses `ReadWriteOncePod` instead of `ReadWriteOnce` access mode in persistence volume claims. To restore the legacy behavior, you can set `spark.kubernetes.legacy.useReadWriteOnceAccessMode` to `true`.
- Since Spark 4.0, Spark uses `~/.ivy2.5.2` as the default Ivy user directory, to isolate existing systems from Apache Ivy 2.5.2's incompatibilities. To restore the legacy behavior, you can set `spark.jars.ivy` to `~/.ivy2`.
+
## Upgrading from Core 3.4 to 3.5
- Since Spark 3.5, `spark.yarn.executor.failuresValidityInterval` is deprecated. Use `spark.executor.failuresValidityInterval` instead.
diff --git a/pom.xml b/pom.xml
index 124552a42ff7..2ee31bd63650 100644
--- a/pom.xml
+++ b/pom.xml
@@ -146,11 +146,7 @@
10.0.19
4.0.3
0.10.0
-
- 2.5.1
+ 2.5.2
2.0.8