diff --git a/assembly/pom.xml b/assembly/pom.xml
index 604b1ab3de6a8..5ec9da22ae83f 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -141,7 +141,9 @@
                   <include>com.google.common.**</include>
-                  <exclude>com.google.common.base.Optional**</exclude>
+                  <exclude>com/google/common/base/Absent*</exclude>
+                  <exclude>com/google/common/base/Optional*</exclude>
+                  <exclude>com/google/common/base/Present*</exclude>
diff --git a/core/pom.xml b/core/pom.xml
index 2a81f6df289c0..e012c5e673b74 100644
--- a/core/pom.xml
+++ b/core/pom.xml
@@ -343,7 +343,9 @@
                 <artifact>com.google.guava:guava</artifact>
+                <include>com/google/common/base/Absent*</include>
                 <include>com/google/common/base/Optional*</include>
+                <include>com/google/common/base/Present*</include>
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index b8574dfb42e6b..b8c23d524e00b 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -1307,4 +1307,30 @@ public void collectUnderlyingScalaRDD() {
SomeCustomClass[] collected = (SomeCustomClass[]) rdd.rdd().retag(SomeCustomClass.class).collect();
Assert.assertEquals(data.size(), collected.length);
}
+
+  /**
+   * Test for SPARK-3647. This test needs to use the maven-built assembly to trigger the issue,
+   * since that's the only artifact where Guava classes have been relocated.
+   */
+  @Test
+  public void testGuavaOptional() {
+    // Stop the context created in setUp() and start a local-cluster one, to force usage of the
+    // assembly.
+    sc.stop();
+    JavaSparkContext localCluster = new JavaSparkContext("local-cluster[1,1,512]", "JavaAPISuite");
+    try {
+      // Element type is Integer: parallelize() receives Arrays.asList(1, 2, null).
+      JavaRDD<Integer> rdd1 = localCluster.parallelize(Arrays.asList(1, 2, null), 3);
+      // Map each (possibly null) Integer to a Guava Optional; fromNullable(null) yields absent.
+      JavaRDD<Optional<Integer>> rdd2 = rdd1.map(
+        new Function<Integer, Optional<Integer>>() {
+          @Override
+          public Optional<Integer> call(Integer i) {
+            return Optional.fromNullable(i);
+          }
+        });
+      // collect() forces the Optional instances (Absent/Present) to be serialized across the
+      // local-cluster boundary, which is what exercises the shaded-Guava relocation.
+      rdd2.collect();
+    } finally {
+      localCluster.stop();
+    }
+  }
+
}