Skip to content

Commit 63f7f2e

Browse files
committed
Merge remote-tracking branch 'upstream/master' into sc-5027
Conflicts:
	sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
	sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala
	sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala
	sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/PartitioningUtils.scala
	sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
2 parents 51c1322 + e0deee1 commit 63f7f2e

File tree

58 files changed

+595
-226
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

58 files changed

+595
-226
lines changed

NOTICE

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -421,9 +421,6 @@ Copyright (c) 2011, Terrence Parr.
421421
This product includes/uses ASM (http://asm.ow2.org/),
422422
Copyright (c) 2000-2007 INRIA, France Telecom.
423423

424-
This product includes/uses org.json (http://www.json.org/java/index.html),
425-
Copyright (c) 2002 JSON.org
426-
427424
This product includes/uses JLine (http://jline.sourceforge.net/),
428425
Copyright (c) 2002-2006, Marc Prud'hommeaux <mwp1@cornell.edu>.
429426

core/src/test/scala/org/apache/spark/util/MutableURLClassLoaderSuite.scala

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
5151
assert(fakeClassVersion === "1")
5252
val fakeClass2 = classLoader.loadClass("FakeClass2").newInstance()
5353
assert(fakeClass.getClass === fakeClass2.getClass)
54+
classLoader.close()
55+
parentLoader.close()
5456
}
5557

5658
test("parent first") {
@@ -61,6 +63,8 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
6163
assert(fakeClassVersion === "2")
6264
val fakeClass2 = classLoader.loadClass("FakeClass1").newInstance()
6365
assert(fakeClass.getClass === fakeClass2.getClass)
66+
classLoader.close()
67+
parentLoader.close()
6468
}
6569

6670
test("child first can fall back") {
@@ -69,6 +73,8 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
6973
val fakeClass = classLoader.loadClass("FakeClass3").newInstance()
7074
val fakeClassVersion = fakeClass.toString
7175
assert(fakeClassVersion === "2")
76+
classLoader.close()
77+
parentLoader.close()
7278
}
7379

7480
test("child first can fail") {
@@ -77,20 +83,26 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
7783
intercept[java.lang.ClassNotFoundException] {
7884
classLoader.loadClass("FakeClassDoesNotExist").newInstance()
7985
}
86+
classLoader.close()
87+
parentLoader.close()
8088
}
8189

8290
test("default JDK classloader get resources") {
8391
val parentLoader = new URLClassLoader(fileUrlsParent, null)
8492
val classLoader = new URLClassLoader(fileUrlsChild, parentLoader)
8593
assert(classLoader.getResources("resource1").asScala.size === 2)
8694
assert(classLoader.getResources("resource2").asScala.size === 1)
95+
classLoader.close()
96+
parentLoader.close()
8797
}
8898

8999
test("parent first get resources") {
90100
val parentLoader = new URLClassLoader(fileUrlsParent, null)
91101
val classLoader = new MutableURLClassLoader(fileUrlsChild, parentLoader)
92102
assert(classLoader.getResources("resource1").asScala.size === 2)
93103
assert(classLoader.getResources("resource2").asScala.size === 1)
104+
classLoader.close()
105+
parentLoader.close()
94106
}
95107

96108
test("child first get resources") {
@@ -103,6 +115,8 @@ class MutableURLClassLoaderSuite extends SparkFunSuite with Matchers {
103115

104116
res1.map(scala.io.Source.fromURL(_).mkString) should contain inOrderOnly
105117
("resource1Contents-child", "resource1Contents-parent")
118+
classLoader.close()
119+
parentLoader.close()
106120
}
107121

108122

dev/deps/spark-deps-hadoop-2.2

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -103,7 +103,6 @@ jline-2.12.1.jar
103103
joda-time-2.9.3.jar
104104
jodd-core-3.5.2.jar
105105
jpam-1.1.jar
106-
json-20090211.jar
107106
json4s-ast_2.11-3.2.11.jar
108107
json4s-core_2.11-3.2.11.jar
109108
json4s-jackson_2.11-3.2.11.jar

dev/deps/spark-deps-hadoop-2.3

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,6 @@ jline-2.12.1.jar
108108
joda-time-2.9.3.jar
109109
jodd-core-3.5.2.jar
110110
jpam-1.1.jar
111-
json-20090211.jar
112111
json4s-ast_2.11-3.2.11.jar
113112
json4s-core_2.11-3.2.11.jar
114113
json4s-jackson_2.11-3.2.11.jar

dev/deps/spark-deps-hadoop-2.4

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,6 @@ jline-2.12.1.jar
108108
joda-time-2.9.3.jar
109109
jodd-core-3.5.2.jar
110110
jpam-1.1.jar
111-
json-20090211.jar
112111
json4s-ast_2.11-3.2.11.jar
113112
json4s-core_2.11-3.2.11.jar
114113
json4s-jackson_2.11-3.2.11.jar

dev/deps/spark-deps-hadoop-2.6

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,6 @@ jline-2.12.1.jar
116116
joda-time-2.9.3.jar
117117
jodd-core-3.5.2.jar
118118
jpam-1.1.jar
119-
json-20090211.jar
120119
json4s-ast_2.11-3.2.11.jar
121120
json4s-core_2.11-3.2.11.jar
122121
json4s-jackson_2.11-3.2.11.jar

dev/deps/spark-deps-hadoop-2.7

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,6 @@ jline-2.12.1.jar
116116
joda-time-2.9.3.jar
117117
jodd-core-3.5.2.jar
118118
jpam-1.1.jar
119-
json-20090211.jar
120119
json4s-ast_2.11-3.2.11.jar
121120
json4s-core_2.11-3.2.11.jar
122121
json4s-jackson_2.11-3.2.11.jar

examples/src/main/scala/org/apache/spark/examples/LocalFileLR.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,8 @@ object LocalFileLR {
5151

5252
showWarning()
5353

54-
val lines = scala.io.Source.fromFile(args(0)).getLines().toArray
54+
val fileSrc = scala.io.Source.fromFile(args(0))
55+
val lines = fileSrc.getLines().toArray
5556
val points = lines.map(parsePoint _)
5657
val ITERATIONS = args(1).toInt
5758

@@ -69,6 +70,7 @@ object LocalFileLR {
6970
w -= gradient
7071
}
7172

73+
fileSrc.close()
7274
println("Final w: " + w)
7375
}
7476
}

external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/JsonUtils.scala

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,6 @@
1717

1818
package org.apache.spark.sql.kafka010
1919

20-
import java.io.Writer
21-
2220
import scala.collection.mutable.HashMap
2321
import scala.util.control.NonFatal
2422

external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSource.scala

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
package org.apache.spark.sql.kafka010
1919

2020
import java.{util => ju}
21+
import java.io._
22+
import java.nio.charset.StandardCharsets
2123

2224
import scala.collection.JavaConverters._
2325
import scala.util.control.NonFatal
@@ -114,7 +116,22 @@ private[kafka010] case class KafkaSource(
114116
* `KafkaConsumer.poll` may hang forever (KAFKA-1894).
115117
*/
116118
private lazy val initialPartitionOffsets = {
117-
val metadataLog = new HDFSMetadataLog[KafkaSourceOffset](sqlContext.sparkSession, metadataPath)
119+
val metadataLog =
120+
new HDFSMetadataLog[KafkaSourceOffset](sqlContext.sparkSession, metadataPath) {
121+
override def serialize(metadata: KafkaSourceOffset, out: OutputStream): Unit = {
122+
val bytes = metadata.json.getBytes(StandardCharsets.UTF_8)
123+
out.write(bytes.length)
124+
out.write(bytes)
125+
}
126+
127+
override def deserialize(in: InputStream): KafkaSourceOffset = {
128+
val length = in.read()
129+
val bytes = new Array[Byte](length)
130+
in.read(bytes)
131+
KafkaSourceOffset(SerializedOffset(new String(bytes, StandardCharsets.UTF_8)))
132+
}
133+
}
134+
118135
metadataLog.get(0).getOrElse {
119136
val offsets = startingOffsets match {
120137
case EarliestOffsets => KafkaSourceOffset(fetchEarliestOffsets())

0 commit comments

Comments (0)