ScalaReflectionSuite.scala
@@ -17,17 +17,13 @@
 
 package org.apache.spark.sql.catalyst
 
-import java.net.URLClassLoader
 import java.sql.{Date, Timestamp}
 
-import scala.reflect.runtime.universe.typeOf
-
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.expressions.{BoundReference, Literal, SpecificInternalRow}
 import org.apache.spark.sql.catalyst.expressions.objects.NewInstance
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
-import org.apache.spark.util.Utils
 
 case class PrimitiveData(
     intField: Int,
@@ -339,39 +335,4 @@ class ScalaReflectionSuite extends SparkFunSuite {
     assert(linkedHashMapDeserializer.dataType == ObjectType(classOf[LHMap[_, _]]))
   }
 
-  private val dataTypeForComplexData = dataTypeFor[ComplexData]
-  private val typeOfComplexData = typeOf[ComplexData]
-
-  Seq(
-    ("mirror", () => mirror),
-    ("dataTypeFor", () => dataTypeFor[ComplexData]),
-    ("constructorFor", () => deserializerFor[ComplexData]),
-    ("extractorsFor", {
-      val inputObject = BoundReference(0, dataTypeForComplexData, nullable = false)
-      () => serializerFor[ComplexData](inputObject)
-    }),
-    ("getConstructorParameters(cls)", () => getConstructorParameters(classOf[ComplexData])),
-    ("getConstructorParameterNames", () => getConstructorParameterNames(classOf[ComplexData])),
-    ("getClassFromType", () => getClassFromType(typeOfComplexData)),
-    ("schemaFor", () => schemaFor[ComplexData]),
-    ("localTypeOf", () => localTypeOf[ComplexData]),
-    ("getClassNameFromType", () => getClassNameFromType(typeOfComplexData)),
-    ("getParameterTypes", () => getParameterTypes(() => ())),
-    ("getConstructorParameters(tpe)", () => getClassNameFromType(typeOfComplexData))).foreach {
-    case (name, exec) =>
-      test(s"SPARK-13640: thread safety of ${name}") {
-        (0 until 100).foreach { _ =>
-          val loader = new URLClassLoader(Array.empty, Utils.getContextOrSparkClassLoader)
-          (0 until 10).par.foreach { _ =>
-            val cl = Thread.currentThread.getContextClassLoader
-            try {
-              Thread.currentThread.setContextClassLoader(loader)
-              exec()
-            } finally {
-              Thread.currentThread.setContextClassLoader(cl)
-            }
-          }
-        }
-      }
-  }
 }
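
For context, the removed SPARK-13640 tests all followed one pattern: invoke each ScalaReflection entry point from parallel tasks while the thread's context class loader is swapped to a fresh, empty URLClassLoader, failing if any call throws or deadlocks under concurrent class loading. A minimal self-contained sketch of that pattern follows; `ThreadSafetyCheck` and `work` are hypothetical stand-ins, not Spark code, and it targets Scala 2.12 (on 2.13+, `.par` additionally requires the scala-parallel-collections module).

import java.net.URLClassLoader

object ThreadSafetyCheck {
  def main(args: Array[String]): Unit = {
    // Stand-in for a ScalaReflection call such as schemaFor[ComplexData].
    def work(): Unit = scala.reflect.runtime.universe.typeOf[Seq[(Int, String)]]

    (0 until 100).foreach { _ =>
      // Fresh, empty class loader per outer iteration, as in the removed test.
      val loader = new URLClassLoader(Array.empty, getClass.getClassLoader)
      (0 until 10).par.foreach { _ =>
        val saved = Thread.currentThread.getContextClassLoader
        try {
          Thread.currentThread.setContextClassLoader(loader)
          work() // must not throw or deadlock under concurrent class loading
        } finally {
          Thread.currentThread.setContextClassLoader(saved)
        }
      }
    }
    println("no thread-safety failures observed")
  }
}
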
ExtractPythonUDFs.scala
@@ -133,7 +133,7 @@ object ExtractPythonUDFs extends Rule[SparkPlan] with PredicateHelper {
     val validUdfs = udfs.filter { udf =>
       // Check to make sure that the UDF can be evaluated with only the input of this child.
       udf.references.subsetOf(child.outputSet)
-    }.toArray
+    }
     if (validUdfs.nonEmpty) {
       val resultAttrs = udfs.zipWithIndex.map { case (u, i) =>
         AttributeReference(s"pythonUDF$i", u.dataType)()