diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ResolvedDataSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ResolvedDataSource.scala
index 01a381c11ac56..38eff3aaa2c74 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ResolvedDataSource.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ResolvedDataSource.scala
@@ -228,7 +228,7 @@ object ResolvedDataSource extends Logging {
         sqlContext,
         fileCatalog,
         partitionSchema = partitionSchema,
-        dataSchema = dataSchema,
+        dataSchema = dataSchema.asNullable,
         bucketSpec = bucketSpec,
         format,
         options)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
index c85eeddc2c6d9..fe215afdecbca 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala
@@ -437,8 +437,8 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSQLContext {
       readParquetFile(path.toString) { df =>
         assertResult(df.schema) {
           StructType(
-            StructField("a", BooleanType, nullable = false) ::
-            StructField("b", IntegerType, nullable = false) ::
+            StructField("a", BooleanType, nullable = true) ::
+            StructField("b", IntegerType, nullable = true) ::
             Nil)
         }
       }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
index fa766e3b8deea..b3e146fba80be 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala
@@ -27,7 +27,7 @@ import org.apache.spark.sql.internal.{SessionState, SQLConf}
 private[sql] class TestSQLContext(sc: SparkContext) extends SQLContext(sc) { self =>

   def this() {
-    this(new SparkContext("local[1]", "test-sql-context",
+    this(new SparkContext("local[2]", "test-sql-context",
       new SparkConf().set("spark.sql.testkey", "true")))
   }

@@ -63,5 +63,5 @@ private[sql] object TestSQLContext {

   val overrideConfs: Map[String, String] = Map(
     // Fewer shuffle partitions to speed up testing.
-    SQLConf.SHUFFLE_PARTITIONS.key -> "1")
+    SQLConf.SHUFFLE_PARTITIONS.key -> "5")
 }
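Why the first two hunks go together: Spark forces the data schema of file-based sources to nullable on read, regardless of the nullability declared when the data was written, so `ResolvedDataSource` now passes `dataSchema.asNullable` and the Parquet round-trip test expects `nullable = true` on read-back. A minimal sketch of that behavior, assuming a Spark-1.6/2.0-era `sqlContext` and a temporary directory string `path` (both hypothetical names here, not part of this patch):

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.types._

// Write with a schema that declares both fields non-nullable.
val schema = StructType(
  StructField("a", BooleanType, nullable = false) ::
  StructField("b", IntegerType, nullable = false) :: Nil)
val rows = sqlContext.sparkContext.parallelize(Seq(Row(true, 1)))
sqlContext.createDataFrame(rows, schema).write.parquet(path)

// On read-back the data schema is forced to nullable (schema.asNullable),
// so every field reports nullable = true.
assert(sqlContext.read.parquet(path).schema.forall(_.nullable))
```

The `local[2]` and `SHUFFLE_PARTITIONS -> "5"` changes in `TestSQLContext` are independent of the nullability fix; they give the shared test context more parallelism than a single thread and a single shuffle partition, while still staying well below the default of 200 partitions.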