
Commit 72eae96

Committed by brandonlin
add new config to enable the function
1 parent 083fe21 commit 72eae96

3 files changed: +27 additions, −22 deletions


sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 11 additions & 0 deletions
@@ -3142,6 +3142,14 @@ object SQLConf {
     .booleanConf
     .createWithDefault(false)

+  val ENABLE_SUBDIRECTORY_SUPPORT_WITH_NON_PARTITIONED_TABLE =
+    buildConf("spark.sql.nonPartitionedTable.subdirectory.read.enabled")
+      .doc("When set to true, Spark SQL could read the files of a " +
+        "non-partitioned Hive table from subdirectories under the table's root path.")
+      .version("3.2.0")
+      .booleanConf
+      .createWithDefault(false)
+
   /**
    * Holds information about keys that have been deprecated.
    *

@@ -3829,6 +3837,9 @@ class SQLConf extends Serializable with Logging {

   def legacyIntervalEnabled: Boolean = getConf(LEGACY_INTERVAL_ENABLED)

+  def nonPartitionedTableSubDirectoryReadSupport: Boolean =
+    getConf(ENABLE_SUBDIRECTORY_SUPPORT_WITH_NON_PARTITIONED_TABLE)
+
   /** ********************** SQLConf functionality methods ************ */

   /** Set Spark SQL configuration properties. */
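
For context, a minimal end-to-end sketch of how the new flag could be exercised from a Spark session with Hive support, mirroring what the updated test below does. The table name and output path are illustrative and not part of this commit:

// Sketch only: enable the new session conf, write ORC files into a nested
// directory, and read them back through a non-partitioned external table.
import spark.implicits._

spark.conf.set("spark.sql.nonPartitionedTable.subdirectory.read.enabled", "true")

Seq(1, 2, 3, 4, 5).toDF("val")
  .coalesce(1)
  .write
  .mode("overwrite")
  .format("orc")
  .save("/tmp/dir_test/sub1/sub2")   // hypothetical location

spark.sql("CREATE EXTERNAL TABLE dir_test (val INT) STORED AS ORC LOCATION '/tmp/dir_test'")
spark.sql("SELECT * FROM dir_test").show()   // with the flag on, rows under sub1/sub2 should be visible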

sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala

Lines changed: 1 addition & 4 deletions
@@ -280,10 +280,7 @@ private[hive] class HiveMetastoreCatalog(sparkSession: SparkSession) extends Log

   private def getDirectoryPathSeq(rootPath: Path): Seq[String] = {
     val enableSupportSubDirectories =
-      sparkSession.sparkContext.
-        hadoopConfiguration.getBoolean("hive.mapred.supports.subdirectories", false) &&
-      sparkSession.sparkContext.
-        hadoopConfiguration.getBoolean("mapred.input.dir.recursive", false)
+      sparkSession.sessionState.conf.nonPartitionedTableSubDirectoryReadSupport

     if (enableSupportSubDirectories) {
       val fs = rootPath.getFileSystem(sparkSession.sessionState.newHadoopConf())
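
The unchanged tail of the hunk shows that, once the flag is on, getDirectoryPathSeq obtains a FileSystem for the table's root path. As a rough illustration only (not the method body from this commit), a recursive enumeration of the root plus all of its subdirectories via the Hadoop FileSystem API could look like this:

// Sketch only: collect a root directory and every subdirectory beneath it.
// listStatus/isDirectory/getPath come from org.apache.hadoop.fs; the method name is illustrative.
import org.apache.hadoop.fs.{FileSystem, Path}

def listDirectoriesRecursively(fs: FileSystem, root: Path): Seq[String] = {
  val subDirs = fs.listStatus(root).filter(_.isDirectory).map(_.getPath)
  root.toString +: subDirs.toSeq.flatMap(listDirectoriesRecursively(fs, _))
}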

sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveMetastoreCatalogSuite.scala

Lines changed: 15 additions & 18 deletions
@@ -367,28 +367,25 @@ class DataSourceWithHiveMetastoreCatalogSuite
   test("SPARK-28098: allow reader could read files from subdirectories") {
     withTempPath(dir => {
       withTable("dirTest") {
-        val testData = java.util.Arrays.asList(Row(1), Row(2), Row(3), Row(4), Row(5))
+        withSQLConf(
+          "spark.sql.nonPartitionedTable.subdirectory.read.enabled" -> "true") {
+          val testData = java.util.Arrays.asList(Row(1), Row(2), Row(3), Row(4), Row(5))

-        spark.sparkContext
-          .hadoopConfiguration.set("hive.mapred.supports.subdirectories", "true")
+          val dataFrame = spark.sqlContext
+            .createDataFrame(testData, StructType(Seq(StructField("val", IntegerType))))

-        spark.sparkContext
-          .hadoopConfiguration.set("mapred.input.dir.recursive", "true")
-
-        val dataFrame = spark.sqlContext
-          .createDataFrame(testData, StructType(Seq(StructField("val", IntegerType))))
-
-        dataFrame
-          .coalesce(1)
-          .write
-          .mode(SaveMode.Overwrite)
-          .format("orc")
-          .save(s"${dir.getCanonicalPath}/sub1/sub2")
+          dataFrame
+            .coalesce(1)
+            .write
+            .mode(SaveMode.Overwrite)
+            .format("orc")
+            .save(s"${dir.getCanonicalPath}/sub1/sub2")

-        spark.sql("CREATE EXTERNAL TABLE dirTest (val INT)" +
-          s" STORED AS ORC LOCATION '${dir.toURI}'")
+          spark.sql("CREATE EXTERNAL TABLE dirTest (val INT)" +
+            s" STORED AS ORC LOCATION '${dir.toURI}'")

-        checkAnswer(spark.sql("select * from dirTest"), dataFrame)
+          checkAnswer(spark.sql("select * from dirTest"), dataFrame)
+        }
       }
     })
   }
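
One small aside: Spark test suites commonly reference a config through its ConfigEntry constant rather than a hard-coded string, which keeps the key and the test in sync. A sketch of the same withSQLConf call written that way, assuming the new constant is reachable via SQLConf:

// Sketch only: reference the config entry's key instead of the literal string.
import org.apache.spark.sql.internal.SQLConf

withSQLConf(
  SQLConf.ENABLE_SUBDIRECTORY_SUPPORT_WITH_NON_PARTITIONED_TABLE.key -> "true") {
  // same body as in the diff above: write ORC data under sub1/sub2, create the
  // external table, and verify SELECT * returns the written rows
}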
