2 files changed: +3 −8

catalyst/src/main/scala/org/apache/spark/sql/internal
core/src/main/scala/org/apache/spark/sql/execution/datasources/orc

@@ -3613,7 +3613,7 @@ class SQLConf extends Serializable with Logging {
   def parquetVectorizedReaderEnabled: Boolean = getConf(PARQUET_VECTORIZED_READER_ENABLED)

   def parquetVectorizedReaderBatchSize: Int = getConf(PARQUET_VECTORIZED_READER_BATCH_SIZE)
-
+
   def fileMetaCacheOrcEnabled: Boolean = getConf(FILE_META_CACHE_ORC_ENABLED)

   def columnBatchSize: Int = getConf(COLUMN_BATCH_SIZE)
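The SQLConf hunk is whitespace-only: the blank line at 3616 is rewritten (apparently to drop trailing whitespace) next to the existing fileMetaCacheOrcEnabled accessor. For context, a hypothetical sketch of how a caller might gate the ORC tail cache on this flag; the helper name orcTailFor and the OrcFileMetaKey constructor arguments are assumptions, not part of this diff.

// Hypothetical usage sketch (not part of this diff): consult the SQLConf flag
// before touching the file-meta cache. OrcFileMetaKey(path, conf) is assumed.
def orcTailFor(path: Path, conf: Configuration, sqlConf: SQLConf): OrcTail =
  if (sqlConf.fileMetaCacheOrcEnabled) {
    // Serve the tail through OrcFileMeta's cache helper backed by FileMetaCacheManager.
    OrcFileMeta.readTailFromCache(OrcFileMetaKey(path, conf))
  } else {
    // Fall back to reading the tail directly from the file.
    OrcFileMeta(path, conf).tail
  }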
core/src/main/scala/org/apache/spark/sql/execution/datasources/orc

@@ -36,8 +36,8 @@ private[sql] object OrcFileMeta {
   def apply(path: Path, conf: Configuration): OrcFileMeta = {
     val fs = path.getFileSystem(conf)
     val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
-    Utils.tryWithResource(new ForTailCacheReader(path, readerOptions)) { fileReader =>
-      new OrcFileMeta(fileReader.getOrcTail)
+    Utils.tryWithResource(new ReaderImpl(path, readerOptions)) { fileReader =>
+      new OrcFileMeta(new OrcTail(fileReader.getFileTail, fileReader.getSerializedFileFooter))
     }
   }

@@ -47,8 +47,3 @@ private[sql] object OrcFileMeta {
   def readTailFromCache(key: OrcFileMetaKey): OrcTail =
     FileMetaCacheManager.get(key).asInstanceOf[OrcFileMeta].tail
 }
-
-private[sql] class ForTailCacheReader(path: Path, options: OrcFile.ReaderOptions)
-  extends ReaderImpl(path, options) {
-  def getOrcTail: OrcTail = tail
-}
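Net effect of the two OrcFileMeta hunks: the ForTailCacheReader subclass, which existed only to expose ReaderImpl's protected tail field as getOrcTail, is removed, and the tail is instead rebuilt from ReaderImpl's public getFileTail and getSerializedFileFooter accessors. A sketch of how apply reads after the patch; the imports are assumptions inferred from the identifiers in the diff, not shown in it.

// Sketch of OrcFileMeta.apply after this change. The imports below are
// inferred from the identifiers in the diff and are assumptions, not part of it.
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.orc.OrcFile
import org.apache.orc.impl.{OrcTail, ReaderImpl}
import org.apache.spark.util.Utils

def apply(path: Path, conf: Configuration): OrcFileMeta = {
  val fs = path.getFileSystem(conf)
  val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
  // ReaderImpl parses the file tail when it opens the file; rebuild an OrcTail
  // from its public accessors instead of subclassing to reach the protected
  // field, and let tryWithResource close the reader.
  Utils.tryWithResource(new ReaderImpl(path, readerOptions)) { fileReader =>
    new OrcFileMeta(new OrcTail(fileReader.getFileTail, fileReader.getSerializedFileFooter))
  }
}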