
Commit a74c793

remove ForTailCacheReader

1 parent: 45f4827

2 files changed: 3 additions, 8 deletions

sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala

Lines changed: 1 addition & 1 deletion

@@ -3613,7 +3613,7 @@ class SQLConf extends Serializable with Logging {
   def parquetVectorizedReaderEnabled: Boolean = getConf(PARQUET_VECTORIZED_READER_ENABLED)

   def parquetVectorizedReaderBatchSize: Int = getConf(PARQUET_VECTORIZED_READER_BATCH_SIZE)
-
+
   def fileMetaCacheOrcEnabled: Boolean = getConf(FILE_META_CACHE_ORC_ENABLED)

   def columnBatchSize: Int = getConf(COLUMN_BATCH_SIZE)

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFileMeta.scala

Lines changed: 2 additions & 7 deletions

@@ -36,8 +36,8 @@ private[sql] object OrcFileMeta {
   def apply(path: Path, conf: Configuration): OrcFileMeta = {
     val fs = path.getFileSystem(conf)
     val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
-    Utils.tryWithResource(new ForTailCacheReader(path, readerOptions)) { fileReader =>
-      new OrcFileMeta(fileReader.getOrcTail)
+    Utils.tryWithResource(new ReaderImpl(path, readerOptions)) { fileReader =>
+      new OrcFileMeta(new OrcTail(fileReader.getFileTail, fileReader.getSerializedFileFooter))
     }
   }

@@ -47,8 +47,3 @@ private[sql] object OrcFileMeta {
   def readTailFromCache(key: OrcFileMetaKey): OrcTail =
     FileMetaCacheManager.get(key).asInstanceOf[OrcFileMeta].tail
 }
-
-private[sql] class ForTailCacheReader(path: Path, options: OrcFile.ReaderOptions)
-  extends ReaderImpl(path, options) {
-  def getOrcTail: OrcTail = tail
-}
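Why the subclass can go away: the only purpose of ForTailCacheReader was to expose ReaderImpl's protected tail field through a public getOrcTail accessor. The replacement builds the cached OrcTail directly from ReaderImpl's public getFileTail and getSerializedFileFooter accessors, which the new apply() above relies on. Below is a minimal standalone sketch of that pattern, not the committed code: the import paths for OrcTail/ReaderImpl are assumed from typical ORC packaging, and the object/helper names (OrcTailSketch, readTailDirectly) are hypothetical.

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.orc.OrcFile
import org.apache.orc.impl.{OrcTail, ReaderImpl}  // import paths assumed

object OrcTailSketch {
  // Hypothetical helper: open the file with the plain ReaderImpl and assemble
  // the OrcTail from its public accessors, mirroring the new apply() in the diff.
  def readTailDirectly(path: Path, conf: Configuration): OrcTail = {
    val fs = path.getFileSystem(conf)
    val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
    val reader = new ReaderImpl(path, readerOptions)
    try {
      // The FileTail protobuf plus the serialized footer bytes are enough to
      // reconstruct the tail later without touching the file again, which is
      // what the file-meta cache stores.
      new OrcTail(reader.getFileTail, reader.getSerializedFileFooter)
    } finally {
      reader.close()
    }
  }
}

Using only public accessors keeps OrcFileMeta decoupled from ReaderImpl's internals, so future ORC upgrades that change the protected field no longer require a Spark-side subclass.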
