@@ -34,28 +34,23 @@ class PruneHiveTablePartitionsSuite extends QueryTest with SQLTestUtils with Tes
 
   test("SPARK-15616 statistics pruned after going through PruneHiveTablePartitions") {
     withTable("test", "temp") {
-      withTempDir { dir =>
-        sql(
-          s"""
-            |CREATE EXTERNAL TABLE test(i int)
-            |PARTITIONED BY (p int)
-            |STORED AS textfile
-            |LOCATION '${dir.toURI}'""".stripMargin)
+      sql(
+        s"""
+          |CREATE TABLE test(i int)
+          |PARTITIONED BY (p int)
+          |STORED AS textfile""".stripMargin)
+      spark.range(0, 1000, 1).selectExpr("id as col")
+        .createOrReplaceTempView("temp")
 
-        spark.range(0, 1000, 1).selectExpr("id as col")
-          .createOrReplaceTempView("temp")
-
-        for (part <- Seq(1, 2, 3, 4)) {
-          sql(s"""
-            |INSERT OVERWRITE TABLE test PARTITION (p='$part')
-            |select col from temp""".stripMargin)
-        }
-        val singlePartitionSizeInBytes = 3890
-        val analyzed1 = sql("select i from test where p>0").queryExecution.analyzed
-        val analyzed2 = sql("select i from test where p=1").queryExecution.analyzed
-        assert(Optimize.execute(analyzed1).stats.sizeInBytes === singlePartitionSizeInBytes * 4 * 12 / 16)
-        assert(Optimize.execute(analyzed2).stats.sizeInBytes === singlePartitionSizeInBytes * 12 / 16)
+      for (part <- Seq(1, 2, 3, 4)) {
+        sql(s"""
+          |INSERT OVERWRITE TABLE test PARTITION (p='$part')
+          |select col from temp""".stripMargin)
       }
+      val analyzed1 = sql("select i from test where p>0").queryExecution.analyzed
+      val analyzed2 = sql("select i from test where p=1").queryExecution.analyzed
+      assert(Optimize.execute(analyzed1).stats.sizeInBytes / 4 ===
+        Optimize.execute(analyzed2).stats.sizeInBytes)
     }
   }
 }
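For context on the rewritten assertion: each of the four partitions is loaded from the same 1000-row temp view, so a filter that keeps all four partitions (`p>0`) should report roughly four times the pruned statistics of a single-partition filter (`p=1`), without pinning an exact byte count. Below is a minimal standalone sketch of the same ratio idea against an ordinary datasource table, where the analogous `PruneFileSourcePartitions` rule applies; the table, object, and app names are illustrative assumptions and not part of this change.

```scala
// Standalone sketch (not part of this patch): pruned plan statistics scale with
// the number of partitions surviving the filter. Uses a plain datasource table,
// so PruneFileSourcePartitions (rather than PruneHiveTablePartitions) applies.
import org.apache.spark.sql.SparkSession

object PartitionStatsSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("partition-stats-sketch")
      .getOrCreate()

    // Hypothetical table `demo`, partitioned by p, with four identically loaded partitions.
    spark.sql("CREATE TABLE demo(i INT, p INT) USING parquet PARTITIONED BY (p)")
    spark.range(0, 1000).selectExpr("id AS i").createOrReplaceTempView("temp")
    for (part <- Seq(1, 2, 3, 4)) {
      spark.sql(s"INSERT OVERWRITE TABLE demo PARTITION (p=$part) SELECT i FROM temp")
    }

    // After optimization the scan only covers partitions surviving the filter,
    // so sizeInBytes reflects the pruned file set rather than the whole table.
    val fourParts = spark.sql("SELECT i FROM demo WHERE p > 0")
      .queryExecution.optimizedPlan.stats.sizeInBytes
    val onePart = spark.sql("SELECT i FROM demo WHERE p = 1")
      .queryExecution.optimizedPlan.stats.sizeInBytes

    // Four identically loaded partitions vs. one: expect roughly a 4x ratio.
    println(s"4 partitions: $fourParts bytes, 1 partition: $onePart bytes")
    spark.stop()
  }
}
```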