@@ -19,7 +19,11 @@ package org.apache.spark.sql.hive
1919
2020import java .io .File
2121
22+ import org .scalatest .BeforeAndAfter
23+
2224import com .google .common .io .Files
25+
26+ import org .apache .spark .sql .execution .QueryExecutionException
2327import org .apache .spark .sql .{QueryTest , _ }
2428import org .apache .spark .sql .hive .test .TestHive
2529import org .apache .spark .sql .types ._
@@ -29,12 +33,19 @@ import org.apache.spark.sql.hive.test.TestHive._
2933
// Simple (key, value) row type used to populate the "testData" temp table fixture.
case class TestData(key: Int, value: String)
3135
32- class InsertIntoHiveTableSuite extends QueryTest {
36+ class InsertIntoHiveTableSuite extends QueryTest with BeforeAndAfter {
3337 import org .apache .spark .sql .hive .test .TestHive .implicits ._
3438
// Shared fixture: an RDD of 100 rows, (1, "1") through (100, "100"),
// registered as the "testData" temp table in the before-each hook.
val testData = TestHive.sparkContext.parallelize(
  (1 to 100).map(i => TestData(i, i.toString)))
37- testData.registerTempTable(" testData" )
41+
before {
  // Every test in this suite issues DDL statements, so reset the shared
  // Hive test state first to keep the tests independent of one another.
  TestHive.reset()
  // reset() clears temp tables, so re-register testData before each test.
  testData.registerTempTable("testData")
}
3849
3950 test(" insertInto() HiveTable" ) {
4051 sql(" CREATE TABLE createAndInsertTest (key int, value string)" )
@@ -70,9 +81,11 @@ class InsertIntoHiveTableSuite extends QueryTest {
// Re-creating an existing table without IF NOT EXISTS must be rejected.
test("Double create fails when allowExisting = false") {
  sql("CREATE TABLE doubleCreateAndInsertTest (key int, value string)")

  val message = intercept[QueryExecutionException] {
    sql("CREATE TABLE doubleCreateAndInsertTest (key int, value string)")
  }.getMessage
  // Assert on the captured message instead of the original stray debug
  // println("message!!!!" + message): a bogus error text should fail the
  // test rather than merely clutter the console output.
  assert(message.contains("doubleCreateAndInsertTest"),
    s"unexpected error message: $message")
}
7790
7891 test(" Double create does not fail when allowExisting = true" ) {
0 commit comments