Add index related integration tests #139

Merged: 14 commits, Dec 21, 2017
integtest/_env.sh (6 changes: 5 additions & 1 deletion)

@@ -104,7 +104,7 @@ create_conf_load() {
echo "create conf for loading data..."
create_conf_db_options
echo "test.mode=Load" >> ${BASE_CONF}
echo "test.ignore=tpch,tpch_TEST,test_index" >> ${BASE_CONF}
echo "test.ignore=tpch,tpch_TEST" >> ${BASE_CONF}

cp ${BASE_CONF} ${TISPARK_CONF}
}
@@ -229,6 +229,10 @@ load_DAG_Table() {
   mysql -h ${tidb_addr} -P ${tidb_port} -u "${tidb_user}" < ./testcases/tispark_test/TisparkTest.sql
 }
 
+load_Index_Table() {
+  mysql -h ${tidb_addr} -P ${tidb_port} -u "${tidb_user}" < ./testcases/test_index/testIndex.sql
+}
+
 rename_result_files_no_tpch() {
   for f in ./*.jdbc; do
     [ -e "$f" ] && mv "$f" "${f/inlineTest/TestNoTPCH}"
integtest/src/main/scala/com/pingcap/spark/DAGTestCase.scala (37 changes: 18 additions & 19 deletions)

@@ -14,15 +14,8 @@ class DAGTestCase(prop: Properties) extends TestCase(prop) {
"select tp_date,tp_datetime from full_data_type_table where tp_date = tp_datetime order by id_dt limit 20",
"select tp_date,tp_datetime from full_data_type_table where tp_date < tp_datetime order by id_dt limit 20"
)
private val compareOpList = List("=", "<", ">", "<=", ">=", "!=", "<>")
private val arithmeticOpList = List("+", "-", "*", "/", "%")
private val LEFT_TB_NAME = "A"
private val RIGHT_TB_NAME = "B"
private val TABLE_NAME = "full_data_type_table"
private val LITERAL_NULL = "null"
private val SCALE_FACTOR = 4 * 4
private val ID_COL = "id_dt"
private val ARITHMETIC_CONSTANT = List[String](

protected val ARITHMETIC_CONSTANT: List[String] = List[String](
java.lang.Long.MAX_VALUE.toString,
java.lang.Long.MIN_VALUE.toString,
java.lang.Double.MAX_VALUE.toString,
@@ -36,14 +29,19 @@ class DAGTestCase(prop: Properties) extends TestCase(prop) {
     java.lang.Byte.MAX_VALUE.toString,
     java.lang.Byte.MIN_VALUE.toString,
     "0",
+    "2017",
     BigDecimal.apply(2147868.65536).toString() // Decimal value
   )
-  private val PLACE_HOLDER = List[String](
+  protected val PLACE_HOLDER: List[String] = List[String](
     LITERAL_NULL, // Null
     "'PingCAP'", // a simple test string
-    "'2043-11-28'",
-    "'2017-09-07 11:11:11'"
-  )
+    "'2017-11-02'",
+    "'2017-10-30'",
+    "'2017-09-07 11:11:11'",
+    "'2017-11-02 08:47:43'",
+    "'fYfSp'"
+  ) ++ ARITHMETIC_CONSTANT
 
   private var colList: List[String] = _
 
   // TODO: Eliminate these bugs
@@ -122,7 +120,7 @@ class DAGTestCase(prop: Properties) extends TestCase(prop) {
       }
     }
     result = !result
-    logger.warn("Result: Total DAG test run:" + inlineSQLNumber + " of " + list.size)
+    logger.warn(s"Result: Total DAG test run: ${list.size - testsSkipped} of ${list.size}")
     logger.warn(s"Result: Test ignored count:$testsSkipped, failed count:$testsFailed")
   }
 

@@ -277,14 +275,15 @@ class DAGTestCase(prop: Properties) extends TestCase(prop) {
     skipLocalSet.add("tp_nvarchar")
     skipLocalSet.add("tp_varchar")
     skipLocalSet.add("tp_char")
+    skipLocalSet.add("tp_year")
 
     val arithmeticSkipSet = mutable.Set[String]()
-    arithmeticSkipSet.add("int")
-    arithmeticSkipSet.add("float")
-    arithmeticSkipSet.add("decimal")
-    arithmeticSkipSet.add("double")
-    arithmeticSkipSet.add("real")
-    arithmeticSkipSet.add("bit")
+    arithmeticSkipSet.add("tp_int")
+    arithmeticSkipSet.add("tp_float")
+    arithmeticSkipSet.add("tp_decimal")
+    arithmeticSkipSet.add("tp_double")
+    arithmeticSkipSet.add("tp_real")
+    arithmeticSkipSet.add("tp_bit")
     arithmeticSkipSet.add(ID_COL)
 
     for (op <- compareOpList) {
integtest/src/main/scala/com/pingcap/spark/SparkJDBCWrapper.scala (4 additions)

@@ -75,4 +75,8 @@ class SparkJDBCWrapper(prop: Properties) extends SparkWrapper {

     dfData(df, schema)
   }
+
+  override def close(): Unit = {
+    spark_jdbc.close()
+  }
 }
integtest/src/main/scala/com/pingcap/spark/SparkWrapper.scala (4 changes: 4 additions & 0 deletions)

@@ -77,4 +77,8 @@ class SparkWrapper() extends LazyLogging {

     dfData(df, schema)
   }
+
+  def close(): Unit = {
+    spark.close()
+  }
 }
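
Editorial note on the pattern (not part of the diff): both wrappers now expose a close() hook, and TestCase.init() below calls close() on the JDBC and Spark wrappers once a run finishes, so sessions are released deterministically. A minimal standalone sketch of that teardown idiom, assuming only java.io.Closeable; the object and method names here are hypothetical, not the project's API:

import java.io.Closeable

object TeardownSketch {
  // Run a test body, then close every wrapper even if the body throws,
  // mirroring how the harness tears down jdbc, spark and spark_jdbc.
  def withWrappers(wrappers: Seq[Closeable])(body: => Unit): Unit =
    try body
    finally wrappers.foreach(_.close())
}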
integtest/src/main/scala/com/pingcap/spark/TestCase.scala (38 changes: 27 additions & 11 deletions)

@@ -71,7 +71,10 @@ class TestCase(val prop: Properties) extends LazyLogging {
"only support precision",
"Invalid Flag type for TimestampType: 8",
"Invalid Flag type for DateTimeType: 8",
"Decimal scale (18) cannot be greater than precision "
"Decimal scale (18) cannot be greater than precision ",
"0E-11", // unresolvable precision fault
"overflows",
"2017-01-01" // timestamp error
// "unknown error Other"
// "Error converting access pointsnull"
)
@@ -83,13 +86,22 @@ class TestCase(val prop: Properties) extends LazyLogging {
// "line 1 column 13 near"
)

logger.info("Databases to dump: " + dbNames.mkString(","))
logger.info("Run Mode: " + mode)
logger.info("basePath: " + basePath)
logger.info("use these DataBases only: " + (if (dbAssigned) useDatabase.head else "None"))
protected val compareOpList = List("=", "<", ">", "<=", ">=", "!=", "<>")
protected val arithmeticOpList = List("+", "-", "*", "/", "%")
protected val LEFT_TB_NAME = "A"
protected val RIGHT_TB_NAME = "B"
protected val TABLE_NAME = "full_data_type_table"
protected val LITERAL_NULL = "null"
protected val SCALE_FACTOR: Integer = 4 * 4
protected val ID_COL = "id_dt"

def init(): Unit = {

logger.info("Databases to dump: " + dbNames.mkString(","))
logger.info("Run Mode: " + mode)
logger.info("basePath: " + basePath)
logger.info("use these DataBases only: " + (if (dbAssigned) useDatabase.head else "None"))

mode match {
case RunMode.Dump => dbNames.filter(!_.isEmpty).foreach { dbName =>
logger.info("Dumping database " + dbName)
@@ -101,17 +113,17 @@ class TestCase(val prop: Properties) extends LazyLogging {
         jdbc.dumpAllTables(joinPath(basePath, dbName))
       }
 
-      case RunMode.Load => work(basePath, false, true, true)
+      case RunMode.Load => work(basePath, run=false, load=true, compareNeeded=true)
 
-      case RunMode.Test => work(basePath, true, false, true)
+      case RunMode.Test => work(basePath, run=true, load=false, compareNeeded=true)
 
-      case RunMode.LoadNTest => work(basePath, true, true, true)
+      case RunMode.LoadNTest => work(basePath, run=true, load=true, compareNeeded=true)
 
-      case RunMode.TestIndex => work(basePath, true, false, false)
+      case RunMode.TestIndex => work(basePath, run=true, load=false, compareNeeded=false)
 
-      case RunMode.TestDAG => work(basePath, true, false, false)
+      case RunMode.TestDAG => work(basePath, run=true, load=false, compareNeeded=false)
 
-      case RunMode.SqlOnly => work(basePath, true, false, false)
+      case RunMode.SqlOnly => work(basePath, run=true, load=false, compareNeeded=false)
     }
 
     mode match {
@@ -123,6 +135,10 @@ class TestCase(val prop: Properties) extends LazyLogging {
+ " Tests skipped: " + testsSkipped)
case _ =>
}

jdbc.close()
spark.close()
spark_jdbc.close()
}

protected def work(parentPath: String, run: Boolean, load: Boolean, compareNeeded: Boolean): Unit = {
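
With the named arguments in place, init() reads as a dispatch table from RunMode to a (run, load, compareNeeded) triple passed to work(). A self-contained sketch of that shape, using hypothetical names rather than the project's actual API, to show why named fields keep the flag combinations legible:

object RunModeSketch {
  sealed trait RunMode
  case object Load      extends RunMode
  case object Test      extends RunMode
  case object TestIndex extends RunMode

  // Named fields make each boolean combination self-documenting,
  // unlike positional work(basePath, true, false, false) calls.
  final case class Plan(run: Boolean, load: Boolean, compareNeeded: Boolean)

  def planFor(mode: RunMode): Plan = mode match {
    case Load      => Plan(run = false, load = true, compareNeeded = true)
    case Test      => Plan(run = true, load = false, compareNeeded = true)
    case TestIndex => Plan(run = true, load = false, compareNeeded = false)
  }
}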