Skip to content

Commit

Permalink
fix test framework concerning null tests (#151)
Browse files Browse the repository at this point in the history
  • Loading branch information
birdstorm authored Dec 22, 2017
1 parent 477b254 commit 2f8265c
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,9 @@ class SparkWrapper() extends LazyLogging {
df.collect().map(row => {
val rowRes = ArrayBuffer.empty[Any]
for (i <- 0 until row.length) {
if (schema(i).dataType.isInstanceOf[BinaryType]) {
if (row.get(i) == null) {
rowRes += null
} else if (schema(i).dataType.isInstanceOf[BinaryType]) {
rowRes += new String(row.get(i).asInstanceOf[Array[Byte]])
} else {
rowRes += toOutput(row.get(i), schema(i).dataType.typeName)
Expand Down
17 changes: 13 additions & 4 deletions integtest/src/main/scala/com/pingcap/spark/TestCase.scala
Original file line number Diff line number Diff line change
Expand Up @@ -232,28 +232,36 @@ class TestCase(val prop: Properties) extends LazyLogging {
}
}

/** Returns true when `value` is non-null and its string rendering contains `str`.
  * A null cell can never match an ignore pattern, so null yields false. */
def checkIgnore(value: Any, str: String): Boolean =
  Option(value).exists(_.toString.contains(str))

/** True if any cell in the TiSpark result set matches an ignorable pattern
  * from either SparkIgnore or TiDBIgnore. Delegates to checkIgnore so that
  * null cells are treated as non-matching instead of throwing an NPE.
  * (Original span was a garbled diff containing both the removed and the
  * added lambda body; this is the coherent post-change version.) */
def checkSparkIgnore(tiSpark: List[List[Any]]): Boolean = {
  // Patterns ignorable for either engine apply to the TiSpark output.
  val ignoreCase = SparkIgnore ++ TiDBIgnore
  tiSpark.exists(
    (row: List[Any]) => row.exists(
      (cell: Any) => ignoreCase.exists(
        (pattern: String) => checkIgnore(cell, pattern)
      )))
}

/** True if any cell in the TiDB result set matches a TiDBIgnore pattern.
  * Delegates to checkIgnore so null cells are treated as non-matching
  * instead of throwing an NPE.
  * (Original span was a garbled diff containing both the removed and the
  * added lambda body; this is the coherent post-change version.) */
def checkTiDBIgnore(tiDb: List[List[Any]]): Boolean = {
  tiDb.exists(
    (row: List[Any]) => row.exists(
      (cell: Any) => TiDBIgnore.exists(
        (pattern: String) => checkIgnore(cell, pattern)
      )))
}

/** True if any cell in the Spark-over-JDBC result set matches a SparkIgnore
  * pattern. Delegates to checkIgnore so null cells are treated as
  * non-matching instead of throwing an NPE.
  * (Original span was a garbled diff containing both the removed and the
  * added lambda body; this is the coherent post-change version.) */
def checkSparkJDBCIgnore(sparkJDBC: List[List[Any]]): Boolean = {
  sparkJDBC.exists(
    (row: List[Any]) => row.exists(
      (cell: Any) => SparkIgnore.exists(
        (pattern: String) => checkIgnore(cell, pattern)
      )))
}

Expand Down Expand Up @@ -612,7 +620,8 @@ class TestCase(val prop: Properties) extends LazyLogging {

/** Runs `sql` against both the TiSpark and the Spark-JDBC connections for
  * `dbName` and logs whether the two results agree.
  * (Original span was a garbled diff containing both the removed call to
  * execSparkAndShow and the added two-connection comparison; this is the
  * coherent post-change version.) */
private def testSql(dbName: String, sql: String): Unit = {
  spark.init(dbName)
  spark_jdbc.init(dbName)
  // execSparkBothAndJudge returns true on mismatch — presumably; TODO confirm against its definition
  logger.info(if (execSparkBothAndJudge(sql)) "TEST FAILED." else "TEST PASSED.")
}

private def test(dbName: String, testCases: ArrayBuffer[(String, String)], compareNeeded: Boolean): Unit = {
Expand Down
4 changes: 1 addition & 3 deletions src/main/scala/org/apache/spark/sql/TiContext.scala
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,12 @@

package org.apache.spark.sql


import com.pingcap.tikv.tools.RegionUtils

import com.pingcap.tikv.{TiConfiguration, TiSession}
import com.pingcap.tispark._
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging

import scala.collection.JavaConverters._

class TiContext(val session: SparkSession) extends Serializable with Logging {
Expand All @@ -35,7 +34,6 @@ class TiContext(val session: SparkSession) extends Serializable with Logging {

TiUtils.sessionInitialize(session, tiSession)


final val version: String = TiSparkVersion.version

class DebugTool {
Expand Down

0 comments on commit 2f8265c

Please sign in to comment.