feat: Add manual test to calculate spark builtin functions coverage #263
doc/spark_builtin_expr_coverage_agg.txt (new file)
@@ -0,0 +1,9 @@
+-------+--------------------------------------------------------+---+
|result |details                                                 |cnt|
+-------+--------------------------------------------------------+---+
|FAILED |Unsupported                                             |282|
|FAILED |Failed on native side                                   |16 |
|FAILED |Failed on something else. Check query manually          |4  |
|PASSED |OK                                                      |101|
|SKIPPED|No examples found in spark.sessionState.functionRegistry|12 |
+-------+--------------------------------------------------------+---+
CometExpressionCoverageSuite.scala (new file)
@@ -0,0 +1,143 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.comet

import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}

import scala.collection.mutable

import org.scalatest.Ignore
import org.scalatest.exceptions.TestFailedException

import org.apache.spark.sql.CometTestBase
import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper
/**
 * Manual test to calculate the coverage of Spark built-in expressions supported by Comet.
 *
 * The test updates the files doc/spark_builtin_expr_coverage.txt and
 * doc/spark_builtin_expr_coverage_agg.txt.
 */
@Ignore
class CometExpressionCoverageSuite extends CometTestBase with AdaptiveSparkPlanHelper {

  import testImplicits._

  private val rawCoverageFilePath = "doc/spark_builtin_expr_coverage.txt"
Review comment: After a second thought, I think maybe we should add the Spark version to this file name. More functions will be added in new Spark versions, and it might be helpful to indicate how many functions are supported in each Spark version. This can be done in a follow-up, though.
  private val aggCoverageFilePath = "doc/spark_builtin_expr_coverage_agg.txt"

  test("Test Spark builtin expressions coverage") {
    val queryPattern = """(?i)SELECT (.+?);""".r
    val valuesPattern = """(?i)FROM VALUES(.+?);""".r
    val selectPattern = """(?i)SELECT(.+?)FROM""".r
    val builtinExamplesMap = spark.sessionState.functionRegistry
      .listFunction()
      .map(spark.sessionState.catalog.lookupFunctionInfo(_))
      .filter(_.getSource.toLowerCase == "built-in")
      // exclude Spark streaming functions; Comet has no plans to support streaming in the near future
      .filter(f =>
        !List("window", "session_window", "window_time").contains(f.getName.toLowerCase))
      .map(f => {
        val selectRows = queryPattern.findAllMatchIn(f.getExamples).map(_.group(0)).toList
        (f.getName, selectRows.filter(_.nonEmpty))
      })
      .toMap
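    // For illustration (format of Spark's built-in ExpressionDescription examples, on which the
    // regexes above rely): FunctionInfo.getExamples returns a block such as
    //   Examples:
    //     > SELECT bit_xor(col) FROM VALUES (3), (5) AS tab(col);
    //      6
    // and queryPattern extracts the `SELECT ...;` statements from it.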
    // key   - function name
    // value - coverage result indicating whether the function is supported by Comet
    val resultsMap = new mutable.HashMap[String, CoverageResult]()

    builtinExamplesMap.foreach {
      case (funcName, q :: _) =>
        val queryResult =
          try {
            // Example with predefined values,
            // e.g. SELECT bit_xor(col) FROM VALUES (3), (5) AS tab(col).
            // A better option would probably be to parse the query and iterate through its
            // expressions, but this is an ad-hoc coverage test.
            if (q.toLowerCase.contains(" from values")) {
              val select = selectPattern.findFirstMatchIn(q).map(_.group(0))
              val values = valuesPattern.findFirstMatchIn(q).map(_.group(0))
              (select, values) match {
                case (Some(s), Some(v)) =>
                  testSingleLineQuery(s"select * $v", s"$s tbl")

                case _ =>
                  resultsMap.put(
                    funcName,
                    CoverageResult("FAILED", Seq((q, "Cannot parse properly"))))
              }
            } else {
              // Plain example like SELECT cos(0);
              testSingleLineQuery(
                "select 'dummy' x",
                s"${q.dropRight(1)}, x from tbl",
                excludedOptimizerRules =
                  Some("org.apache.spark.sql.catalyst.optimizer.ConstantFolding"))
            }
            CoverageResult(CoverageResultStatus.Passed.toString, Seq((q, "OK")))
          } catch {
            case e: TestFailedException
                if e.message.getOrElse("").contains("Expected only Comet native operators") =>
              CoverageResult(CoverageResultStatus.Failed.toString, Seq((q, "Unsupported")))
            case e if e.getMessage.contains("CometNativeException") =>
              CoverageResult(
                CoverageResultStatus.Failed.toString,
                Seq((q, "Failed on native side")))
            case _ =>
              CoverageResult(
                CoverageResultStatus.Failed.toString,
                Seq((q, "Failed on something else. Check query manually")))
          }
        resultsMap.put(funcName, queryResult)
      case (funcName, List()) =>
        resultsMap.put(
          funcName,
          CoverageResult(
            CoverageResultStatus.Skipped.toString,
            Seq(("", "No examples found in spark.sessionState.functionRegistry"))))
    }

    // TODO: convert results into HTML
    resultsMap.toSeq.toDF("name", "details").createOrReplaceTempView("t")
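    // For reference (schema follows from CoverageResult below): the temp view `t` has the columns
    // `name: string` and `details: struct<result: string, details: array<struct<_1: string, _2: string>>>`,
    // where `_1` holds the example query and `_2` the status message.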
    val str_agg = showString(
      spark.sql(
        "select result, d._2 as details, count(1) cnt from (select name, t.details.result, explode_outer(t.details.details) as d from t) group by 1, 2 order by 1"),
      1000,
      0)
    Files.write(Paths.get(aggCoverageFilePath), str_agg.getBytes(StandardCharsets.UTF_8))

    val str = showString(spark.sql("select * from t order by 1"), 1000, 0)
    Files.write(Paths.get(rawCoverageFilePath), str.getBytes(StandardCharsets.UTF_8))
Review comment: I second @viirya's point, #263 (comment). It would be better to put the aggregated result in the same file.
Review comment: Ah, I see you filed a new issue. It could be addressed in a follow-up PR then.
  }
}

case class CoverageResult(result: String, details: Seq[(String, String)])

object CoverageResultStatus extends Enumeration {
  type CoverageResultStatus = Value

  val Failed: Value = Value("FAILED")
  val Passed: Value = Value("PASSED")
  val Skipped: Value = Value("SKIPPED")
}
CometTestBase.scala
@@ -718,4 +718,47 @@ abstract class CometTestBase
      Seq.empty
    }
  }
  // Tests a one-line query without the need to create an external table
  def testSingleLineQuery(
      prepareQuery: String,
      testQuery: String,
      testName: String = "test",
      tableName: String = "tbl",
      excludedOptimizerRules: Option[String] = None): Unit = {

    withTempDir { dir =>
      val path = new Path(dir.toURI.toString, testName).toUri.toString
      var data: java.util.List[Row] = new java.util.ArrayList()
      var schema: StructType = null

      withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
        val df = spark.sql(prepareQuery)
        data = df.collectAsList()
        schema = df.schema
      }

      spark.createDataFrame(data, schema).repartition(1).write.parquet(path)
      readParquetFile(path, Some(schema)) { df => df.createOrReplaceTempView(tableName) }
      // Disable constant folding. This optimization rule precomputes expressions and substitutes
      // the resulting values as literals, which subsequently leads to false positives.
      //
      // ConstantFolding is an operator optimization rule in Catalyst that replaces expressions
      // that can be statically evaluated with their equivalent literal values.
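      // For illustration: with ConstantFolding enabled, an example like `SELECT cos(0)` (see
      // CometExpressionCoverageSuite) would be folded to the literal 1.0 before execution, so
      // Comet's implementation of the expression would never be exercised and the coverage check
      // would report a false positive.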
Review comment: This function doesn't always exclude …
      withSQLConf(
        "spark.sql.optimizer.excludedRules" -> excludedOptimizerRules.getOrElse(""),
        "spark.sql.adaptive.optimizer.excludedRules" -> excludedOptimizerRules.getOrElse("")) {
        checkSparkAnswerAndOperator(sql(testQuery))
      }
    }
  }
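  // Example usage (a sketch based on how CometExpressionCoverageSuite in this PR calls it):
  //   testSingleLineQuery(
  //     "select 'dummy' x",
  //     "select cos(0), x from tbl",
  //     excludedOptimizerRules = Some("org.apache.spark.sql.catalyst.optimizer.ConstantFolding"))
  // This prepares a single-row table `tbl` with Comet disabled, then runs the test query with
  // Comet enabled, checking that the answer matches Spark and that the plan uses only Comet
  // native operators.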
  def showString[T](
      df: Dataset[T],
      _numRows: Int,
      truncate: Int = 20,
      vertical: Boolean = false): String = {
    df.showString(_numRows, truncate, vertical)
  }
}
Review comment: I'm thinking of having this aggregated summary in the expression coverage file too; it will be more readable. We could do it by making the coverage file markdown format.
Reply: Yes, I combined all of that in #282.