Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: update scalatest and scalactic #1706

Merged
merged 27 commits into from
Nov 7, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
27 commits
Select commit. Hold shift + click to select a range.
9b616dd
remove unused imports
niehaus59 May 12, 2022
e52919b
Merge remote-tracking branch 'upstream/master'
niehaus59 Jun 7, 2022
ad733ec
batch prompts
niehaus59 Jun 21, 2022
ac81dec
fix merge conflict
niehaus59 Jun 21, 2022
8c7854e
fix merge conflicts
niehaus59 Jun 22, 2022
d9e1863
Merge branch 'manieh/batch-prompts-2'
niehaus59 Jun 22, 2022
29133a5
Merge remote-tracking branch 'upstream/master'
niehaus59 Jun 24, 2022
cce533c
Merge remote-tracking branch 'upstream/master'
niehaus59 Jun 24, 2022
d791955
Merge remote-tracking branch 'upstream/master'
niehaus59 Jun 27, 2022
c394754
Merge remote-tracking branch 'upstream/master'
niehaus59 Jul 13, 2022
18ea6de
Merge remote-tracking branch 'upstream/master'
niehaus59 Jul 25, 2022
e78062d
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 1, 2022
af6af01
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 2, 2022
1f126b0
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 4, 2022
d51e734
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 10, 2022
5a0b067
Merge branch 'master' of https://github.com/niehaus59/SynapseML
niehaus59 Aug 12, 2022
841cf0a
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 25, 2022
e67bbce
Merge branch 'master' of https://github.com/niehaus59/SynapseML
niehaus59 Aug 25, 2022
ac40e41
Merge remote-tracking branch 'upstream/master'
niehaus59 Aug 25, 2022
493c8a3
Merge remote-tracking branch 'upstream/master'
niehaus59 Oct 25, 2022
72fed31
don't throw for invalid columns in DropColumn
niehaus59 Oct 25, 2022
305b5cc
remove now unused verify method from DropColumns
niehaus59 Oct 25, 2022
94b1acc
Merge remote-tracking branch 'upstream/master'
niehaus59 Nov 3, 2022
c02fdee
update scalatest and scalactic
niehaus59 Nov 3, 2022
b557b4a
strip unicode out of transliteration result
niehaus59 Nov 4, 2022
bdd3c1d
clean transliterate test
niehaus59 Nov 4, 2022
bb64b5e
override assertDFEq in TransliterateSuite to strip out zero-width chars
niehaus59 Nov 5, 2022
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -26,9 +26,9 @@ val coreDependencies = Seq(
"org.apache.spark" %% "spark-mllib" % sparkVersion % "compile",
"org.apache.spark" %% "spark-avro" % sparkVersion % "provided",
"org.apache.spark" %% "spark-tags" % sparkVersion % "test",
"org.scalatest" %% "scalatest" % "3.0.5" % "test")
"org.scalatest" %% "scalatest" % "3.2.14" % "test")
val extraDependencies = Seq(
"org.scalactic" %% "scalactic" % "3.0.5",
"org.scalactic" %% "scalactic" % "3.2.14",
"io.spray" %% "spray-json" % "1.3.5",
"com.jcraft" % "jsch" % "0.1.54",
"org.apache.httpcomponents.client5" % "httpclient5" % "5.1.3",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,12 @@ import com.microsoft.azure.synapse.ml.core.test.base.{Flaky, TestBase}
import com.microsoft.azure.synapse.ml.core.test.fuzzing.{TestObject, TransformerFuzzing}
import org.apache.spark.ml.util.MLReadable
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, flatten}
import org.apache.spark.sql.functions.{col, flatten, udf}
import org.scalactic.Equality
import scala.collection.Traversable

import java.sql.Struct
import scala.collection.mutable

trait TranslatorKey {
lazy val translatorKey: String = sys.env.getOrElse("TRANSLATOR_KEY", Secrets.TranslatorKey)
Expand Down Expand Up @@ -188,8 +193,9 @@ class TransliterateSuite extends TransformerFuzzing[Transliterate]
.withColumn("text", col("result.text"))
.withColumn("script", col("result.script"))
.select("text", "script").collect()
assert(results.head.getSeq(0).mkString("\n") === "Kon'nichiwa\nsayonara")
assert(results.head.getSeq(1).mkString("\n") === "Latn\nLatn")

assert(TransliterateSuite.stripInvalid(results.head.getSeq(0).mkString("\n")) === "Kon'nichiwa\nsayonara")
assert(TransliterateSuite.stripInvalid(results.head.getSeq(1).mkString("\n")) === "Latn\nLatn")
}

test("Throw errors if required fields not set") {
Expand All @@ -206,12 +212,30 @@ class TransliterateSuite extends TransformerFuzzing[Transliterate]
assert(caught.getMessage.contains("toScript"))
}

// UDF that sanitizes the (text, script) pairs produced by the Transliterate
// transformer: the text component is passed through TransliterateSuite.stripInvalid
// so nondeterministic zero-width/invisible characters do not break DataFrame equality;
// the script component is left untouched.
val stripUdf = udf {
  (pairs: Seq[(String, String)]) =>
    pairs.map { case (text, script) => (TransliterateSuite.stripInvalid(text), script) }
}
/**
  * Compares two DataFrames for equality after sanitizing the "result" column.
  * The Transliterate service can emit zero-width characters nondeterministically,
  * so both frames are passed through stripUdf before delegating to the base check.
  */
override def assertDFEq(df1: DataFrame, df2: DataFrame)(implicit eq: Equality[DataFrame]): Unit = {
  val resultCol = "result"
  def sanitize(df: DataFrame): DataFrame = df.withColumn(resultCol, stripUdf(col(resultCol)))
  super.assertDFEq(sanitize(df1), sanitize(df2))(eq)
}

// Fuzzing fixture: the Transliterate stage under test paired with its input frame
// (both defined earlier in the suite, outside this hunk).
override def testObjects(): Seq[TestObject[Transliterate]] =
  Seq(new TestObject(transliterate, transDf))

// Companion reader used by the fuzzing harness to round-trip the stage through save/load.
override def reader: MLReadable[_] = Transliterate
}

object TransliterateSuite {
  // Whitelist filter: anything that is not a newline, apostrophe, or ASCII letter
  // is considered invalid. The transliteration service can emit zero-width/invisible
  // Unicode characters nondeterministically, so stripping to this whitelist keeps
  // the suite's string assertions stable across runs.
  // Hoisted to a val so the pattern is compiled once instead of on every call
  // (`"...".r` builds a fresh Regex — and recompiles the underlying Pattern — each time).
  private val InvalidChars = "[^\n'A-Za-z]".r

  /** Removes every character outside the newline/apostrophe/ASCII-letter whitelist. */
  private def stripInvalid(str: String): String =
    InvalidChars.replaceAllIn(str, "")
}

class DetectSuite extends TransformerFuzzing[Detect]
with TranslatorKey with Flaky with TranslatorUtils {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ import org.apache.spark.SparkException
import org.apache.spark.ml.linalg.{Vector, Vectors}
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.sql.types.{FloatType, IntegerType}
import org.scalatest.Matchers.{a, thrownBy}
import org.scalatest.matchers.should.Matchers.{a, thrownBy}

class SlicerFunctionsSuite extends TestBase {
test("SlicerFunctions UDFs can handle different types of inputs") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import org.apache.spark.streaming.{StreamingContext, Seconds => SparkSeconds}
import org.scalactic.Equality
import org.scalactic.source.Position
import org.scalatest._
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.concurrent.TimeLimits
import org.scalatest.time.{Seconds, Span}

Expand Down Expand Up @@ -142,7 +143,7 @@ object TestBase extends SparkSessionManagement {

}

abstract class TestBase extends FunSuite with BeforeAndAfterEachTestData with BeforeAndAfterAll {
abstract class TestBase extends AnyFunSuite with BeforeAndAfterEachTestData with BeforeAndAfterAll {

lazy val sparkProvider: SparkSessionManagement = TestBase

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ package com.microsoft.azure.synapse.ml.explainers.split1
import breeze.linalg.sum
import com.microsoft.azure.synapse.ml.core.test.base.TestBase
import com.microsoft.azure.synapse.ml.explainers.KernelSHAPSamplerSupport
import org.scalatest.Matchers._
import org.scalatest.matchers.should.Matchers._

class KernelSHAPSamplerSupportSuite extends TestBase {

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types._
import org.scalactic.{Equality, TolerantNumerics}
import org.scalatest.Matchers._
import org.scalatest.matchers.should.Matchers._

import java.nio.file.{Files, Paths}
import javax.imageio.ImageIO
Expand Down