forked from snowflakedb/spark-snowflake
-
Notifications
You must be signed in to change notification settings - Fork 0
/
build.sbt
123 lines (111 loc) · 5.07 KB
/
build.sbt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
/*
* Copyright 2015-2019 Snowflake Computing
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import scala.util.Properties
// Spark major.minor line this connector release targets.
val sparkVersion = "3.2"
// Exact Spark version used by the test suites; override from the command
// line with -Dspark.testVersion=<version>.
val testSparkVersion = sys.props.getOrElse("spark.testVersion", "3.2.0")
/*
 * Don't change the variable name "sparkConnectorVersion" because
 * jenkins job "BumpUpSparkConnectorVersion" depends on it.
 * If it has to be changed, please also change the script:
 * Tests/jenkins/BumpUpSparkConnectorVersion/run.sh
 * in snowflake repository.
 */
// Version of the connector itself; used as the prefix of the published
// artifact version (see `version` in the root project settings below).
val sparkConnectorVersion = "2.10.0"
// Custom "it" (integration test) sbt configuration; extends Test so it
// inherits the test classpath and settings (wired into the root project
// below via configs(ItTest) / inConfig(ItTest)).
lazy val ItTest = config("it") extend Test
// Test to use self-download or self-build JDBC driver
// unmanagedJars in Compile += file(s"lib/snowflake-jdbc-3.12.12.jar")
// Root project: builds, tests, and publishes the Spark-Snowflake connector.
lazy val root = project.withId("spark-snowflake").in(file("."))
  .configs(ItTest)
  .settings(inConfig(ItTest)(Defaults.testSettings))
  .settings(Defaults.coreDefaultSettings)
  .settings(Defaults.itSettings)
  .settings(
    name := "spark-snowflake",
    organization := "net.snowflake",
    // Published version carries the targeted Spark line as a suffix,
    // e.g. "2.10.0-spark_3.2".
    version := s"${sparkConnectorVersion}-spark_3.2",
    scalaVersion := sys.props.getOrElse("SPARK_SCALA_VERSION", default = "2.12.11"),
    // Spark 3.2 supports scala 2.12 and 2.13
    crossScalaVersions := Seq("2.12.11", "2.13.7"),
    javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
    // Fix: `licenses` was previously declared twice (here and again in the
    // release settings with a different URL), yielding duplicate and
    // inconsistent <license> entries in the published POM. Declare it exactly
    // once, pointing at the canonical https URL.
    licenses += "Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0"),
    credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
    resolvers +=
      "Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
    libraryDependencies ++= Seq(
      "net.snowflake" % "snowflake-ingest-sdk" % "0.10.3",
      "net.snowflake" % "snowflake-jdbc" % "3.13.14",
      "com.google.guava" % "guava" % "14.0.1" % Test,
      "org.scalatest" %% "scalatest" % "3.1.1" % Test,
      "org.mockito" % "mockito-core" % "1.10.19" % Test,
      "org.apache.commons" % "commons-lang3" % "3.5" % "provided",
      // Below is for Spark Streaming from Kafka test only
      // "org.apache.spark" %% "spark-sql-kafka-0-10" % "2.4.0",
      // Spark itself is "provided" (supplied by the cluster at runtime);
      // the "tests"/"test-sources" classifiers pull in Spark's shared test
      // harness for the integration suites.
      "org.apache.spark" %% "spark-core" % testSparkVersion % "provided, test",
      "org.apache.spark" %% "spark-sql" % testSparkVersion % "provided, test",
      "org.apache.spark" %% "spark-catalyst" % testSparkVersion % "provided, test",
      "org.apache.spark" %% "spark-core" % testSparkVersion % "provided, test" classifier "tests",
      "org.apache.spark" %% "spark-sql" % testSparkVersion % "provided, test" classifier "tests",
      "org.apache.spark" %% "spark-catalyst" % testSparkVersion % "provided, test" classifier "tests",
      "org.apache.spark" %% "spark-core" % testSparkVersion % "provided, test" classifier "test-sources",
      "org.apache.spark" %% "spark-sql" % testSparkVersion % "provided, test" classifier "test-sources",
      "org.apache.spark" %% "spark-catalyst" % testSparkVersion % "provided, test" classifier "test-sources"
      // "org.apache.spark" %% "spark-hive" % testSparkVersion % "provided, test"
    ),
    // "-oF": show full stack traces in ScalaTest output.
    Test / testOptions += Tests.Argument("-oF"),
    // Fork test JVMs so Spark's classloader/JVM options don't leak into sbt.
    Test / fork := true,
    Test / javaOptions ++= Seq("-Xms1024M", "-Xmx4096M"),
    // Release settings
    // NOTE(review): "12345" looks like a placeholder key hex used when
    // GPG_SIGNATURE is unset (e.g. local non-release builds) — confirm this
    // is intentional before signing from CI.
    usePgpKeyHex(Properties.envOrElse("GPG_SIGNATURE", "12345")),
    Global / pgpPassphrase := Properties.envOrNone("GPG_KEY_PASSPHRASE").map(_.toCharArray),
    publishMavenStyle := true,
    releaseCrossBuild := true,
    releasePublishArtifactsAction := PgpKeys.publishSigned.value,
    // POM metadata required by Sonatype/Maven Central.
    pomExtra :=
      <url>https://github.com/snowflakedb/spark-snowflake</url>
        <scm>
          <url>git@github.com:snowflakedb/spark-snowflake.git</url>
          <connection>scm:git:git@github.com:snowflakedb/spark-snowflake.git</connection>
        </scm>
        <developers>
          <developer>
            <id>MarcinZukowski</id>
            <name>Marcin Zukowski</name>
            <url>https://github.com/MarcinZukowski</url>
          </developer>
          <developer>
            <id>etduwx</id>
            <name>Edward Ma</name>
            <url>https://github.com/etduwx</url>
          </developer>
          <developer>
            <id>binglihub</id>
            <name>Bing Li</name>
            <url>https://github.com/binglihub</url>
          </developer>
          <developer>
            <id>Mingli-Rui</id>
            <name>Mingli Rui</name>
            <url>https://github.com/Mingli-Rui</url>
          </developer>
        </developers>,
    // Snapshots and releases go to their respective Sonatype repositories.
    publishTo := Some(
      if (isSnapshot.value)
        Opts.resolver.sonatypeSnapshots
      else
        Opts.resolver.sonatypeStaging
    )
  )