5 changes: 5 additions & 0 deletions pom.xml
@@ -1951,6 +1951,11 @@
</compilerArgs>
</configuration>
</plugin>
<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr3-maven-plugin</artifactId>
<version>3.5.2</version>
</plugin>
<!-- Surefire runs all Java tests -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
46 changes: 44 additions & 2 deletions project/SparkBuild.scala
@@ -414,9 +414,51 @@ object Hive {
// Some of our log4j jars make it impossible to submit jobs from this JVM to Hive Map/Reduce
// in order to generate golden files. This is only required for developers who are adding new
// query tests.
fullClasspath in Test := (fullClasspath in Test).value.filterNot { f => f.toString.contains("jcl-over") }
)
fullClasspath in Test := (fullClasspath in Test).value.filterNot { f => f.toString.contains("jcl-over") },
// ANTLR code-generation step.
//
// This is heavily inspired by com.github.stefri.sbt-antlr (0.5.3), and works around a number
// of build errors present in that plugin.
// Create Parser from ANTLR grammar files.
sourceGenerators in Compile += Def.task {
val log = streams.value.log

val grammarFileNames = Seq(
"SparkSqlLexer.g",
"SparkSqlParser.g")
val sourceDir = (sourceDirectory in Compile).value / "antlr3"
val targetDir = (sourceManaged in Compile).value

// Create default ANTLR Tool.
val antlr = new org.antlr.Tool

// Setup input and output directories.
antlr.setInputDirectory(sourceDir.getPath)
antlr.setOutputDirectory(targetDir.getPath)
antlr.setForceRelativeOutput(true)
antlr.setMake(true)

// Add grammar files.
grammarFileNames.flatMap(gFileName => (sourceDir ** gFileName).get).foreach { gFilePath =>
val relGFilePath = (gFilePath relativeTo sourceDir).get.getPath
log.info("ANTLR: Grammar file '%s' detected.".format(relGFilePath))
antlr.addGrammarFile(relGFilePath)
}

// Generate the parser.
antlr.process
if (antlr.getNumErrors > 0) {
log.error("ANTLR: Caught %d build errors.".format(antlr.getNumErrors))
}

// Return all generated java files.
(targetDir ** "*.java").get.toSeq
}.taskValue,
// Include ANTLR tokens files.
resourceGenerators in Compile += Def.task {
((sourceManaged in Compile).value ** "*.tokens").get.toSeq
}.taskValue
)
}

object Assembly {
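For reference, the inline sbt task above boils down to the following standalone ANTLR 3 invocation. This is a minimal sketch for illustration only: it assumes org.antlr:antlr:3.5.2 on the classpath, and the directories and grammar locations below are placeholders rather than paths taken from this change.

object GenerateSparkSqlParser {
  def main(args: Array[String]): Unit = {
    // Same Tool API the sbt task uses: configure input/output directories, register
    // the grammar files, generate, and check the error count.
    val antlr = new org.antlr.Tool
    antlr.setInputDirectory("sql/hive/src/main/antlr3")        // placeholder input dir
    antlr.setOutputDirectory("target/generated-sources/antlr") // placeholder output dir
    antlr.setForceRelativeOutput(true)  // mirror each grammar's relative path in the output
    antlr.setMake(true)                 // skip grammars whose output is already up to date

    antlr.addGrammarFile("SparkSqlLexer.g")   // paths are relative to the input directory
    antlr.addGrammarFile("SparkSqlParser.g")

    antlr.process()
    if (antlr.getNumErrors > 0) {
      sys.error(s"ANTLR reported ${antlr.getNumErrors} errors")
    }
  }
}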
2 changes: 2 additions & 0 deletions project/plugins.sbt
@@ -27,3 +27,5 @@ addSbtPlugin("io.spray" % "sbt-revolver" % "0.7.2")
libraryDependencies += "org.ow2.asm" % "asm" % "5.0.3"

libraryDependencies += "org.ow2.asm" % "asm-commons" % "5.0.3"

libraryDependencies += "org.antlr" % "antlr" % "3.5.2"
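Note that dependencies declared in project/plugins.sbt are resolved onto the classpath of the build definition itself, not onto Spark's runtime classpath; that is what lets project/SparkBuild.scala instantiate the ANTLR Tool directly, e.g.:

// Inside project/SparkBuild.scala (the build definition), this compiles only because
// project/plugins.sbt adds org.antlr:antlr:3.5.2 to the build's own classpath.
val antlr = new org.antlr.Tool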
@@ -308,7 +308,12 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {

// The difference between the double numbers generated by Hive and Spark
// can be ignored (e.g., 0.6633880657639323 and 0.6633880657639322)
"udaf_corr"
"udaf_corr",

// Feature removed in HIVE-11145
"alter_partition_protect_mode",
"drop_partitions_ignore_protection",
"protectmode"
)

/**
@@ -328,7 +333,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"alter_index",
"alter_merge_2",
"alter_partition_format_loc",
"alter_partition_protect_mode",
"alter_partition_with_whitelist",
"alter_rename_partition",
"alter_table_serde",
@@ -460,7 +464,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"drop_partitions_filter",
"drop_partitions_filter2",
"drop_partitions_filter3",
"drop_partitions_ignore_protection",
"drop_table",
"drop_table2",
"drop_table_removes_partition_dirs",
@@ -778,7 +781,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"ppr_pushdown2",
"ppr_pushdown3",
"progress_1",
"protectmode",
"push_or",
"query_with_semi",
"quote1",
22 changes: 22 additions & 0 deletions sql/hive/pom.xml
@@ -232,6 +232,7 @@
<configuration>
<sources>
<source>v${hive.version.short}/src/main/scala</source>
<source>${project.build.directory}/generated-sources/antlr</source>
</sources>
</configuration>
</execution>
@@ -260,6 +261,27 @@
</execution>
</executions>
</plugin>


<plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr3-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>antlr</goal>
</goals>
</execution>
</executions>
<configuration>
<sourceDirectory>${basedir}/src/main/antlr3</sourceDirectory>
<includes>
<include>**/SparkSqlLexer.g</include>
<include>**/SparkSqlParser.g</include>
</includes>
</configuration>
</plugin>

</plugins>
</build>
</project>
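For orientation, the lexer and parser classes generated from the grammars above are driven through the standard ANTLR 3 runtime. The sketch below is illustrative only: the class names follow the grammar names, but the enclosing package, the top-level statement rule, and the AST-producing output are assumptions (they match Hive's grammar conventions) and are not confirmed by this change.

import org.antlr.runtime.{ANTLRStringStream, CommonTokenStream}

object ParseSketch {
  def main(args: Array[String]): Unit = {
    val sql = "SELECT key, value FROM src WHERE key < 10"

    // Lex and parse with the ANTLR-generated classes (assumed to be on the classpath).
    val lexer  = new SparkSqlLexer(new ANTLRStringStream(sql))
    val tokens = new CommonTokenStream(lexer)
    val parser = new SparkSqlParser(tokens)

    val result = parser.statement()   // assumed top-level rule, as in Hive's grammar
    if (parser.getNumberOfSyntaxErrors > 0) {
      sys.error("Failed to parse: " + sql)
    }
    println(result.getTree)           // the AST produced for the statement
  }
}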