12 changes: 6 additions & 6 deletions LICENSE
@@ -242,18 +242,18 @@ The text of each license is also included at licenses/LICENSE-[project].txt.
 (BSD licence) ANTLR ST4 4.0.4 (org.antlr:ST4:4.0.4 - http://www.stringtemplate.org)
 (BSD licence) ANTLR StringTemplate (org.antlr:stringtemplate:3.2.1 - http://www.stringtemplate.org)
 (BSD License) Javolution (javolution:javolution:5.5.1 - http://javolution.org)
-(BSD) JLine (jline:jline:0.9.94 - http://jline.sourceforge.net)
+(BSD) JLine (jline:jline:2.14.3 - https://github.com/jline/jline2)
-(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.3 - http://paranamer.codehaus.org/paranamer)
+(BSD) ParaNamer Core (com.thoughtworks.paranamer:paranamer:2.6 - http://paranamer.codehaus.org/paranamer)
 (BSD 3 Clause) Scala (http://www.scala-lang.org/download/#License)
 (Interpreter classes (all .scala files in repl/src/main/scala
 except for Main.Scala, SparkHelper.scala and ExecutorClassLoader.scala),
 and for SerializableMapWrapper in JavaUtils.scala)
-(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.8 - http://www.scala-lang.org/)
-(BSD-like) Scalap (org.scala-lang:scalap:2.11.8 - http://www.scala-lang.org/)
+(BSD-like) Scala Actors library (org.scala-lang:scala-actors:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-compiler:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Compiler (org.scala-lang:scala-reflect:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scala Library (org.scala-lang:scala-library:2.11.12 - http://www.scala-lang.org/)
+(BSD-like) Scalap (org.scala-lang:scalap:2.11.12 - http://www.scala-lang.org/)
 (BSD-style) scalacheck (org.scalacheck:scalacheck_2.11:1.10.0 - http://www.scalacheck.org)
 (BSD-style) spire (org.spire-math:spire_2.11:0.7.1 - http://spire-math.org)
 (BSD-style) spire-macros (org.spire-math:spire-macros_2.11:0.7.1 - http://spire-math.org)
6 changes: 3 additions & 3 deletions core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -77,7 +77,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
   private val MESOS = 4
   private val LOCAL = 8
   private val KUBERNETES = 16
-  private val COOK = 32
+  private val COOK = 32
   private val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

   // Deploy modes
@@ -582,7 +582,7 @@ object SparkSubmit extends CommandLineUtils with Logging {
       // Other options
       OptionAssigner(args.executorCores, STANDALONE | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.executor.cores"),
-      OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
+      OptionAssigner(args.executorMemory, STANDALONE | MESOS | COOK | YARN | KUBERNETES,
         ALL_DEPLOY_MODES, confKey = "spark.executor.memory"),
       OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.cores.max"),
@@ -591,7 +591,7 @@
       OptionAssigner(args.jars, LOCAL, CLIENT, confKey = "spark.jars"),
       OptionAssigner(args.jars, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = "spark.jars"),
-      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
+      OptionAssigner(args.driverMemory, STANDALONE | MESOS | YARN | COOK | KUBERNETES,
         CLUSTER, confKey = "spark.driver.memory"),
       OptionAssigner(args.driverCores, STANDALONE | MESOS | YARN | KUBERNETES, CLUSTER,
         confKey = "spark.driver.cores"),
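The three -/+ pairs in this file are textually identical; with 3 additions and 3 deletions reported, the change is evidently whitespace-only (trailing spaces stripped). The COOK constant itself follows the power-of-two flag pattern used for cluster managers: each manager owns one bit, masks are built with | and tested with &. A minimal sketch of that pattern (illustrative names, not Spark's code):

```scala
// Illustrative sketch of the power-of-two flag pattern in SparkSubmit;
// the names mirror the constants above, but this is standalone example code.
object Flags {
  val YARN       = 1
  val STANDALONE = 2
  val MESOS      = 4
  val LOCAL      = 8
  val KUBERNETES = 16
  val COOK       = 32 // the next free bit, so it can be OR-ed into any mask

  val ALL_CLUSTER_MGRS = YARN | STANDALONE | MESOS | COOK | LOCAL | KUBERNETES

  // OptionAssigner-style membership test: is `flag` set in `mask`?
  def has(mask: Int, flag: Int): Boolean = (mask & flag) != 0
}

// Flags.has(Flags.ALL_CLUSTER_MGRS, Flags.COOK)        // true
// Flags.has(Flags.STANDALONE | Flags.YARN, Flags.COOK) // false
```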
16 changes: 8 additions & 8 deletions dev/deps/spark-deps-hadoop-2.6
@@ -5,7 +5,7 @@ activation-1.1.1.jar
 aircompressor-0.8.jar
 antlr-2.7.7.jar
 antlr-runtime-3.4.jar
-antlr4-runtime-4.7.jar
+antlr4-runtime-4.5.3.jar
 aopalliance-1.0.jar
 aopalliance-repackaged-2.4.0-b34.jar
 apache-log4j-extras-1.2.17.jar
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
 jets3t-0.9.4.jar
 jetty-6.1.26.jar
 jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
 joda-time-2.9.3.jar
 jodd-core-3.5.2.jar
 jpam-1.1.jar
@@ -171,12 +171,12 @@ parquet-jackson-1.8.2.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.6.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.5.jar
-scalap-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
+scala-xml_2.11-1.1.0.jar
+scalap-2.11.12.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
16 changes: 8 additions & 8 deletions dev/deps/spark-deps-hadoop-2.7
@@ -5,7 +5,7 @@ activation-1.1.1.jar
 aircompressor-0.8.jar
 antlr-2.7.7.jar
 antlr-runtime-3.4.jar
-antlr4-runtime-4.7.jar
+antlr4-runtime-4.5.3.jar
 aopalliance-1.0.jar
 aopalliance-repackaged-2.4.0-b34.jar
 apache-log4j-extras-1.2.17.jar
@@ -122,7 +122,7 @@ jersey-server-2.22.2.jar
 jets3t-0.9.4.jar
 jetty-6.1.26.jar
 jetty-util-6.1.26.jar
-jline-2.12.1.jar
+jline-2.14.3.jar
 joda-time-2.9.3.jar
 jodd-core-3.5.2.jar
 jpam-1.1.jar
@@ -172,12 +172,12 @@ parquet-jackson-1.8.2.jar
 protobuf-java-2.5.0.jar
 py4j-0.10.6.jar
 pyrolite-4.13.jar
-scala-compiler-2.11.8.jar
-scala-library-2.11.8.jar
-scala-parser-combinators_2.11-1.0.4.jar
-scala-reflect-2.11.8.jar
-scala-xml_2.11-1.0.5.jar
-scalap-2.11.8.jar
+scala-compiler-2.11.12.jar
+scala-library-2.11.12.jar
+scala-parser-combinators_2.11-1.1.0.jar
+scala-reflect-2.11.12.jar
+scala-xml_2.11-1.1.0.jar
+scalap-2.11.12.jar
 shapeless_2.11-2.3.2.jar
 slf4j-api-1.7.16.jar
 slf4j-log4j12-1.7.16.jar
8 changes: 4 additions & 4 deletions pom.xml
@@ -159,7 +159,7 @@
     <commons.math3.version>3.4.1</commons.math3.version>
     <!-- managed up from 3.2.1 for SPARK-11652 -->
     <commons.collections.version>3.2.2</commons.collections.version>
-    <scala.version>2.11.8</scala.version>
+    <scala.version>2.11.12</scala.version>
     <scala.binary.version>2.11</scala.binary.version>
     <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
     <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
@@ -180,7 +180,7 @@
     <jodd.version>3.5.2</jodd.version>
     <jsr305.version>1.3.9</jsr305.version>
     <libthrift.version>0.9.3</libthrift.version>
-    <antlr4.version>4.7</antlr4.version>
+    <antlr4.version>4.5.3</antlr4.version>
     <jpam.version>1.1</jpam.version>
     <selenium.version>2.52.0</selenium.version>
     <!--
@@ -850,7 +850,7 @@
      <dependency>
        <groupId>org.scala-lang.modules</groupId>
        <artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
-        <version>1.0.4</version>
+        <version>1.1.0</version>
      </dependency>
      <dependency>
        <groupId>org.scala-lang</groupId>
@@ -861,7 +861,7 @@
      <dependency>
        <groupId>jline</groupId>
        <artifactId>jline</artifactId>
-        <version>2.12.1</version>
+        <version>2.14.3</version>
      </dependency>
      <dependency>
        <groupId>org.scalatest</groupId>
1 change: 0 additions & 1 deletion project/SparkBuild.scala
@@ -498,7 +498,6 @@ object OldDeps {

 object Catalyst {
   lazy val settings = antlr4Settings ++ Seq(
-    antlr4Version in Antlr4 := "4.7",
     antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
     antlr4GenListener in Antlr4 := true,
     antlr4GenVisitor in Antlr4 := true
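With the explicit antlr4Version gone, the sbt side of the build falls back to whatever the sbt-antlr4 plugin's default ANTLR version is, while Maven is pinned to 4.5.3 via <antlr4.version>. If that default ever disagreed with Maven, the setting could be pinned again; a hypothetical variant of the block above (whether this is needed depends on the plugin version in use):

```scala
// Hypothetical: pin sbt's ANTLR to match Maven's <antlr4.version> instead of
// relying on the sbt-antlr4 plugin default. Same keys as the settings above.
object Catalyst {
  lazy val settings = antlr4Settings ++ Seq(
    antlr4Version in Antlr4 := "4.5.3", // keep sbt and Maven in lockstep
    antlr4PackageName in Antlr4 := Some("org.apache.spark.sql.catalyst.parser"),
    antlr4GenListener in Antlr4 := true,
    antlr4GenVisitor in Antlr4 := true
  )
}
```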
15 changes: 4 additions & 11 deletions repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -19,9 +19,6 @@ package org.apache.spark.repl

 import java.io.BufferedReader

-// scalastyle:off println
-import scala.Predef.{println => _, _}
-// scalastyle:on println
 import scala.tools.nsc.Settings
 import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}
 import scala.tools.nsc.util.stringFromStream
@@ -35,10 +32,6 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
   def this() = this(None, new JPrintWriter(Console.out, true))

-  override def createInterpreter(): Unit = {
-    intp = new SparkILoopInterpreter(settings, out)
-  }
-
   val initializationCommands: Seq[String] = Seq(
     """
     @transient val spark = if (org.apache.spark.repl.Main.sparkSession != null) {
@@ -76,7 +69,7 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   def initializeSpark() {
     intp.beQuietDuring {
       savingReplayStack { // remove the commands from session history.
-        initializationCommands.foreach(processLine)
+        initializationCommands.foreach(command)
       }
     }
   }
@@ -102,13 +95,13 @@ class SparkILoop(in0: Option[BufferedReader], out: JPrintWriter)
   override def commands: List[LoopCommand] = standardCommands

   /**
-   * We override `loadFiles` because we need to initialize Spark *before* the REPL
+   * We override `createInterpreter` because we need to initialize Spark *before* the REPL
    * sees any files, so that the Spark context is visible in those files. This is a bit of a
    * hack, but there isn't another hook available to us at this point.
    */
-  override def loadFiles(settings: Settings): Unit = {
+  override def createInterpreter(): Unit = {
+    super.createInterpreter()
     initializeSpark()
-    super.loadFiles(settings)
   }

   override def resetCommand(line: String): Unit = {
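The hook move is forced by the Scala upgrade: the reworked REPL in 2.11.12 no longer offers loadFiles as an override point, so createInterpreter becomes the earliest place to run Spark's setup before the REPL processes any input, and processLine gives way to command, ILoop's method for feeding it one line. A minimal standalone sketch of the same shape, assuming scala-compiler 2.11.12 on the classpath (MyLoop and its setup line are illustrative, not Spark's code):

```scala
// Minimal sketch of initializing a REPL session from createInterpreter,
// assuming scala-compiler 2.11.12. MyLoop and `answer` are illustrative.
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.{ILoop, JPrintWriter}

class MyLoop extends ILoop(None, new JPrintWriter(Console.out, true)) {
  // Runs as soon as the interpreter exists, before any user input is seen.
  override def createInterpreter(): Unit = {
    super.createInterpreter()
    intp.beQuietDuring {
      command("@transient val answer = 42") // quiet setup, like initializeSpark()
    }
  }
}

object MyLoopMain {
  def main(args: Array[String]): Unit = {
    val settings = new Settings
    settings.usejavacp.value = true
    new MyLoop().process(settings) // REPL starts with `answer` already defined
  }
}
```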
6 changes: 1 addition & 5 deletions sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -60,10 +60,6 @@ singleDataType
     : dataType EOF
     ;

-singleTableSchema
-    : colTypeList EOF
-    ;
-
 statement
     : query #statementDefault
     | USE db=identifier #use
@@ -989,7 +985,7 @@ INPATH: 'INPATH';

 STRING
     : '\'' ( ~('\''|'\\') | ('\\' .) )* '\''
-    | '"' ( ~('"'|'\\') | ('\\' .) )* '"'
+    | '\"' ( ~('\"'|'\\') | ('\\' .) )* '\"'
     ;

 BIGINT_LITERAL
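With the singleTableSchema entry point dropped from the grammar, table-schema strings are parsed through the plain colTypeList rule instead (see the matching AstBuilder and ParseDriver hunks below); the public API is unchanged. The STRING change is cosmetic: ANTLR treats '\"' and '"' inside a quoted literal as the same double-quote character, so the rule accepts the same strings either way. A usage sketch of the schema parser this entry point serves:

```scala
// Usage sketch: parseTableSchema still maps a column-definition list to a
// StructType; after this change it goes through colTypeList directly.
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

val schema = CatalystSqlParser.parseTableSchema("a INT, b STRING")
// StructType(StructField(a,IntegerType,true), StructField(b,StringType,true))

// Malformed definitions still fail with a ParseException, e.g.:
// CatalystSqlParser.parseTableSchema("a INT,, b LONG")
```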
4 changes: 0 additions & 4 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -90,10 +90,6 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     visitSparkDataType(ctx.dataType)
   }

-  override def visitSingleTableSchema(ctx: SingleTableSchemaContext): StructType = {
-    withOrigin(ctx)(StructType(visitColTypeList(ctx.colTypeList)))
-  }
-
   /* ********************************************************************************************
    * Plan parsing
    * ******************************************************************************************** */
41 changes: 9 additions & 32 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParseDriver.scala
@@ -18,8 +18,7 @@ package org.apache.spark.sql.catalyst.parser

 import org.antlr.v4.runtime._
 import org.antlr.v4.runtime.atn.PredictionMode
-import org.antlr.v4.runtime.misc.{Interval, ParseCancellationException}
-import org.antlr.v4.runtime.tree.TerminalNodeImpl
+import org.antlr.v4.runtime.misc.ParseCancellationException

 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.AnalysisException
@@ -62,7 +61,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
   * definitions which will preserve the correct Hive metadata.
   */
  override def parseTableSchema(sqlText: String): StructType = parse(sqlText) { parser =>
-    astBuilder.visitSingleTableSchema(parser.singleTableSchema())
+    StructType(astBuilder.visitColTypeList(parser.colTypeList()))
  }

  /** Creates LogicalPlan for a given SQL string. */
@@ -81,7 +80,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
   protected def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
     logDebug(s"Parsing command: $command")

-    val lexer = new SqlBaseLexer(new UpperCaseCharStream(CharStreams.fromString(command)))
+    val lexer = new SqlBaseLexer(new ANTLRNoCaseStringStream(command))
     lexer.removeErrorListeners()
     lexer.addErrorListener(ParseErrorListener)

@@ -100,7 +99,7 @@ abstract class AbstractSqlParser extends ParserInterface with Logging {
       catch {
         case e: ParseCancellationException =>
           // if we fail, parse with LL mode
-          tokenStream.seek(0) // rewind input stream
+          tokenStream.reset() // rewind input stream
           parser.reset()

           // Try Again.
@@ -149,33 +148,12 @@ object CatalystSqlParser extends AbstractSqlParser {
 * the consume() function of the super class ANTLRStringStream. The LA() function is the lookahead
 * function and is purely used for matching lexical rules. This also means that the grammar will
 * only accept capitalized tokens in case it is run from other tools like antlrworks which do not
- * have the UpperCaseCharStream implementation.
+ * have the ANTLRNoCaseStringStream implementation.
 */

-private[parser] class UpperCaseCharStream(wrapped: CodePointCharStream) extends CharStream {
-  override def consume(): Unit = wrapped.consume
-  override def getSourceName(): String = wrapped.getSourceName
-  override def index(): Int = wrapped.index
-  override def mark(): Int = wrapped.mark
-  override def release(marker: Int): Unit = wrapped.release(marker)
-  override def seek(where: Int): Unit = wrapped.seek(where)
-  override def size(): Int = wrapped.size
-
-  override def getText(interval: Interval): String = {
-    // ANTLR 4.7's CodePointCharStream implementations have bugs when
-    // getText() is called with an empty stream, or intervals where
-    // the start > end. See
-    // https://github.com/antlr/antlr4/commit/ac9f7530 for one fix
-    // that is not yet in a released ANTLR artifact.
-    if (size() > 0 && (interval.b - interval.a >= 0)) {
-      wrapped.getText(interval)
-    } else {
-      ""
-    }
-  }
-
+private[parser] class ANTLRNoCaseStringStream(input: String) extends ANTLRInputStream(input) {
   override def LA(i: Int): Int = {
-    val la = wrapped.LA(i)
+    val la = super.LA(i)
     if (la == 0 || la == IntStream.EOF) la
     else Character.toUpperCase(la)
   }
@@ -266,12 +244,11 @@ case object PostProcessor extends SqlBaseBaseListener {
     val parent = ctx.getParent
     parent.removeLastChild()
     val token = ctx.getChild(0).getPayload.asInstanceOf[Token]
-    val newToken = new CommonToken(
+    parent.addChild(f(new CommonToken(
       new org.antlr.v4.runtime.misc.Pair(token.getTokenSource, token.getInputStream),
       SqlBaseParser.IDENTIFIER,
       token.getChannel,
       token.getStartIndex + stripMargins,
-      token.getStopIndex - stripMargins)
-    parent.addChild(new TerminalNodeImpl(f(newToken)))
+      token.getStopIndex - stripMargins)))
   }
 }
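These hunks track the ANTLR downgrade at the API level: CharStreams and CodePointCharStream exist only in 4.7, so the case-insensitive wrapper reverts to extending the older ANTLRInputStream; the defensive getText override (a workaround for a 4.7 CodePointCharStream bug) is no longer needed; tokenStream.reset() returns where the 4.7 upgrade had to use seek(0); and 4.5.3's ParserRuleContext.addChild takes a plain Token, removing the TerminalNodeImpl wrapper. The case-folding trick itself is unchanged: uppercase only what the lexer inspects via LA(), leaving getText() untouched. A self-contained sketch, assuming antlr4-runtime 4.5.3 on the classpath (NoCaseStream is an illustrative name):

```scala
// Self-contained sketch of the case-insensitive stream under antlr4-runtime
// 4.5.3 (ANTLRInputStream was superseded by CharStreams in ANTLR 4.7).
import org.antlr.v4.runtime.{ANTLRInputStream, IntStream}

class NoCaseStream(input: String) extends ANTLRInputStream(input) {
  // Lexer rules match on LA(), so uppercasing the lookahead makes keyword
  // matching case-insensitive while getText() still returns the original.
  override def LA(i: Int): Int = {
    val la = super.LA(i)
    if (la == 0 || la == IntStream.EOF) la
    else Character.toUpperCase(la)
  }
}

// A lexer reading NoCaseStream("select 1") sees SELECT, so a grammar whose
// keywords are written in upper case matches regardless of input casing.
```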
4 changes: 2 additions & 2 deletions sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/ParserUtils.scala
@@ -34,7 +34,7 @@ object ParserUtils {
   /** Get the command which created the token. */
   def command(ctx: ParserRuleContext): String = {
     val stream = ctx.getStart.getInputStream
-    stream.getText(Interval.of(0, stream.size() - 1))
+    stream.getText(Interval.of(0, stream.size()))
   }

   def operationNotAllowed(message: String, ctx: ParserRuleContext): Nothing = {
@@ -67,7 +67,7 @@ object ParserUtils {
   /** Get all the text which comes after the given token. */
   def remainder(token: Token): String = {
     val stream = token.getInputStream
-    val interval = Interval.of(token.getStopIndex + 1, stream.size() - 1)
+    val interval = Interval.of(token.getStopIndex + 1, stream.size())
     stream.getText(interval)
   }
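The size() versus size() - 1 flip follows the stream class underneath: Interval is inclusive on both ends, and ANTLRInputStream.getText clamps a stop index that runs past the end of input, so Interval.of(0, stream.size()) yields the whole text under 4.5.3; 4.7's CodePointCharStream is stricter about bounds, which is why the 4.7 upgrade had subtracted one. A worked sketch under the 4.5.3 semantics:

```scala
// Worked example of the inclusive Interval bounds, assuming antlr4-runtime
// 4.5.3: ANTLRInputStream.getText clamps the stop index to size() - 1.
import org.antlr.v4.runtime.ANTLRInputStream
import org.antlr.v4.runtime.misc.Interval

val stream = new ANTLRInputStream("SELECT 1")
val n = stream.size() // 8 characters, valid indices 0..7

stream.getText(Interval.of(0, n))     // "SELECT 1" (stop clamped to 7)
stream.getText(Interval.of(0, n - 1)) // "SELECT 1" (exact inclusive bounds)
```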
4 changes: 2 additions & 2 deletions sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ParserUtilsSuite.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.spark.sql.catalyst.parser

-import org.antlr.v4.runtime.{CharStreams, CommonTokenStream, ParserRuleContext}
+import org.antlr.v4.runtime.{CommonTokenStream, ParserRuleContext}

 import org.apache.spark.SparkFunSuite
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
@@ -57,7 +57,7 @@ class ParserUtilsSuite extends SparkFunSuite {
   }

   private def buildContext[T](command: String)(toResult: SqlBaseParser => T): T = {
-    val lexer = new SqlBaseLexer(new UpperCaseCharStream(CharStreams.fromString(command)))
+    val lexer = new SqlBaseLexer(new ANTLRNoCaseStringStream(command))
     val tokenStream = new CommonTokenStream(lexer)
     val parser = new SqlBaseParser(tokenStream)
     toResult(parser)
14 changes: 6 additions & 8 deletions sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableSchemaParserSuite.scala
@@ -79,12 +79,10 @@ class TableSchemaParserSuite extends SparkFunSuite {
   }

   // Negative cases
-  test("Negative cases") {
-    assertError("")
-    assertError("a")
-    assertError("a INT b long")
-    assertError("a INT,, b long")
-    assertError("a INT, b long,,")
-    assertError("a INT, b long, c int,")
-  }
+  assertError("")
+  assertError("a")
+  assertError("a INT b long")
+  assertError("a INT,, b long")
+  assertError("a INT, b long,,")
+  assertError("a INT, b long, c int,")
 }
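With the test(...) wrapper removed, the assertError calls now run in the suite's constructor body rather than inside a named ScalaTest case, so any regression surfaces when the suite is instantiated. For context, a hedged sketch of what such a helper amounts to against the public parser API (the suite's real assertError is defined earlier in this file):

```scala
// Hedged sketch of an assertError-style helper via the public API; the
// suite's actual helper lives in TableSchemaParserSuite itself.
import org.apache.spark.sql.catalyst.parser.{CatalystSqlParser, ParseException}

def assertError(sql: String): Unit =
  try {
    CatalystSqlParser.parseTableSchema(sql)
    throw new AssertionError(s"'$sql' should not have parsed")
  } catch {
    case _: ParseException => () // expected: malformed schema string
  }

assertError("a INT,, b long") // passes: the double comma is rejected
```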