diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index 48beffa18a551..26eccc1f24927 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -41,6 +41,8 @@ import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, ScalaReflection}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.util.{ArrayData, DateTimeUtils, MapData}
+import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getTimeZone, stringToDate, stringToTimestamp}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types._
 import org.apache.spark.util.Utils
@@ -139,6 +141,11 @@ object Literal {
    * Constructs a Literal from a String
    */
   def fromString(str: String, dataType: DataType): Literal = {
+    def parse[T](f: UTF8String => Option[T]): T = {
+      f(UTF8String.fromString(str)).getOrElse {
+        throw new AnalysisException(s"Cannot parse the ${dataType.catalogString} value: $str")
+      }
+    }
     val value = dataType match {
       case BooleanType => str.toBoolean
       case ByteType => str.toByte
@@ -148,8 +155,10 @@ object Literal {
       case FloatType => str.toFloat
       case DoubleType => str.toDouble
       case StringType => UTF8String.fromString(str)
-      case DateType => java.sql.Date.valueOf(str)
-      case TimestampType => java.sql.Timestamp.valueOf(str)
+      case DateType => parse(stringToDate)
+      case TimestampType =>
+        val timeZone = getTimeZone(SQLConf.get.sessionLocalTimeZone)
+        parse(stringToTimestamp(_, timeZone))
       case CalendarIntervalType => CalendarInterval.fromString(str)
       case t: DecimalType =>
         val d = Decimal(str)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 24bbe116ad897..a8eb19fb3a0b1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -22,6 +22,7 @@ import javax.xml.bind.DatatypeConverter
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
+import scala.util.Try
 
 import org.antlr.v4.runtime.{ParserRuleContext, Token}
 import org.antlr.v4.runtime.tree.{ParseTree, RuleNode, TerminalNode}
@@ -36,10 +37,9 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.{First, Last}
 import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
-import org.apache.spark.sql.catalyst.util.DateTimeUtils.{getTimeZone, stringToDate, stringToTimestamp}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
-import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
+import org.apache.spark.unsafe.types.CalendarInterval
 import org.apache.spark.util.random.RandomSampler
 
 /**
@@ -1552,17 +1552,15 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
   override def visitTypeConstructor(ctx: TypeConstructorContext): Literal = withOrigin(ctx) {
     val value = string(ctx.STRING)
     val valueType = ctx.identifier.getText.toUpperCase(Locale.ROOT)
-    def toLiteral[T](f: UTF8String => Option[T], t: DataType): Literal = {
-      f(UTF8String.fromString(value)).map(Literal(_, t)).getOrElse {
+    def toLiteral(t: DataType): Literal = {
+      Try {Literal.fromString(value, t)}.getOrElse {
         throw new ParseException(s"Cannot parse the $valueType value: $value", ctx)
       }
     }
     try {
       valueType match {
-        case "DATE" => toLiteral(stringToDate, DateType)
-        case "TIMESTAMP" =>
-          val timeZone = getTimeZone(SQLConf.get.sessionLocalTimeZone)
-          toLiteral(stringToTimestamp(_, timeZone), TimestampType)
+        case "DATE" => toLiteral(DateType)
+        case "TIMESTAMP" => toLiteral(TimestampType)
         case "X" =>
           val padding = if (value.length % 2 != 0) "0" else ""
           Literal(DatatypeConverter.parseHexBinary(padding + value))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index 133aaa449ea44..636b747923bc3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -243,7 +243,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(Literal.fromString("Databricks", StringType), "Databricks")
     val dateString = "1970-01-01"
     checkEvaluation(Literal.fromString(dateString, DateType), java.sql.Date.valueOf(dateString))
-    val timestampString = "0000-01-01 00:00:00"
+    val timestampString = "2000-01-01 00:00:00.123"
     checkEvaluation(Literal.fromString(timestampString, TimestampType),
       java.sql.Timestamp.valueOf(timestampString))
     val calInterval = new CalendarInterval(1, 1)
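
Below is a minimal sketch (not part of the patch) of what the change means for callers, assuming spark-catalyst from this branch is on the classpath: `Literal.fromString` now routes DATE and TIMESTAMP strings through `DateTimeUtils.stringToDate`/`stringToTimestamp` (interpreting timestamps in the session-local time zone) instead of `java.sql.Date.valueOf`/`Timestamp.valueOf`, and reports unparseable input as an `AnalysisException`.

```scala
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.types.{DateType, TimestampType}

// Well-formed strings parse as before, but via DateTimeUtils, so the
// timestamp is interpreted in spark.sql.session.timeZone.
val date = Literal.fromString("2019-01-01", DateType)
val ts = Literal.fromString("2019-01-01 12:34:56.789", TimestampType)

// Malformed input now surfaces as the AnalysisException built in the new
// parse() helper, rather than an IllegalArgumentException thrown by
// java.sql.Date.valueOf.
try {
  Literal.fromString("2019-13-99", DateType)
} catch {
  case e: AnalysisException =>
    println(e.getMessage) // "Cannot parse the date value: 2019-13-99"
}
```

In the parser, typed literals such as `DATE '2019-01-01'` now take the same path: `visitTypeConstructor` delegates to `Literal.fromString` and rewraps any failure (caught via `Try`) as a `ParseException`, so SQL typed literals and `fromString` can no longer disagree on what parses.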