@@ -1,9 +1,9 @@
/**
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements. See the NOTICE file distributed with
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0
@@ -580,7 +580,7 @@ import java.util.HashMap;

return header;
}

@Override
public String getErrorMessage(RecognitionException e, String[] tokenNames) {
String msg = null;
@@ -617,7 +617,7 @@ import java.util.HashMap;
}
return msg;
}

public void pushMsg(String msg, RecognizerSharedState state) {
// ANTLR generated code does not wrap the @init code with this backtracking check,
// even if the matching @after has it. If we have parser rules with that are doing
@@ -637,7 +637,7 @@ import java.util.HashMap;
// counter to generate unique union aliases
private int aliasCounter;
private String generateUnionAlias() {
return "_u" + (++aliasCounter);
return "u_" + (++aliasCounter);
}
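
The union alias prefix changes from `_u` to `u_` above, plausibly so that generated aliases survive a round trip through the parser (unquoted HiveQL identifiers cannot begin with an underscore). A minimal standalone sketch of the counter-based generator, detached from the grammar:

object UnionAliasGen {
  // Mirrors generateUnionAlias() above: each call yields a fresh alias u_1, u_2, ...
  private var aliasCounter = 0
  def generateUnionAlias(): String = {
    aliasCounter += 1
    "u_" + aliasCounter
  }
}
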
private char [] excludedCharForColumnName = {'.', ':'};
private boolean containExcludedCharForCreateTableColumnName(String input) {
@@ -1233,7 +1233,7 @@ alterTblPartitionStatementSuffixSkewedLocation
: KW_SET KW_SKEWED KW_LOCATION skewedLocations
-> ^(TOK_ALTERTABLE_SKEWED_LOCATION skewedLocations)
;

skewedLocations
@init { pushMsg("skewed locations", state); }
@after { popMsg(state); }
@@ -1262,7 +1262,7 @@ alterStatementSuffixLocation
-> ^(TOK_ALTERTABLE_LOCATION $newLoc)
;


alterStatementSuffixSkewedby
@init {pushMsg("alter skewed by statement", state);}
@after{popMsg(state);}
@@ -1334,10 +1334,10 @@ tabTypeExpr
(identifier (DOT^
(
(KW_ELEM_TYPE) => KW_ELEM_TYPE
- |
+ |
(KW_KEY_TYPE) => KW_KEY_TYPE
- |
- (KW_VALUE_TYPE) => KW_VALUE_TYPE
+ |
+ (KW_VALUE_TYPE) => KW_VALUE_TYPE
| identifier
))*
)?
@@ -1374,7 +1374,7 @@ descStatement
analyzeStatement
@init { pushMsg("analyze statement", state); }
@after { popMsg(state); }
- : KW_ANALYZE KW_TABLE (parttype=tableOrPartition) KW_COMPUTE KW_STATISTICS ((noscan=KW_NOSCAN) | (partialscan=KW_PARTIALSCAN)
+ : KW_ANALYZE KW_TABLE (parttype=tableOrPartition) KW_COMPUTE KW_STATISTICS ((noscan=KW_NOSCAN) | (partialscan=KW_PARTIALSCAN)
| (KW_FOR KW_COLUMNS (statsColumnName=columnNameList)?))?
-> ^(TOK_ANALYZE $parttype $noscan? $partialscan? KW_COLUMNS? $statsColumnName?)
;
@@ -1387,7 +1387,7 @@ showStatement
| KW_SHOW KW_COLUMNS (KW_FROM|KW_IN) tableName ((KW_FROM|KW_IN) db_name=identifier)?
-> ^(TOK_SHOWCOLUMNS tableName $db_name?)
| KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier|showFunctionIdentifier)? -> ^(TOK_SHOWFUNCTIONS KW_LIKE? showFunctionIdentifier?)
- | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec?)
+ | KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? -> ^(TOK_SHOWPARTITIONS $tabName partitionSpec?)
| KW_SHOW KW_CREATE (
(KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) db_name=identifier -> ^(TOK_SHOW_CREATEDATABASE $db_name)
|
@@ -1396,7 +1396,7 @@ showStatement
| KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=identifier)? KW_LIKE showStmtIdentifier partitionSpec?
-> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
| KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES tableName $prptyName?)
- | KW_SHOW KW_LOCKS
+ | KW_SHOW KW_LOCKS
(
(KW_DATABASE|KW_SCHEMA) => (KW_DATABASE|KW_SCHEMA) (dbName=Identifier) (isExtended=KW_EXTENDED)? -> ^(TOK_SHOWDBLOCKS $dbName $isExtended?)
|
@@ -1509,7 +1509,7 @@ showCurrentRole
setRole
@init {pushMsg("set role", state);}
@after {popMsg(state);}
- : KW_SET KW_ROLE
+ : KW_SET KW_ROLE
(
(KW_ALL) => (all=KW_ALL) -> ^(TOK_SHOW_SET_ROLE Identifier[$all.text])
|
@@ -1964,7 +1964,7 @@ columnNameOrderList
skewedValueElement
@init { pushMsg("skewed value element", state); }
@after { popMsg(state); }
- :
+ :
skewedColumnValues
| skewedColumnValuePairList
;
@@ -1978,8 +1978,8 @@ skewedColumnValuePairList
skewedColumnValuePair
@init { pushMsg("column value pair", state); }
@after { popMsg(state); }
- :
- LPAREN colValues=skewedColumnValues RPAREN
+ :
+ LPAREN colValues=skewedColumnValues RPAREN
-> ^(TOK_TABCOLVALUES $colValues)
;

@@ -1999,11 +1999,11 @@ skewedColumnValue
skewedValueLocationElement
@init { pushMsg("skewed value location element", state); }
@after { popMsg(state); }
- :
+ :
skewedColumnValue
| skewedColumnValuePair
;

columnNameOrder
@init { pushMsg("column name order", state); }
@after { popMsg(state); }
@@ -2116,7 +2116,7 @@ unionType
@after { popMsg(state); }
: KW_UNIONTYPE LESSTHAN colTypeList GREATERTHAN -> ^(TOK_UNIONTYPE colTypeList)
;

setOperator
@init { pushMsg("set operator", state); }
@after { popMsg(state); }
@@ -2168,7 +2168,7 @@ fromStatement[boolean topLevel]
{adaptor.create(Identifier, generateUnionAlias())}
)
)
- ^(TOK_INSERT
+ ^(TOK_INSERT
^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
^(TOK_SELECT ^(TOK_SELEXPR TOK_ALLCOLREF))
)
@@ -2391,8 +2391,8 @@ setColumnsClause
KW_SET columnAssignmentClause (COMMA columnAssignmentClause)* -> ^(TOK_SET_COLUMNS_CLAUSE columnAssignmentClause* )
;

- /*
- UPDATE <table>
+ /*
+ UPDATE <table>
SET col1 = val1, col2 = val2... WHERE ...
*/
updateStatement
@@ -86,8 +86,7 @@ class Analyzer(
HiveTypeCoercion.typeCoercionRules ++
extendedResolutionRules : _*),
Batch("Nondeterministic", Once,
- PullOutNondeterministic,
- ComputeCurrentTime),
+ PullOutNondeterministic),
Batch("UDF", Once,
HandleNullInputsForUDF),
Batch("Cleanup", fixedPoint,
@@ -1229,23 +1228,6 @@ object CleanupAliases extends Rule[LogicalPlan] {
}
}

- /**
- * Computes the current date and time to make sure we return the same result in a single query.
- */
- object ComputeCurrentTime extends Rule[LogicalPlan] {
- def apply(plan: LogicalPlan): LogicalPlan = {
- val dateExpr = CurrentDate()
- val timeExpr = CurrentTimestamp()
- val currentDate = Literal.create(dateExpr.eval(EmptyRow), dateExpr.dataType)
- val currentTime = Literal.create(timeExpr.eval(EmptyRow), timeExpr.dataType)
-
- plan transformAllExpressions {
- case CurrentDate() => currentDate
- case CurrentTimestamp() => currentTime
- }
- }
- }
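
The `ComputeCurrentTime` rule is removed outright here, along with its registration in the `Nondeterministic` batch above. For orientation, the deleted rule follows a fold-once-and-substitute pattern: evaluate the time expression a single time, then replace every occurrence with the same literal. A standalone sketch of that pattern, using stand-in types rather than Catalyst's:

sealed trait Expr
case object CurrentMillis extends Expr
final case class LongLiteral(value: Long) extends Expr

def computeCurrentTime(plan: Seq[Expr]): Seq[Expr] = {
  // Evaluated exactly once, so every occurrence sees the same instant.
  val now = LongLiteral(System.currentTimeMillis())
  plan.map {
    case CurrentMillis => now
    case other         => other
  }
}
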

/**
* Replace the `UpCast` expression by `Cast`, and throw exceptions if the cast may truncate.
*/
@@ -110,7 +110,9 @@ class SimpleCatalog(val conf: CatalystConf) extends Catalog {

// If an alias was specified by the lookup, wrap the plan in a subquery so that attributes are
// properly qualified with this alias.
- alias.map(a => Subquery(a, tableWithQualifiers)).getOrElse(tableWithQualifiers)
+ alias
+ .map(a => Subquery(a, tableWithQualifiers))
+ .getOrElse(tableWithQualifiers)
}
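
The alias wrapping above is a plain `Option` fold: wrap the plan in a subquery when an alias is present, otherwise return it unchanged. A tiny stand-in sketch (not Spark's classes):

final case class Plan(desc: String)

def applyAlias(alias: Option[String], plan: Plan): Plan =
  alias.map(a => Plan(s"Subquery($a, ${plan.desc})")).getOrElse(plan)

// applyAlias(Some("t"), Plan("rel")) == Plan("Subquery(t, rel)")
// applyAlias(None, Plan("rel"))      == Plan("rel")
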

override def getTables(databaseName: Option[String]): Seq[(String, Boolean)] = {
@@ -931,6 +931,14 @@ case class Cast(child: Expression, dataType: DataType) extends UnaryExpression {
$evPrim = $result.copy();
"""
}

+ override def sql: String = dataType match {
+ // HiveQL doesn't allow casting to complex types. For logical plans translated from HiveQL, this
+ // type of casting can only be introduced by the analyzer, and can be omitted when converting
+ // back to SQL query string.
+ case _: ArrayType | _: MapType | _: StructType => child.sql
+ case _ => s"CAST(${child.sql} AS ${dataType.sql})"
+ }
}
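
The new `Cast.sql` override elides casts to complex types and emits an explicit `CAST` otherwise. A self-contained sketch of the same dispatch, with a stand-in type hierarchy in place of Catalyst's `DataType`:

sealed trait SqlType { def sql: String }
case object IntSqlType extends SqlType { val sql = "INT" }
final case class ArraySqlType(elem: SqlType) extends SqlType {
  def sql: String = s"ARRAY<${elem.sql}>"
}

// Complex-type casts vanish from the generated SQL; anything else becomes an explicit CAST.
def castSql(childSql: String, to: SqlType): String = to match {
  case _: ArraySqlType => childSql
  case _               => s"CAST($childSql AS ${to.sql})"
}

// castSql("arr_col", ArraySqlType(IntSqlType)) == "arr_col"
// castSql("'1'", IntSqlType)                   == "CAST('1' AS INT)"
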

@@ -18,9 +18,10 @@
package org.apache.spark.sql.catalyst.expressions

import org.apache.spark.sql.catalyst.InternalRow
- import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, UnresolvedAttribute}
+ import org.apache.spark.sql.catalyst.analysis.{Analyzer, TypeCheckResult, UnresolvedAttribute}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.trees.TreeNode
+ import org.apache.spark.sql.catalyst.util.sequenceOption
import org.apache.spark.sql.types._

////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -223,6 +224,15 @@ abstract class Expression extends TreeNode[Expression] {
protected def toCommentSafeString: String = this.toString
.replace("*/", "\\*\\/")
.replace("\\u", "\\\\u")

+ /**
+ * Returns SQL representation of this expression. For expressions that don't have a SQL
+ * representation (e.g. `ScalaUDF`), this method should throw an `UnsupportedOperationException`.
+ */
+ @throws[UnsupportedOperationException](cause = "Expression doesn't have a SQL representation")
+ def sql: String = throw new UnsupportedOperationException(
+ s"Cannot map expression $this to its SQL representation"
+ )
}
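
The base `sql` implementation deliberately throws, so only expressions that opt in can be rendered as SQL text. A minimal sketch of the contract with illustrative classes (not Spark's):

abstract class SketchExpression {
  // Default: no SQL form.
  def sql: String = throw new UnsupportedOperationException(
    s"Cannot map expression $this to its SQL representation")
}

final case class SketchLiteral(value: Int) extends SketchExpression {
  override def sql: String = value.toString
}

// SketchLiteral(42).sql == "42"; an expression that does not override
// `sql` throws UnsupportedOperationException instead.
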


@@ -356,6 +366,8 @@ abstract class UnaryExpression extends Expression {
"""
}
}

override def sql: String = s"($prettyName(${child.sql}))"
}


@@ -456,6 +468,8 @@ abstract class BinaryExpression extends Expression {
"""
}
}

override def sql: String = s"$prettyName(${left.sql}, ${right.sql})"
}


@@ -492,6 +506,8 @@ abstract class BinaryOperator extends BinaryExpression with ExpectsInputTypes {
TypeCheckResult.TypeCheckSuccess
}
}

override def sql: String = s"(${left.sql} $symbol ${right.sql})"
}


@@ -593,4 +609,9 @@ abstract class TernaryExpression extends Expression {
"""
}
}

+ override def sql: String = {
+ val childrenSQL = children.map(_.sql).mkString(", ")
+ s"$prettyName($childrenSQL)"
+ }
}
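
Taken together, the defaults added in this file give a small set of rendering shapes: unary expressions print as parenthesized prefix calls, plain binary expressions as two-argument prefix calls, binary operators as parenthesized infix, and ternary expressions as plain function calls. A compact sketch under a stand-in trait (not Spark API):

trait HasSql { def sql: String }
final case class Col(name: String) extends HasSql { def sql: String = name }

// Mirrors the four templates above.
def unarySql(prettyName: String, child: HasSql): String =
  s"($prettyName(${child.sql}))"
def binarySql(prettyName: String, left: HasSql, right: HasSql): String =
  s"$prettyName(${left.sql}, ${right.sql})"
def binaryOperatorSql(left: HasSql, symbol: String, right: HasSql): String =
  s"(${left.sql} $symbol ${right.sql})"
def ternarySql(prettyName: String, children: Seq[HasSql]): String =
  s"$prettyName(${children.map(_.sql).mkString(", ")})"

// unarySql("not", Col("p"))                           == "(not(p))"
// binaryOperatorSql(Col("a"), "+", Col("b"))          == "(a + b)"
// ternarySql("if", Seq(Col("c"), Col("x"), Col("y"))) == "if(c, x, y)"
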
@@ -49,4 +49,5 @@ case class InputFileName() extends LeafExpression with Nondeterministic {
"org.apache.spark.rdd.SqlNewHadoopRDDState.getInputFileName();"
}

+ override def sql: String = prettyName
}
@@ -78,4 +78,8 @@ private[sql] case class MonotonicallyIncreasingID() extends LeafExpression with
$countTerm++;
"""
}

+ override def prettyName: String = "monotonically_increasing_id"
+
+ override def sql: String = s"$prettyName()"
}
@@ -24,9 +24,17 @@ import org.apache.spark.sql.types._
import org.apache.spark.util.collection.unsafe.sort.PrefixComparators.BinaryPrefixComparator
import org.apache.spark.util.collection.unsafe.sort.PrefixComparators.DoublePrefixComparator

- abstract sealed class SortDirection
- case object Ascending extends SortDirection
- case object Descending extends SortDirection
+ abstract sealed class SortDirection {
+ def sql: String
+ }
+
+ case object Ascending extends SortDirection {
+ override def sql: String = "ASC"
+ }
+
+ case object Descending extends SortDirection {
+ override def sql: String = "DESC"
+ }
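
With `SortDirection` now carrying its keyword, rendering an ORDER BY item is presumably just the child's SQL followed by the direction's SQL. A hypothetical helper (`SortOrder`'s own `sql`, if any, is not shown in this diff):

def sortItemSql(childSql: String, directionSql: String): String =
  s"$childSql $directionSql"

// sortItemSql("price", "DESC") == "price DESC"
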

/**
* An expression that can be used to sort a tuple. This class extends expression primarily so that
@@ -19,7 +19,8 @@ package org.apache.spark.sql.catalyst.expressions.aggregate

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
- import org.apache.spark.sql.catalyst.expressions.codegen.{CodeGenContext, CodegenFallback, GeneratedExpressionCode}
+ import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
+ import org.apache.spark.sql.catalyst.util.sequenceOption
import org.apache.spark.sql.types._

/** The mode of an [[AggregateFunction]]. */
@@ -93,11 +94,13 @@ private[sql] case class AggregateExpression(

override def prettyString: String = aggregateFunction.prettyString

override def toString: String = s"(${aggregateFunction},mode=$mode,isDistinct=$isDistinct)"
override def toString: String = s"($aggregateFunction,mode=$mode,isDistinct=$isDistinct)"

override def sql: String = aggregateFunction.sql(isDistinct)
}

/**
- * AggregateFunction2 is the superclass of two aggregation function interfaces:
+ * AggregateFunction is the superclass of two aggregation function interfaces:
*
* - [[ImperativeAggregate]] is for aggregation functions that are specified in terms of
* initialize(), update(), and merge() functions that operate on Row-based aggregation buffers.
@@ -163,6 +166,11 @@ sealed abstract class AggregateFunction extends Expression with ImplicitCastInpu
def toAggregateExpression(isDistinct: Boolean): AggregateExpression = {
AggregateExpression(aggregateFunction = this, mode = Complete, isDistinct = isDistinct)
}

+ def sql(isDistinct: Boolean): String = {
+ val distinct = if (isDistinct) "DISTINCT " else " "
+ s"$prettyName($distinct${children.map(_.sql).mkString(", ")})"
+ }
}
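
Note the spacing in `sql(isDistinct)` above: the non-distinct branch interpolates a single space, so output reads `count( a)` rather than `count(a)`, which is valid SQL if slightly untidy. A standalone sketch, detached from Catalyst:

def aggregateSql(prettyName: String, isDistinct: Boolean, childSql: Seq[String]): String = {
  val distinct = if (isDistinct) "DISTINCT " else " "
  s"$prettyName($distinct${childSql.mkString(", ")})"
}

// aggregateSql("count", isDistinct = true,  Seq("a")) == "count(DISTINCT a)"
// aggregateSql("count", isDistinct = false, Seq("a")) == "count( a)"
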
