Skip to content

Commit

Permalink
[SPARK-50422][SQL] Make Parameterized SQL queries of `SparkSession.sql` API GA
Browse files Browse the repository at this point in the history
  • Loading branch information
dongjoon-hyun committed Nov 26, 2024
1 parent afb5d6f commit 655edf5
Show file tree
Hide file tree
Showing 3 changed files with 0 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,6 @@ class SparkSession private[sql] (
throw ConnectClientUnsupportedErrors.executeCommand()

/** @inheritdoc */
@Experimental
def sql(sqlText: String, args: Array[_]): DataFrame = {
val sqlCommand = proto.SqlCommand
.newBuilder()
Expand All @@ -221,13 +220,11 @@ class SparkSession private[sql] (
}

/** @inheritdoc */
@Experimental
def sql(sqlText: String, args: Map[String, Any]): DataFrame = {
sql(sqlText, args.asJava)
}

/** @inheritdoc */
@Experimental
override def sql(sqlText: String, args: java.util.Map[String, Any]): DataFrame = {
val sqlCommand = proto.SqlCommand
.newBuilder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -470,7 +470,6 @@ abstract class SparkSession extends Serializable with Closeable {
* is.
* @since 3.5.0
*/
@Experimental
def sql(sqlText: String, args: Array[_]): Dataset[Row]

/**
Expand All @@ -488,7 +487,6 @@ abstract class SparkSession extends Serializable with Closeable {
* `array()`, `struct()`, in that case it is taken as is.
* @since 3.4.0
*/
@Experimental
def sql(sqlText: String, args: Map[String, Any]): Dataset[Row]

/**
Expand All @@ -506,7 +504,6 @@ abstract class SparkSession extends Serializable with Closeable {
* `array()`, `struct()`, in that case it is taken as is.
* @since 3.4.0
*/
@Experimental
def sql(sqlText: String, args: util.Map[String, Any]): Dataset[Row] = {
sql(sqlText, args.asScala.toMap)
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,6 @@ class SparkSession private(
}

/** @inheritdoc */
@Experimental
def sql(sqlText: String, args: Array[_]): DataFrame = {
sql(sqlText, args, new QueryPlanningTracker)
}
Expand Down Expand Up @@ -498,13 +497,11 @@ class SparkSession private(
}

/** @inheritdoc */
@Experimental
def sql(sqlText: String, args: Map[String, Any]): DataFrame = {
sql(sqlText, args, new QueryPlanningTracker)
}

/** @inheritdoc */
@Experimental
override def sql(sqlText: String, args: java.util.Map[String, Any]): DataFrame = {
sql(sqlText, args.asScala.toMap)
}
Expand Down

0 comments on commit 655edf5

Please sign in to comment.