From 7cbfc2cad078f40c290eff6ffd90e4902397314a Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Tue, 26 Nov 2024 10:56:38 +0100 Subject: [PATCH] [SPARK-50422][SQL] Make `Parameterized SQL queries` of `SparkSession.sql` API GA ### What changes were proposed in this pull request? This PR aims to make `Parameterized SQL queries` of `SparkSession.sql` API GA in Apache Spark 4.0.0. ### Why are the changes needed? Apache Spark has supported `Parameterized SQL queries` because they are very convenient for users. - https://github.com/apache/spark/pull/38864 (Since Spark 3.4.0) - https://github.com/apache/spark/pull/41568 (Since Spark 3.5.0) It's time to make it GA by removing `Experimental` tags since this feature has been serving well for a long time. ### Does this PR introduce _any_ user-facing change? No, there is no behavior change. ### How was this patch tested? Pass the CIs. ### Was this patch authored or co-authored using generative AI tooling? No. Closes #48965 from dongjoon-hyun/SPARK-50422. 
Authored-by: Dongjoon Hyun Signed-off-by: Max Gekk --- .../jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala | 3 --- .../src/main/scala/org/apache/spark/sql/api/SparkSession.scala | 3 --- .../src/main/scala/org/apache/spark/sql/SparkSession.scala | 3 --- 3 files changed, 9 deletions(-) diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala index 231c604b98bb5..b74d0c2ff2243 100644 --- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -210,7 +210,6 @@ class SparkSession private[sql] ( throw ConnectClientUnsupportedErrors.executeCommand() /** @inheritdoc */ - @Experimental def sql(sqlText: String, args: Array[_]): DataFrame = { val sqlCommand = proto.SqlCommand .newBuilder() @@ -221,13 +220,11 @@ class SparkSession private[sql] ( } /** @inheritdoc */ - @Experimental def sql(sqlText: String, args: Map[String, Any]): DataFrame = { sql(sqlText, args.asJava) } /** @inheritdoc */ - @Experimental override def sql(sqlText: String, args: java.util.Map[String, Any]): DataFrame = { val sqlCommand = proto.SqlCommand .newBuilder() diff --git a/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala b/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala index 64b0a87c573d3..35f74497b96f4 100644 --- a/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala +++ b/sql/api/src/main/scala/org/apache/spark/sql/api/SparkSession.scala @@ -470,7 +470,6 @@ abstract class SparkSession extends Serializable with Closeable { * is. * @since 3.5.0 */ - @Experimental def sql(sqlText: String, args: Array[_]): Dataset[Row] /** @@ -488,7 +487,6 @@ abstract class SparkSession extends Serializable with Closeable { * `array()`, `struct()`, in that case it is taken as is. 
* @since 3.4.0 */ - @Experimental def sql(sqlText: String, args: Map[String, Any]): Dataset[Row] /** @@ -506,7 +504,6 @@ abstract class SparkSession extends Serializable with Closeable { * `array()`, `struct()`, in that case it is taken as is. * @since 3.4.0 */ - @Experimental def sql(sqlText: String, args: util.Map[String, Any]): Dataset[Row] = { sql(sqlText, args.asScala.toMap) } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index a7f85db12b214..8cf30fb39f310 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -460,7 +460,6 @@ class SparkSession private( } /** @inheritdoc */ - @Experimental def sql(sqlText: String, args: Array[_]): DataFrame = { sql(sqlText, args, new QueryPlanningTracker) } @@ -498,13 +497,11 @@ class SparkSession private( } /** @inheritdoc */ - @Experimental def sql(sqlText: String, args: Map[String, Any]): DataFrame = { sql(sqlText, args, new QueryPlanningTracker) } /** @inheritdoc */ - @Experimental override def sql(sqlText: String, args: java.util.Map[String, Any]): DataFrame = { sql(sqlText, args.asScala.toMap) }