This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new 37c898c63b1 [MINOR][CONNECT][TESTS] Check named parameters in `sql()` 37c898c63b1 is described below commit 37c898c63b1fd9fcb9773313246ff28e631eb28f Author: Max Gekk <max.g...@gmail.com> AuthorDate: Mon Jun 26 09:17:56 2023 +0300 [MINOR][CONNECT][TESTS] Check named parameters in `sql()` ### What changes were proposed in this pull request? In the PR, I propose to add new tests to check named parameters in `sql()` of Scala connect client. ### Why are the changes needed? To improve test coverage. Before the PR, the feature has not been tested at all. ### Does this PR introduce _any_ user-facing change? No. ### How was this patch tested? By running new test: ``` $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *.ClientE2ETestSuite" ``` Closes #41726 from MaxGekk/test-named-params-proto. Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala index b24e445964a..0ababaa0af1 100644 --- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala +++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala @@ -960,6 +960,17 @@ class ClientE2ETestSuite extends RemoteSparkSession with SQLHelper with PrivateM assert(result2(0).getInt(0) === 1) assert(result2(0).getString(1) === "abc") } + + test("sql() with named parameters") { + val result0 = spark.sql("select 1", Map.empty[String, Any]).collect() + assert(result0.length == 1 && result0(0).getInt(0) === 1) + + val result1 = spark.sql("select :abc", Map("abc" -> 1)).collect() + assert(result1.length == 1 && result1(0).getInt(0) === 1) + + val result2 = spark.sql("select :c0 limit :l0", Map("l0" -> 1, "c0" -> "abc")).collect() + assert(result2.length == 1 && result2(0).getString(0) === "abc") + } } private[sql] case class MyType(id: Long, a: Double, b: Double) --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org