This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0fc85a24a15 [SPARK-44783][SQL][TESTS] Checks arrays as named and positional parameters
0fc85a24a15 is described below

commit 0fc85a24a1531c74e7bc5b2d0a36635580a395a6
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Mon Aug 14 18:32:58 2023 +0800

    [SPARK-44783][SQL][TESTS] Checks arrays as named and positional parameters
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to add a new test which checks arrays as named and positional parameters, as sketched below.
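    For context, a condensed sketch of the cases the new test covers (taken from the diff below): an array can be bound to a named marker such as `:arrParam` through a `Map`, or to a positional `?` marker through an `Array`.
    ```scala
    // Sketch only, mirroring two of the test cases added in this commit.
    // Named parameter: the array is bound to :arrParam via a Map.
    spark.sql(
      "SELECT array_contains(:arrParam, 10)",
      Map("arrParam" -> Array(10, 20, 30)))   // query result: true

    // Positional parameters: the array and the element are bound to the two ? markers.
    spark.sql(
      "SELECT array_contains(?, ?)",
      Array(Array("a", "b", "c"), "b"))       // query result: true
    ```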
    
    ### Why are the changes needed?
    To improve test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    By running the modified test suite:
    ```
    $ build/sbt "test:testOnly *ParametersSuite"
    ```
    
    Closes #42470 from MaxGekk/sql-parameterized-by-array.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../org/apache/spark/sql/ParametersSuite.scala     | 27 ++++++++++++++++++++++
 1 file changed, 27 insertions(+)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
index a72c9a600ad..6310a5a50e0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala
@@ -502,4 +502,31 @@ class ParametersSuite extends QueryTest with SharedSparkSession {
         start = 24,
         stop = 36))
   }
+
+  test("SPARK-44783: arrays as parameters") {
+    checkAnswer(
+      spark.sql("SELECT array_position(:arrParam, 'abc')", Map("arrParam" -> 
Array.empty[String])),
+      Row(0))
+    checkAnswer(
+      spark.sql("SELECT array_position(?, 0.1D)", Array(Array.empty[Double])),
+      Row(0))
+    checkAnswer(
+      spark.sql("SELECT array_contains(:arrParam, 10)", Map("arrParam" -> 
Array(10, 20, 30))),
+      Row(true))
+    checkAnswer(
+      spark.sql("SELECT array_contains(?, ?)", Array(Array("a", "b", "c"), 
"b")),
+      Row(true))
+    checkAnswer(
+      spark.sql("SELECT :arr[1]", Map("arr" -> Array(10, 20, 30))),
+      Row(20))
+    checkAnswer(
+      spark.sql("SELECT ?[?]", Array(Array(1f, 2f, 3f), 0)),
+      Row(1f))
+    checkAnswer(
+      spark.sql("SELECT :arr[0][1]", Map("arr" -> Array(Array(1, 2), 
Array(20), Array.empty[Int]))),
+      Row(2))
+    checkAnswer(
+      spark.sql("SELECT ?[?][?]", Array(Array(Array(1f, 2f), 
Array.empty[Float], Array(3f)), 0, 1)),
+      Row(2f))
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
