This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.5
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.5 by this push:
     new 723a85eb2df [SPARK-45078][SQL] Fix `array_insert` 
ImplicitCastInputTypes not work
723a85eb2df is described below

commit 723a85eb2dffa69571cba841380eb759a9b89321
Author: Jia Fan <fanjiaemi...@qq.com>
AuthorDate: Sun Sep 17 11:16:24 2023 +0300

    [SPARK-45078][SQL] Fix `array_insert` ImplicitCastInputTypes not work
    
    ### What changes were proposed in this pull request?
    This PR fixes an issue where calling `array_insert` with a different type between the array and the
insert column throws an exception. In some cases it should execute successfully.
    eg:
    ```sql
    select array_insert(array(1), 2, cast(2 as tinyint))
    ```
    Currently, `ImplicitCastInputTypes` in `ArrayInsert` always returns an empty array, so
Spark cannot convert `tinyint` to `int`.
    
    ### Why are the changes needed?
    Fixes incorrect behavior in `array_insert`
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Add new test.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #42951 from Hisoka-X/SPARK-45078_arrayinsert_type_mismatch.
    
    Authored-by: Jia Fan <fanjiaemi...@qq.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit e84c66db60c78476806161479344cd32a7606ab1)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/expressions/collectionOperations.scala     | 1 -
 .../test/resources/sql-tests/analyzer-results/ansi/array.sql.out  | 7 +++++++
 .../src/test/resources/sql-tests/analyzer-results/array.sql.out   | 7 +++++++
 sql/core/src/test/resources/sql-tests/inputs/array.sql            | 1 +
 sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out  | 8 ++++++++
 sql/core/src/test/resources/sql-tests/results/array.sql.out       | 8 ++++++++
 6 files changed, 31 insertions(+), 1 deletion(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index fe9c4015c15..ade4a6c5be7 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -4711,7 +4711,6 @@ case class ArrayInsert(
         }
       case (e1, e2, e3) => Seq.empty
     }
-    Seq.empty
   }
 
   override def checkInputDataTypes(): TypeCheckResult = {
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out
index cd101c7a524..6fc30815793 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/ansi/array.sql.out
@@ -531,6 +531,13 @@ Project [array_insert(array(2, 3, cast(null as int), 4), 
-5, 1, false) AS array_
 +- OneRowRelation
 
 
+-- !query
+select array_insert(array(1), 2, cast(2 as tinyint))
+-- !query analysis
+Project [array_insert(array(1), 2, cast(cast(2 as tinyint) as int), false) AS 
array_insert(array(1), 2, CAST(2 AS TINYINT))#x]
++- OneRowRelation
+
+
 -- !query
 set spark.sql.legacy.negativeIndexInArrayInsert=true
 -- !query analysis
diff --git 
a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out 
b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
index 8279fb3362e..e0585b77cb6 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/array.sql.out
@@ -531,6 +531,13 @@ Project [array_insert(array(2, 3, cast(null as int), 4), 
-5, 1, false) AS array_
 +- OneRowRelation
 
 
+-- !query
+select array_insert(array(1), 2, cast(2 as tinyint))
+-- !query analysis
+Project [array_insert(array(1), 2, cast(cast(2 as tinyint) as int), false) AS 
array_insert(array(1), 2, CAST(2 AS TINYINT))#x]
++- OneRowRelation
+
+
 -- !query
 set spark.sql.legacy.negativeIndexInArrayInsert=true
 -- !query analysis
diff --git a/sql/core/src/test/resources/sql-tests/inputs/array.sql 
b/sql/core/src/test/resources/sql-tests/inputs/array.sql
index 48edc6b4742..52a0906ea73 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/array.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/array.sql
@@ -141,6 +141,7 @@ select array_insert(array(1, 2, 3, NULL), cast(NULL as 
INT), 4);
 select array_insert(array(1, 2, 3, NULL), 4, cast(NULL as INT));
 select array_insert(array(2, 3, NULL, 4), 5, 5);
 select array_insert(array(2, 3, NULL, 4), -5, 1);
+select array_insert(array(1), 2, cast(2 as tinyint));
 
 set spark.sql.legacy.negativeIndexInArrayInsert=true;
 select array_insert(array(1, 3, 4), -2, 2);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
index 03be0f9d84b..49e18411ffa 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
@@ -659,6 +659,14 @@ struct<array_insert(array(2, 3, NULL, 4), -5, 
1):array<int>>
 [1,2,3,null,4]
 
 
+-- !query
+select array_insert(array(1), 2, cast(2 as tinyint))
+-- !query schema
+struct<array_insert(array(1), 2, CAST(2 AS TINYINT)):array<int>>
+-- !query output
+[1,2]
+
+
 -- !query
 set spark.sql.legacy.negativeIndexInArrayInsert=true
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/array.sql.out 
b/sql/core/src/test/resources/sql-tests/results/array.sql.out
index 9dbf4fbebc2..e568f5fa779 100644
--- a/sql/core/src/test/resources/sql-tests/results/array.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/array.sql.out
@@ -540,6 +540,14 @@ struct<array_insert(array(2, 3, NULL, 4), -5, 
1):array<int>>
 [1,2,3,null,4]
 
 
+-- !query
+select array_insert(array(1), 2, cast(2 as tinyint))
+-- !query schema
+struct<array_insert(array(1), 2, CAST(2 AS TINYINT)):array<int>>
+-- !query output
+[1,2]
+
+
 -- !query
 set spark.sql.legacy.negativeIndexInArrayInsert=true
 -- !query schema


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to