This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cf1a80eeae8 [SPARK-40152][SQL] Fix split_part codegen compilation issue
cf1a80eeae8 is described below

commit cf1a80eeae8bf815270fb39568b1846c2bd8d437
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Sun Aug 21 14:30:05 2022 -0500

    [SPARK-40152][SQL] Fix split_part codegen compilation issue
    
    ### What changes were proposed in this pull request?
    
    Fix `split_part` codegen compilation issue:
    ```sql
    SELECT split_part(str, delimiter, partNum) FROM VALUES ('11.12.13', '.', 3) AS v1(str, delimiter, partNum);
    ```
    ```
    org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 42, Column 1: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 42, Column 1: Expression "project_isNull_0 = false" is not a type
    ```
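
    The error comes from Janino: when an assignment is emitted without a trailing `;`, the parser treats it as the (invalid) type of a local variable declaration for the statement that follows. Below is a standalone sketch of the same failure mode, runnable in a Scala REPL with Janino on the classpath (Spark already depends on it); the class body and variable names are made up for illustration, not the actual generated code:

    ```scala
    import org.codehaus.janino.SimpleCompiler

    // Hypothetical stand-in for `generated.java`: the first assignment in
    // eval() is missing its ';', so Janino tries to read it as the type of a
    // local variable declaration and rejects it.
    val brokenSource =
      """
        |public class Generated {
        |  boolean project_isNull_0;
        |  void eval() {
        |    project_isNull_0 = false
        |    project_isNull_0 = true;
        |  }
        |}
        |""".stripMargin

    // Expected to fail with a CompileException of the same shape as above:
    //   Expression "project_isNull_0 = false" is not a type
    new SimpleCompiler().cook(brokenSource)
    ```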
    
    ### Why are the changes needed?
    
    Fix a bug in the code generated by `ElementAt` (which `split_part` is rewritten to): the assignments emitted for the default-value branch were missing trailing semicolons, so Janino failed to compile the generated Java code.
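
    For illustration, a minimal sketch of the template shape before and after the change, runnable in a Scala REPL (`Stub` and the literal values are made up here; the real code builds the string in `ElementAt.doGenCode` from `ExprCode` instances):

    ```scala
    // Illustrative stand-in for the codegen state; not Spark's ExprCode class.
    case class Stub(code: String, isNull: String, value: String)
    val ev = Stub("", "project_isNull_0", "project_value_0")
    val defaultValueEval = Stub("boolean tmp_isNull = true;", "tmp_isNull", "null")

    // Before: the interpolated assignments are not terminated, so the emitted
    // Java contains `project_isNull_0 = tmp_isNull` immediately followed by the
    // next statement, which Janino cannot parse.
    val before =
      s"""
         |${defaultValueEval.code}
         |${ev.isNull} = ${defaultValueEval.isNull}
         |${ev.value} = ${defaultValueEval.value}
         |""".stripMargin

    // After: every emitted line is a complete, ';'-terminated Java statement.
    val after =
      s"""
         |${defaultValueEval.code};
         |${ev.isNull} = ${defaultValueEval.isNull};
         |${ev.value} = ${defaultValueEval.value};
         |""".stripMargin

    println(after)
    ```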
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Unit test.
    
    Closes #37589 from wangyum/SPARK-40152.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../spark/sql/catalyst/expressions/collectionOperations.scala     | 6 +++---
 sql/core/src/test/resources/sql-tests/inputs/string-functions.sql | 1 +
 .../resources/sql-tests/results/ansi/string-functions.sql.out     | 8 ++++++++
 .../src/test/resources/sql-tests/results/string-functions.sql.out | 8 ++++++++
 4 files changed, 20 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 40eade75578..148bbc721e7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -2270,9 +2270,9 @@ case class ElementAt(
               case Some(value) =>
                 val defaultValueEval = value.genCode(ctx)
                 s"""
-                  ${defaultValueEval.code}
-                  ${ev.isNull} = ${defaultValueEval.isNull}
-                  ${ev.value} = ${defaultValueEval.value}
+                  ${defaultValueEval.code};
+                  ${ev.isNull} = ${defaultValueEval.isNull};
+                  ${ev.value} = ${defaultValueEval.value};
                 """.stripMargin
               case None => s"${ev.isNull} = true;"
             }
diff --git a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
index e1c97b468f2..058ea891797 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/string-functions.sql
@@ -38,6 +38,7 @@ SELECT split_part('11.12.13', '.', 4);
 SELECT split_part('11.12.13', '.', 5);
 SELECT split_part('11.12.13', '.', -5);
 SELECT split_part(null, '.', 1);
+SELECT split_part(str, delimiter, partNum) FROM VALUES ('11.12.13', '.', 3) AS v1(str, delimiter, partNum);
 
 -- substring function
 SELECT substr('Spark SQL', 5);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 08dcc011f24..add89a635a8 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -239,6 +239,14 @@ struct<split_part(NULL, ., 1):string>
 NULL
 
 
+-- !query
+SELECT split_part(str, delimiter, partNum) FROM VALUES ('11.12.13', '.', 3) AS v1(str, delimiter, partNum)
+-- !query schema
+struct<split_part(str, delimiter, partNum):string>
+-- !query output
+13
+
+
 -- !query
 SELECT substr('Spark SQL', 5)
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index a02b27142ff..dedbd29d4bb 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -205,6 +205,14 @@ struct<split_part(NULL, ., 1):string>
 NULL
 
 
+-- !query
+SELECT split_part(str, delimiter, partNum) FROM VALUES ('11.12.13', '.', 3) AS v1(str, delimiter, partNum)
+-- !query schema
+struct<split_part(str, delimiter, partNum):string>
+-- !query output
+13
+
+
 -- !query
 SELECT substr('Spark SQL', 5)
 -- !query schema

