This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 008b3a34759 [SPARK-40152][SQL][TESTS] Add tests for SplitPart
008b3a34759 is described below

commit 008b3a347595cc47ff30853d7141b17bf7be4f13
Author: Yuming Wang <yumw...@ebay.com>
AuthorDate: Tue Aug 23 08:55:27 2022 -0500

    [SPARK-40152][SQL][TESTS] Add tests for SplitPart
    
    ### What changes were proposed in this pull request?
    
    Add tests for `SplitPart`.
    
    ### Why are the changes needed?
    
    Improve test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    N/A.
    
    Closes #37626 from wangyum/SPARK-40152-2.
    
    Authored-by: Yuming Wang <yumw...@ebay.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
    (cherry picked from commit 4f525eed7d5d461498aee68c4d3e57941f9aae2c)
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../catalyst/expressions/collectionOperations.scala  |  2 +-
 .../expressions/CollectionExpressionsSuite.scala     | 20 ++++++++++++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 8186d006296..53bda0cbdc7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -2225,7 +2225,7 @@ case class ElementAt(
               case Some(value) =>
                 val defaultValueEval = value.genCode(ctx)
                 s"""
-                  ${defaultValueEval.code};
+                  ${defaultValueEval.code}
                   ${ev.isNull} = ${defaultValueEval.isNull};
                   ${ev.value} = ${defaultValueEval.value};
                 """.stripMargin
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index 802988038a6..8fb04cd1ac7 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -2532,4 +2532,24 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
           Date.valueOf("2017-02-12")))
     }
   }
+
+  test("SplitPart") {
+    val delimiter = Literal.create(".", StringType)
+    val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
+    val outOfBoundValue = Some(Literal.create("", StringType))
+
+    checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
+    checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
+    checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
+    checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
+
+    checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
+      Literal(1), outOfBoundValue), null)
+    checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
+      Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
+
+    intercept[Exception] {
+      checkEvaluation(ElementAt(str, Literal(0), outOfBoundValue), null)
+    }.getMessage.contains("The index 0 is invalid")
+  }
 }

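For context, the SplitPart expression tested above backs the split_part SQL function introduced in Spark 3.3, rewriting to ElementAt over StringSplitSQL as the test constructs show. Below is a minimal sketch, not part of the commit, of the same boundary semantics exercised at the SQL level; it assumes a local Spark 3.3+ session, and the object and app names are illustrative only.

import org.apache.spark.sql.SparkSession

object SplitPartDemo {
  def main(args: Array[String]): Unit = {
    // Local session for illustration only; any Spark 3.3+ session works.
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("split-part-demo")
      .getOrCreate()

    // 1-based index, counted from the left: returns "13".
    spark.sql("SELECT split_part('11.12.13', '.', 3)").show()
    // An index past either end returns the empty string instead of failing,
    // mirroring the Literal(10) and Literal(-10) cases above.
    spark.sql("SELECT split_part('11.12.13', '.', 10)").show()
    spark.sql("SELECT split_part('11.12.13', '.', -10)").show()
    // A NULL string or NULL delimiter yields NULL.
    spark.sql("SELECT split_part(NULL, '.', 1)").show()
    // Index 0 is invalid and raises an error, matching the intercept[Exception] test:
    // spark.sql("SELECT split_part('11.12.13', '.', 0)").show()

    spark.stop()
  }
}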
