This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 4b84e57  [SPARK-32877][SQL][TEST] Add test for Hive UDF complex decimal type
4b84e57 is described below

commit 4b84e571f06dec953d532e60942b0e96b4100abe
Author: ulysses <youxi...@weidian.com>
AuthorDate: Thu Sep 24 22:16:05 2020 -0700

    [SPARK-32877][SQL][TEST] Add test for Hive UDF complex decimal type
    
    ### What changes were proposed in this pull request?
    
    Add test to cover Hive UDF whose input contains complex decimal type.
    Add comment to explain why we can't make `HiveSimpleUDF` extend `ImplicitTypeCasts`.
    
    ### Why are the changes needed?
    
    For better test coverage with Hive, whether or not we are compatible with it.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Add test.
    
    Closes #29863 from ulysses-you/SPARK-32877-test.
    
    Authored-by: ulysses <youxi...@weidian.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
    (cherry picked from commit f2fc96667481169affbc20cec95b9fc1c19fc7c3)
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../scala/org/apache/spark/sql/hive/hiveUDFs.scala |  5 ++++
 .../spark/sql/hive/execution/HiveUDFSuite.scala    | 31 ++++++++++++++++++++++
 2 files changed, 36 insertions(+)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
index 05d608a..3117781 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/hiveUDFs.scala
@@ -41,6 +41,11 @@ import org.apache.spark.sql.hive.HiveShim._
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
 
+/**
+ * Here we cannot extends `ImplicitTypeCasts` to compatible with UDF input data type, the reason is:
+ * we use children data type to reflect UDF method first and will get exception if it fails so that
+ * we can never go into `ImplicitTypeCasts`.
+ */
 private[hive] case class HiveSimpleUDF(
     name: String, funcWrapper: HiveFunctionWrapper, children: Seq[Expression])
   extends Expression
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index 7bca2af..d3c8428 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -34,6 +34,7 @@ import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.plans.logical.Project
 import org.apache.spark.sql.execution.command.FunctionsCommand
 import org.apache.spark.sql.functions.max
+import org.apache.spark.sql.hive.HiveUtils
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestUtils
@@ -658,6 +659,25 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
     }
   }
 
+  test("SPARK-32877: add test for Hive UDF complex decimal type") {
+    assume(HiveUtils.isHive23)
+    withUserDefinedFunction("testArraySum" -> false) {
+      sql(s"CREATE FUNCTION testArraySum AS '${classOf[ArraySumUDF].getName}'")
+      checkAnswer(
+        sql("SELECT testArraySum(array(1, 1.1, 1.2))"),
+        Seq(Row(3.3)))
+
+      val msg = intercept[AnalysisException] {
+        sql("SELECT testArraySum(1)")
+      }.getMessage
+      assert(msg.contains(s"No handler for UDF/UDAF/UDTF '${classOf[ArraySumUDF].getName}'"))
+
+      val msg2 = intercept[AnalysisException] {
+        sql("SELECT testArraySum(1, 2)")
+      }.getMessage
+      assert(msg2.contains(s"No handler for UDF/UDAF/UDTF '${classOf[ArraySumUDF].getName}'"))
+    }
+  }
 }
 
 class TestPair(x: Int, y: Int) extends Writable with Serializable {
@@ -741,3 +761,14 @@ class StatelessUDF extends UDF {
     result
   }
 }
+
+class ArraySumUDF extends UDF {
+  import scala.collection.JavaConverters._
+  def evaluate(values: java.util.List[java.lang.Double]): java.lang.Double = {
+    var r = 0d
+    for (v <- values.asScala) {
+      r += v
+    }
+    r
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to