Repository: spark
Updated Branches:
  refs/heads/master c3b4a94a9 -> 804515f82


[SPARK-21318][SQL] Improve exception message thrown by `lookupFunction`

## What changes were proposed in this pull request?

When a function actually exists in the currently selected database but fails to initialize during `lookupFunction`, the exception message is:
```
This function is neither a registered temporary function nor a permanent function registered in the database 'default'.
```

This message is misleading and makes it hard to track down the real cause. This PR fixes the message so that the original initialization error is surfaced instead.
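
For illustration, the scenario looks roughly like this (a minimal sketch; `broken_udaf`, `com.example.BrokenUDAF`, and the `src` table are hypothetical names, and `spark` is assumed to be an existing `SparkSession`):

```scala
// Hypothetical reproduction: the function is registered as a permanent function in
// the current database, but its class throws while the evaluator/expression is built.
spark.sql("CREATE FUNCTION broken_udaf AS 'com.example.BrokenUDAF'")

// Before this change, the resulting AnalysisException claimed the function was neither
// a temporary nor a permanent function in 'default'; with this change, the original
// initialization error is thrown instead.
spark.sql("SELECT broken_udaf(value) FROM src")
```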

## How was this patch tested?

new test case + manual tests

Closes #18544 from stanzhai/fix-udf-error-message.

Authored-by: Stan Zhai <m...@stanzhai.site>
Signed-off-by: Wenchen Fan <wenc...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/804515f8
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/804515f8
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/804515f8

Branch: refs/heads/master
Commit: 804515f821086ea685815d3c8eff42d76b7d9e4e
Parents: c3b4a94
Author: Stan Zhai <m...@stanzhai.site>
Authored: Mon Sep 24 21:33:12 2018 +0800
Committer: Wenchen Fan <wenc...@databricks.com>
Committed: Mon Sep 24 21:33:12 2018 +0800

----------------------------------------------------------------------
 .../catalyst/catalog/SessionCatalogSuite.scala  |  3 ++
 .../spark/sql/hive/HiveSessionCatalog.scala     | 10 +++---
 .../spark/sql/hive/execution/UDAFEmpty.java     | 32 ++++++++++++++++++++
 .../org/apache/spark/sql/hive/UDFSuite.scala    | 16 ++++++++++
 4 files changed, 56 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/804515f8/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 89fabd4..19e8c03 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -1427,6 +1427,7 @@ abstract class SessionCatalogSuite extends AnalysisTest {
     Seq(true, false) foreach { caseSensitive =>
       val conf = new SQLConf().copy(SQLConf.CASE_SENSITIVE -> caseSensitive)
      val catalog = new SessionCatalog(newBasicCatalog(), new SimpleFunctionRegistry, conf)
+      catalog.setCurrentDatabase("db1")
       try {
         val analyzer = new Analyzer(catalog, conf)
 
@@ -1440,6 +1441,8 @@ abstract class SessionCatalogSuite extends AnalysisTest {
         }
 
         assert(cause.getMessage.contains("Undefined function: 'undefined_fn'"))
+        // SPARK-21318: the error message should contain the current database name
+        assert(cause.getMessage.contains("db1"))
       } finally {
         catalog.reset()
       }

http://git-wip-us.apache.org/repos/asf/spark/blob/804515f8/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index de41bb4..405c0c8 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -131,14 +131,14 @@ private[sql] class HiveSessionCatalog(
     Try(super.lookupFunction(funcName, children)) match {
       case Success(expr) => expr
       case Failure(error) =>
-        if (functionRegistry.functionExists(funcName)) {
-          // If the function actually exists in functionRegistry, it means that there is an
-          // error when we create the Expression using the given children.
+        if (super.functionExists(name)) {
+          // If the function exists (either in functionRegistry or externalCatalog),
+          // it means that there is an error when we create the Expression using the given children.
           // We need to throw the original exception.
           throw error
         } else {
-          // This function is not in functionRegistry, let's try to load it as a Hive's
-          // built-in function.
+          // This function does not exist (neither in functionRegistry nor externalCatalog),
+          // let's try to load it as a Hive built-in function.
           // Hive is case insensitive.
           val functionName = funcName.unquotedString.toLowerCase(Locale.ROOT)
           if (!hiveFunctions.contains(functionName)) {
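
Pieced together from the hunk above, the fallback now keys off `super.functionExists(name)` (which covers both the function registry and the external catalog) instead of the in-memory `functionRegistry` alone. A minimal, self-contained sketch of that decision, with names and signature simplified for illustration (this is not the actual method in `HiveSessionCatalog`):

```scala
import scala.util.{Failure, Success, Try}

// Simplified sketch: rethrow the original error when the function is known to the
// catalog but fails to build, and only fall back to Hive built-ins when it is unknown.
def resolveFunction[E](
    lookup: () => E,
    existsInCatalog: () => Boolean,
    loadHiveBuiltin: () => E): E = {
  Try(lookup()) match {
    case Success(expr) => expr
    case Failure(error) if existsInCatalog() =>
      // Registered (temporarily or permanently) but broken: surface the real error.
      throw error
    case Failure(_) =>
      // Not registered anywhere: try Hive's built-in functions instead.
      loadHiveBuiltin()
  }
}
```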

http://git-wip-us.apache.org/repos/asf/spark/blob/804515f8/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDAFEmpty.java
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDAFEmpty.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDAFEmpty.java
new file mode 100644
index 0000000..badc396
--- /dev/null
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/execution/UDAFEmpty.java
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.hive.execution;
+
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
+import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+/**
+ * An empty UDAF that throws a semantic exception
+ */
+public class UDAFEmpty extends AbstractGenericUDAFResolver {
+    @Override
+    public GenericUDAFEvaluator getEvaluator(TypeInfo[] info) throws SemanticException {
+        throw new SemanticException("Can not get an evaluator of the empty UDAF");
+    }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/804515f8/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
index a56c6f7..d567128 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/UDFSuite.scala
@@ -193,4 +193,20 @@ class UDFSuite
       }
     }
   }
+
+  test("SPARK-21318: The correct exception message should be thrown " +
+    "if a UDF/UDAF has already been registered") {
+    val functionName = "empty"
+    val functionClass = classOf[org.apache.spark.sql.hive.execution.UDAFEmpty].getCanonicalName
+
+    withUserDefinedFunction(functionName -> false) {
+      sql(s"CREATE FUNCTION $functionName AS '$functionClass'")
+
+      val e = intercept[AnalysisException] {
+        sql(s"SELECT $functionName(value) from $testTableName")
+      }
+
+      assert(e.getMessage.contains("Can not get an evaluator of the empty UDAF"))
+    }
+  }
 }

