This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch drop_func
in repository https://gitbox.apache.org/repos/asf/spark.git

commit cbac500067119f92e8cd71b1b364d4c118a37b31
Author: Wenchen Fan <[email protected]>
AuthorDate: Tue Dec 30 02:13:38 2025 +0800

    Refactor Drop/RefreshFunction to avoid catalog lookup
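
    DROP FUNCTION and REFRESH FUNCTION previously parsed into
    UnresolvedFunctionName, which forced the analyzer to look the function
    up in the catalog just to resolve its name. They now parse into
    UnresolvedIdentifier, and ResolveCatalogs resolves the identifier by
    name alone: a single-part name is first checked against the builtin and
    temporary function registries (surfaced as the new "builtin" namespace
    and the existing "session" namespace under FakeSystemCatalog), and
    everything else goes through the usual catalog/identifier expansion.
    ResolveSessionCatalog then raises the builtin/temp-function errors and
    plans the v1 commands, so DROP FUNCTION IF EXISTS no longer needs the
    NoopCommand fallback and RefreshFunctionCommand takes a fully qualified
    FunctionIdentifier. For example, from the updated golden files:

        -- DROP FUNCTION IF EXISTS foo1c1
        -- before: NoopCommand DROP FUNCTION, [foo1c1]
        -- after:  DropFunctionCommand spark_catalog.default.foo1c1, true, false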
---
 .../src/main/resources/error/error-conditions.json |  2 +-
 .../spark/sql/catalyst/analysis/Analyzer.scala     | 11 ++---
 .../sql/catalyst/analysis/ResolveCatalogs.scala    | 36 +++++++++++++++-
 .../analysis/ResolveCommandsWithIfExists.scala     |  4 +-
 .../sql/catalyst/analysis/v2ResolutionPlans.scala  |  2 -
 .../spark/sql/catalyst/parser/AstBuilder.scala     | 48 ++++++++--------------
 .../sql/connector/catalog/CatalogManager.scala     |  1 +
 .../spark/sql/errors/QueryCompilationErrors.scala  | 10 +++--
 .../spark/sql/catalyst/parser/DDLParserSuite.scala | 11 ++---
 .../catalyst/analysis/ResolveSessionCatalog.scala  | 41 ++++++++++++++----
 .../org/apache/spark/sql/classic/Catalog.scala     |  5 +--
 .../spark/sql/execution/SparkSqlParser.scala       | 27 +++++-------
 .../spark/sql/execution/command/functions.scala    | 22 +++-------
 .../identifier-clause-legacy.sql.out               |  4 +-
 .../analyzer-results/identifier-clause.sql.out     |  4 +-
 .../sql-tests/analyzer-results/sql-udf.sql.out     | 44 ++++++++++----------
 .../analyzer-results/udf/udf-udaf.sql.out          |  2 +-
 .../sql/connector/DataSourceV2FunctionSuite.scala  | 22 ++++++----
 .../sql/execution/command/DDLParserSuite.scala     | 12 ++----
 .../spark/sql/execution/command/DDLSuite.scala     | 41 ++++++------------
 20 files changed, 176 insertions(+), 173 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index 614944e68fc6..396f1933af17 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -7803,7 +7803,7 @@
   },
   "_LEGACY_ERROR_TEMP_1017" : {
     "message" : [
-      "<name> is a built-in/temporary function. '<cmd>' expects a persistent function.<hintStr>."
+      "<name> is a temporary function. '<cmd>' expects a persistent function.<hintStr>"
     ]
   },
   "_LEGACY_ERROR_TEMP_1018" : {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index f9df64303f43..46f2267e1519 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -2096,16 +2096,11 @@ class Analyzer(
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUpWithPruning(
       _.containsAnyPattern(UNRESOLVED_FUNC, UNRESOLVED_FUNCTION, GENERATOR,
         UNRESOLVED_TABLE_VALUED_FUNCTION, UNRESOLVED_TVF_ALIASES), ruleId) {
-      // Resolve functions with concrete relations from v2 catalog.
-      case u @ UnresolvedFunctionName(nameParts, cmd, requirePersistentFunc, mismatchHint, _) =>
+      // Resolve scalar/table functions and get the function metadata for DESCRIBE FUNCTION.
+      case u @ UnresolvedFunctionName(nameParts, _, _) =>
        functionResolution.lookupBuiltinOrTempFunction(nameParts, None)
          .orElse(functionResolution.lookupBuiltinOrTempTableFunction(nameParts)).map { info =>
-          if (requirePersistentFunc) {
-            throw QueryCompilationErrors.expectPersistentFuncError(
-              nameParts.head, cmd, mismatchHint, u)
-          } else {
-            ResolvedNonPersistentFunc(nameParts.head, V1Function.metadataOnly(info))
-          }
+          ResolvedNonPersistentFunc(nameParts.head, V1Function.metadataOnly(info))
         }.getOrElse {
           val CatalogAndIdentifier(catalog, ident) =
             relationResolution.expandIdentifier(nameParts)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
index 6307ccd5b975..e433401511d3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
@@ -21,10 +21,11 @@ import scala.jdk.CollectionConverters._
 
 import org.apache.spark.SparkException
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.SqlScriptingContextManager
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, SqlScriptingContextManager}
 import org.apache.spark.sql.catalyst.expressions.AttributeReference
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.catalyst.trees.{CurrentOrigin, Origin}
import org.apache.spark.sql.catalyst.util.SparkCharVarcharUtils.replaceCharVarcharWithString
 import org.apache.spark.sql.connector.catalog._
 import org.apache.spark.sql.errors.DataTypeErrors.toSQLId
@@ -74,6 +75,7 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
         case plan => plan
       }
       c.copy(names = resolved)
+
     case d @ DropVariable(UnresolvedIdentifier(nameParts, _), _) =>
       if (withinSqlScript) {
         throw new AnalysisException(
@@ -83,6 +85,12 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
       assertValidSessionVariableNameParts(nameParts, resolved)
       d.copy(name = resolved)
 
+    case d @ DropFunction(u @ UnresolvedIdentifier(nameParts, _), _) =>
+      d.copy(child = resolveFunctionIdentifier(nameParts, u.origin))
+
+    case r @ RefreshFunction(u @ UnresolvedIdentifier(nameParts, _)) =>
+      r.copy(child = resolveFunctionIdentifier(nameParts, u.origin))
+
    // For CREATE TABLE and REPLACE TABLE statements, resolve the table identifier and include
    // the table columns as output. This allows expressions (e.g., constraints) referencing these
     // columns to be resolved correctly.
@@ -126,6 +134,32 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
     }
   }
 
+  /**
+   * Resolves a function identifier, checking for builtin and temp functions first.
+   * Builtin and temp functions are only registered with unqualified names.
+   */
+  private def resolveFunctionIdentifier(
+      nameParts: Seq[String],
+      origin: Origin): ResolvedIdentifier = CurrentOrigin.withOrigin(origin) {
+    if (nameParts.length == 1) {
+      val funcName = FunctionIdentifier(nameParts.head)
+      val sessionCatalog = catalogManager.v1SessionCatalog
+      if (sessionCatalog.isBuiltinFunction(funcName)) {
+        val ident = Identifier.of(Array(CatalogManager.BUILTIN_NAMESPACE), nameParts.head)
+        ResolvedIdentifier(FakeSystemCatalog, ident)
+      } else if (sessionCatalog.isTemporaryFunction(funcName)) {
+        val ident = Identifier.of(Array(CatalogManager.SESSION_NAMESPACE), nameParts.head)
+        ResolvedIdentifier(FakeSystemCatalog, ident)
+      } else {
+        val CatalogAndIdentifier(catalog, ident) = nameParts
+        ResolvedIdentifier(catalog, ident)
+      }
+    } else {
+      val CatalogAndIdentifier(catalog, ident) = nameParts
+      ResolvedIdentifier(catalog, ident)
+    }
+  }
+
   private def resolveNamespace(
       catalog: CatalogPlugin,
       ns: Seq[String],
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
index 65c23c3d3b20..22080bcd4e29 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCommandsWithIfExists.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
-import org.apache.spark.sql.catalyst.plans.logical.{DropFunction, LogicalPlan, NoopCommand, UncacheTable}
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, NoopCommand, UncacheTable}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.TreePattern.COMMAND
 
@@ -31,7 +31,5 @@ object ResolveCommandsWithIfExists extends Rule[LogicalPlan] {
     _.containsPattern(COMMAND)) {
     case UncacheTable(u: UnresolvedRelation, ifExists, _) if ifExists =>
       NoopCommand("UNCACHE TABLE", u.multipartIdentifier)
-    case DropFunction(u: UnresolvedFunctionName, ifExists) if ifExists =>
-      NoopCommand("DROP FUNCTION", u.multipartIdentifier)
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
index b52091afc133..9b3af836ec73 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala
@@ -123,8 +123,6 @@ case class UnresolvedFieldPosition(position: ColumnPosition) extends FieldPositi
 case class UnresolvedFunctionName(
     multipartIdentifier: Seq[String],
     commandName: String,
-    requirePersistent: Boolean,
-    funcTypeMismatchHint: Option[String],
    possibleQualifiedName: Option[Seq[String]] = None) extends UnresolvedLeafNode {
   final override val nodePatterns: Seq[TreePattern] = Seq(UNRESOLVED_FUNC)
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 780a06060341..a7a0008a9efc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3832,21 +3832,22 @@ class AstBuilder extends DataTypeAstBuilder
   }
 
   /**
-   * Create an [[UnresolvedFunction]] from a multi-part identifier.
+   * Create an [[UnresolvedFunctionName]] from a multi-part identifier with proper origin.
    */
   private def createUnresolvedFunctionName(
       ctx: ParserRuleContext,
       ident: Seq[String],
-      commandName: String,
-      requirePersistent: Boolean = false,
-      funcTypeMismatchHint: Option[String] = None,
-      possibleQualifiedName: Option[Seq[String]] = None): UnresolvedFunctionName = withOrigin(ctx) {
-    UnresolvedFunctionName(
-      ident,
-      commandName,
-      requirePersistent,
-      funcTypeMismatchHint,
-      possibleQualifiedName)
+      commandName: String): UnresolvedFunctionName = withOrigin(ctx) {
+    UnresolvedFunctionName(ident, commandName)
+  }
+
+  /**
+   * Create an [[UnresolvedIdentifier]] from a multi-part identifier with proper origin.
+   */
+  protected def createUnresolvedIdentifier(
+      ctx: ParserRuleContext,
+      ident: Seq[String]): UnresolvedIdentifier = withOrigin(ctx) {
+    UnresolvedIdentifier(ident)
   }
 
   /**
@@ -6326,23 +6327,14 @@ class AstBuilder extends DataTypeAstBuilder
           Seq(describeFuncName.getText)
         }
       DescribeFunction(
-        createUnresolvedFunctionName(
-          ctx.describeFuncName(),
-          functionName,
-          "DESCRIBE FUNCTION",
-          requirePersistent = false,
-          funcTypeMismatchHint = None),
+        createUnresolvedFunctionName(describeFuncName, functionName, "DESCRIBE FUNCTION"),
         EXTENDED != null)
     } else {
       DescribeFunction(
         withIdentClause(
           describeFuncName.identifierReference(),
-          createUnresolvedFunctionName(
-            describeFuncName.identifierReference,
-            _,
-            "DESCRIBE FUNCTION",
-            requirePersistent = false,
-            funcTypeMismatchHint = None)),
+          createUnresolvedFunctionName(describeFuncName.identifierReference, _, "DESCRIBE FUNCTION")
+        ),
         EXTENDED != null)
     }
   }
@@ -6384,15 +6376,9 @@ class AstBuilder extends DataTypeAstBuilder
   }
 
  override def visitRefreshFunction(ctx: RefreshFunctionContext): LogicalPlan = withOrigin(ctx) {
+    val identCtx = ctx.identifierReference
     RefreshFunction(
-      withIdentClause(
-        ctx.identifierReference,
-        createUnresolvedFunctionName(
-          ctx.identifierReference,
-          _,
-          "REFRESH FUNCTION",
-          requirePersistent = true,
-          funcTypeMismatchHint = None)))
+      withIdentClause(identCtx, createUnresolvedIdentifier(identCtx, _)))
   }
 
  override def visitCommentNamespace(ctx: CommentNamespaceContext): LogicalPlan = withOrigin(ctx) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogManager.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogManager.scala
index 9b8584604d32..d59ef5875cab 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogManager.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogManager.scala
@@ -162,4 +162,5 @@ private[sql] object CatalogManager {
   val SESSION_CATALOG_NAME: String = "spark_catalog"
   val SYSTEM_CATALOG_NAME = "system"
   val SESSION_NAMESPACE = "session"
+  val BUILTIN_NAMESPACE = "builtin"
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 4b711e065cf8..1241c213c911 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -3034,16 +3034,18 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
       messageParameters = Map("functionName" -> functionName))
   }
 
-  def cannotRefreshBuiltInFuncError(functionName: String): Throwable = {
+  def cannotRefreshBuiltInFuncError(functionName: String, t: TreeNode[_]): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1256",
-      messageParameters = Map("functionName" -> functionName))
+      messageParameters = Map("functionName" -> functionName),
+      origin = t.origin)
   }
 
-  def cannotRefreshTempFuncError(functionName: String): Throwable = {
+  def cannotRefreshTempFuncError(functionName: String, t: TreeNode[_]): Throwable = {
     new AnalysisException(
       errorClass = "_LEGACY_ERROR_TEMP_1257",
-      messageParameters = Map("functionName" -> functionName))
+      messageParameters = Map("functionName" -> functionName),
+      origin = t.origin)
   }
 
   def noSuchFunctionError(identifier: FunctionIdentifier): Throwable = {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 9e2def3072ab..a0267d08dedc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2511,7 +2511,7 @@ class DDLParserSuite extends AnalysisTest {
 
   test("DESCRIBE FUNCTION") {
     def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
-      UnresolvedFunctionName(name, "DESCRIBE FUNCTION", false, None)
+      UnresolvedFunctionName(name, "DESCRIBE FUNCTION")
     }
     comparePlans(
       parsePlan("DESC FUNCTION a"),
@@ -2528,15 +2528,12 @@ class DDLParserSuite extends AnalysisTest {
   }
 
   test("REFRESH FUNCTION") {
-    def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
-      UnresolvedFunctionName(name, "REFRESH FUNCTION", true, None)
-    }
     parseCompare("REFRESH FUNCTION c",
-      RefreshFunction(createFuncPlan(Seq("c"))))
+      RefreshFunction(UnresolvedIdentifier(Seq("c"))))
     parseCompare("REFRESH FUNCTION b.c",
-      RefreshFunction(createFuncPlan(Seq("b", "c"))))
+      RefreshFunction(UnresolvedIdentifier(Seq("b", "c"))))
     parseCompare("REFRESH FUNCTION a.b.c",
-      RefreshFunction(createFuncPlan(Seq("a", "b", "c"))))
+      RefreshFunction(UnresolvedIdentifier(Seq("a", "b", "c"))))
   }
 
   test("CREATE INDEX") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index eff95bf4f523..92ffbd08f814 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -532,20 +532,45 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
        ResolvedDatabaseInSessionCatalog(db), userScope, systemScope, pattern, output) =>
       ShowFunctionsCommand(db, pattern, userScope, systemScope, output)
 
-    case DropFunction(ResolvedPersistentFunc(catalog, identifier, _), ifExists) =>
+    case d @ DropFunction(ResolvedIdentifier(FakeSystemCatalog, ident), _) =>
+      // Builtin or temp function - throw appropriate error
+      assert(ident.namespace().length == 1)
+      val namespace = ident.namespace().head
+      if (namespace == CatalogManager.BUILTIN_NAMESPACE) {
+        throw QueryCompilationErrors.cannotDropBuiltinFuncError(ident.name())
+      } else {
+        assert(namespace == CatalogManager.SESSION_NAMESPACE)
+        // Temp function - user should use DROP TEMPORARY FUNCTION
+        throw QueryCompilationErrors.expectPersistentFuncError(
+          ident.name(),
+          "DROP FUNCTION",
+          Some("Please use DROP TEMPORARY FUNCTION to drop a temporary function."),
+          d)
+      }
+
+    case DropFunction(ResolvedIdentifier(catalog, ident), ifExists) =>
       if (isSessionCatalog(catalog)) {
-        val funcIdentifier = catalogManager.v1SessionCatalog.qualifyIdentifier(
-          identifier.asFunctionIdentifier)
-        DropFunctionCommand(funcIdentifier, ifExists, false)
+        val funcIdentifier = ident.asFunctionIdentifier.copy(catalog = Some(catalog.name))
+        DropFunctionCommand(funcIdentifier, ifExists, isTemp = false)
       } else {
        throw QueryCompilationErrors.missingCatalogDropFunctionAbilityError(catalog)
       }
 
-    case RefreshFunction(ResolvedPersistentFunc(catalog, identifier, _)) =>
+    case RefreshFunction(r @ ResolvedIdentifier(FakeSystemCatalog, ident)) =>
+      // Builtin or temp function - throw appropriate error
+      assert(ident.namespace().length == 1)
+      val namespace = ident.namespace().head
+      if (namespace == CatalogManager.BUILTIN_NAMESPACE) {
+        throw QueryCompilationErrors.cannotRefreshBuiltInFuncError(ident.name(), r)
+      } else {
+        assert(namespace == CatalogManager.SESSION_NAMESPACE)
+        throw QueryCompilationErrors.cannotRefreshTempFuncError(ident.name(), r)
+      }
+
+    case RefreshFunction(ResolvedIdentifier(catalog, ident)) =>
       if (isSessionCatalog(catalog)) {
-        val funcIdentifier = catalogManager.v1SessionCatalog.qualifyIdentifier(
-          identifier.asFunctionIdentifier)
-        RefreshFunctionCommand(funcIdentifier.database, funcIdentifier.funcName)
+        val funcIdentifier = ident.asFunctionIdentifier.copy(catalog = Some(catalog.name))
+        RefreshFunctionCommand(funcIdentifier)
       } else {
        throw QueryCompilationErrors.missingCatalogRefreshFunctionAbilityError(catalog)
       }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/classic/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/classic/Catalog.scala
index 71d12bf09b07..dea86604a971 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/classic/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/classic/Catalog.scala
@@ -323,8 +323,7 @@ class Catalog(sparkSession: SparkSession) extends catalog.Catalog {
   }
 
   private def functionExists(ident: Seq[String]): Boolean = {
-    val plan =
-      UnresolvedFunctionName(ident, Catalog.FUNCTION_EXISTS_COMMAND_NAME, false, None)
+    val plan = UnresolvedFunctionName(ident, Catalog.FUNCTION_EXISTS_COMMAND_NAME)
     try {
       sparkSession.sessionState.executePlan(plan).analyzed match {
         case _: ResolvedPersistentFunc => true
@@ -337,7 +336,7 @@ class Catalog(sparkSession: SparkSession) extends catalog.Catalog {
   }
 
   private def makeFunction(ident: Seq[String]): Function = {
-    val plan = UnresolvedFunctionName(ident, "Catalog.makeFunction", false, None)
+    val plan = UnresolvedFunctionName(ident, "Catalog.makeFunction")
     sparkSession.sessionState.executePlan(plan).analyzed match {
       case f: ResolvedPersistentFunc =>
         val className = f.func match {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 9af2a82cdd9e..2dd88eeeb1c3 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -29,8 +29,7 @@ import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{CurrentNamespace, GlobalTempView, LocalTempView,
   PersistedView, PlanWithUnresolvedIdentifier, SchemaEvolution, 
SchemaTypeEvolution,
-  UnresolvedAttribute, UnresolvedFunctionName, UnresolvedIdentifier, UnresolvedNamespace,
-  UnresolvedProcedure}
+  UnresolvedAttribute, UnresolvedIdentifier, UnresolvedNamespace, UnresolvedProcedure}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
 import org.apache.spark.sql.catalyst.parser._
@@ -1032,9 +1031,10 @@ class SparkSqlAstBuilder extends AstBuilder {
    * }}}
    */
  override def visitDropFunction(ctx: DropFunctionContext): LogicalPlan = withOrigin(ctx) {
-    withIdentClause(ctx.identifierReference(), functionName => {
-      val isTemp = ctx.TEMPORARY != null
-      if (isTemp) {
+    val isTemp = ctx.TEMPORARY != null
+    val identCtx = ctx.identifierReference()
+    if (isTemp) {
+      withIdentClause(identCtx, functionName => {
         if (functionName.length > 1) {
          throw QueryParsingErrors.invalidNameForDropTempFunc(functionName, ctx)
         }
@@ -1042,17 +1042,12 @@ class SparkSqlAstBuilder extends AstBuilder {
           identifier = FunctionIdentifier(functionName.head),
           ifExists = ctx.EXISTS != null,
           isTemp = true)
-      } else {
-        val hintStr = "Please use fully qualified identifier to drop the persistent function."
-        DropFunction(
-          UnresolvedFunctionName(
-            functionName,
-            "DROP FUNCTION",
-            requirePersistent = true,
-            funcTypeMismatchHint = Some(hintStr)),
-          ctx.EXISTS != null)
-      }
-    })
+      })
+    } else {
+      DropFunction(
+        withIdentClause(identCtx, createUnresolvedIdentifier(identCtx, _)),
+        ctx.EXISTS != null)
+    }
   }
 
   private def toStorageFormat(
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
index b9a7151b4aff..b190d91df588 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
@@ -204,31 +204,19 @@ case class ShowFunctionsCommand(
  *    REFRESH FUNCTION functionName
  * }}}
  */
-case class RefreshFunctionCommand(
-    databaseName: Option[String],
-    functionName: String)
-  extends LeafRunnableCommand {
+case class RefreshFunctionCommand(identifier: FunctionIdentifier) extends LeafRunnableCommand {
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-    val ident = FunctionIdentifier(functionName, databaseName)
-    if (FunctionRegistry.builtin.functionExists(ident)) {
-      throw QueryCompilationErrors.cannotRefreshBuiltInFuncError(functionName)
-    }
-    if (catalog.isTemporaryFunction(ident)) {
-      throw QueryCompilationErrors.cannotRefreshTempFuncError(functionName)
-    }
-
-    val qualified = catalog.qualifyIdentifier(ident)
     // we only refresh the permanent function.
-    if (catalog.isPersistentFunction(qualified)) {
+    if (catalog.isPersistentFunction(identifier)) {
       // register overwrite function.
-      val func = catalog.getFunctionMetadata(qualified)
+      val func = catalog.getFunctionMetadata(identifier)
       catalog.registerFunction(func, true)
     } else {
       // clear cached function and throw exception
-      catalog.unregisterFunction(qualified)
-      throw QueryCompilationErrors.noSuchFunctionError(qualified)
+      catalog.unregisterFunction(identifier)
+      throw QueryCompilationErrors.noSuchFunctionError(identifier)
     }
 
     Seq.empty[Row]
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
index 95639c72a0ad..f0a7722886ed 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause-legacy.sql.out
@@ -540,13 +540,13 @@ DescribeFunctionCommand org.apache.spark.sql.catalyst.expressions.ExpressionInfo
 -- !query
 REFRESH FUNCTION IDENTIFIER('ident.' || 'myDoubleAvg')
 -- !query analysis
-RefreshFunctionCommand ident, mydoubleavg
+RefreshFunctionCommand spark_catalog.ident.myDoubleAvg
 
 
 -- !query
 DROP FUNCTION IDENTIFIER('ident.' || 'myDoubleAvg')
 -- !query analysis
-DropFunctionCommand spark_catalog.ident.mydoubleavg, false, false
+DropFunctionCommand spark_catalog.ident.myDoubleAvg, false, false
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
index e3150b199658..00740529b8a8 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/identifier-clause.sql.out
@@ -540,13 +540,13 @@ DescribeFunctionCommand org.apache.spark.sql.catalyst.expressions.ExpressionInfo
 -- !query
 REFRESH FUNCTION IDENTIFIER('ident.' || 'myDoubleAvg')
 -- !query analysis
-RefreshFunctionCommand ident, mydoubleavg
+RefreshFunctionCommand spark_catalog.ident.myDoubleAvg
 
 
 -- !query
 DROP FUNCTION IDENTIFIER('ident.' || 'myDoubleAvg')
 -- !query analysis
-DropFunctionCommand spark_catalog.ident.mydoubleavg, false, false
+DropFunctionCommand spark_catalog.ident.myDoubleAvg, false, false
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
index 2b8a47c9ca63..5d4b4c4b4bd7 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
@@ -4577,13 +4577,13 @@ DropFunctionCommand spark_catalog.default.foo1b2, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo1c1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1c1]
+DropFunctionCommand spark_catalog.default.foo1c1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1c2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1c2]
+DropFunctionCommand spark_catalog.default.foo1c2, true, false
 
 
 -- !query
@@ -4619,49 +4619,49 @@ DropFunctionCommand spark_catalog.default.foo1d6, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo1e1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1e1]
+DropFunctionCommand spark_catalog.default.foo1e1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1e2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1e2]
+DropFunctionCommand spark_catalog.default.foo1e2, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1e3
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1e3]
+DropFunctionCommand spark_catalog.default.foo1e3, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1f1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1f1]
+DropFunctionCommand spark_catalog.default.foo1f1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1f2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1f2]
+DropFunctionCommand spark_catalog.default.foo1f2, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1g1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1g1]
+DropFunctionCommand spark_catalog.default.foo1g1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo1g2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo1g2]
+DropFunctionCommand spark_catalog.default.foo1g2, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo2a0
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2a0]
+DropFunctionCommand spark_catalog.default.foo2a0, true, false
 
 
 -- !query
@@ -4679,37 +4679,37 @@ DropFunctionCommand spark_catalog.default.foo2a4, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo2b1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2b1]
+DropFunctionCommand spark_catalog.default.foo2b1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo2b2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2b2]
+DropFunctionCommand spark_catalog.default.foo2b2, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo2c1
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2c1]
+DropFunctionCommand spark_catalog.default.foo2c1, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo31
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo31]
+DropFunctionCommand spark_catalog.default.foo31, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo32
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo32]
+DropFunctionCommand spark_catalog.default.foo32, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo33
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo33]
+DropFunctionCommand spark_catalog.default.foo33, true, false
 
 
 -- !query
@@ -4721,7 +4721,7 @@ DropFunctionCommand spark_catalog.default.foo41, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo42
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo42]
+DropFunctionCommand spark_catalog.default.foo42, true, false
 
 
 -- !query
@@ -4811,7 +4811,7 @@ DropFunctionCommand spark_catalog.default.foo9h, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo9i
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo9i]
+DropFunctionCommand spark_catalog.default.foo9i, true, false
 
 
 -- !query
@@ -5003,13 +5003,13 @@ DropFunctionCommand spark_catalog.default.foo2_2b, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo2_2c
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2_2c]
+DropFunctionCommand spark_catalog.default.foo2_2c, true, false
 
 
 -- !query
 DROP FUNCTION IF EXISTS foo2_2d
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo2_2d]
+DropFunctionCommand spark_catalog.default.foo2_2d, true, false
 
 
 -- !query
@@ -5159,7 +5159,7 @@ DropFunctionCommand spark_catalog.default.foo3_2d1, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo3_2d2
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo3_2d2]
+DropFunctionCommand spark_catalog.default.foo3_2d2, true, false
 
 
 -- !query
@@ -5285,7 +5285,7 @@ DropFunctionCommand spark_catalog.default.foo3_12e, true, false
 -- !query
 DROP FUNCTION IF EXISTS foo3_12f
 -- !query analysis
-NoopCommand DROP FUNCTION, [foo3_12f]
+DropFunctionCommand spark_catalog.default.foo3_12f, true, false
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-udaf.sql.out
index 248ed95df9de..d9105ccaaac1 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/udf/udf-udaf.sql.out
@@ -83,7 +83,7 @@ org.apache.spark.sql.AnalysisException
 -- !query
 DROP FUNCTION myDoubleAvg
 -- !query analysis
-DropFunctionCommand spark_catalog.default.mydoubleavg, false, false
+DropFunctionCommand spark_catalog.default.myDoubleAvg, false, false
 
 
 -- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
index 23e221d6ecaf..963062c7781d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2FunctionSuite.scala
@@ -197,10 +197,13 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
     }
    assert(e.message.contains("Catalog testcat does not support DROP FUNCTION"))
 
-    val e1 = intercept[AnalysisException] {
-      sql("DROP FUNCTION default.ns1.ns2.fun")
-    }
-    assert(e1.message.contains("requires a single-part namespace"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("DROP FUNCTION default.ns1.ns2.fun")
+      },
+      condition = "IDENTIFIER_TOO_MANY_NAME_PARTS",
+      parameters = Map("identifier" -> "`default`.`ns1`.`ns2`.`fun`", "limit" -> "2")
+    )
   }
 
   test("CREATE FUNCTION: only support session catalog") {
@@ -223,10 +226,13 @@ class DataSourceV2FunctionSuite extends DatasourceV2SQLBase {
     }
    assert(e.message.contains("Catalog testcat does not support REFRESH FUNCTION"))
 
-    val e1 = intercept[AnalysisException] {
-      sql("REFRESH FUNCTION default.ns1.ns2.fun")
-    }
-    assert(e1.message.contains("requires a single-part namespace"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        sql("REFRESH FUNCTION default.ns1.ns2.fun")
+      },
+      condition = "IDENTIFIER_TOO_MANY_NAME_PARTS",
+      parameters = Map("identifier" -> "`default`.`ns1`.`ns2`.`fun`", "limit" -> "2")
+    )
   }
 
   test("built-in with non-function catalog should still work") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 3dea8593b428..1561336fdfa3 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -18,7 +18,7 @@
 package org.apache.spark.sql.execution.command
 
 import org.apache.spark.SparkThrowable
-import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, SchemaCompensation, UnresolvedAttribute, UnresolvedFunctionName, UnresolvedIdentifier}
+import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, SchemaCompensation, UnresolvedAttribute, UnresolvedIdentifier}
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, FileResource, FunctionResource, JarResource}
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.dsl.plans
@@ -666,22 +666,18 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
   }
 
   test("DROP FUNCTION") {
-    def createFuncPlan(name: Seq[String]): UnresolvedFunctionName = {
-      UnresolvedFunctionName(name, "DROP FUNCTION", true,
-        Some("Please use fully qualified identifier to drop the persistent function."))
-    }
     comparePlans(
       parser.parsePlan("DROP FUNCTION a"),
-      DropFunction(createFuncPlan(Seq("a")), false))
+      DropFunction(UnresolvedIdentifier(Seq("a")), false))
     comparePlans(
       parser.parsePlan("DROP FUNCTION a.b.c"),
-      DropFunction(createFuncPlan(Seq("a", "b", "c")), false))
+      DropFunction(UnresolvedIdentifier(Seq("a", "b", "c")), false))
     comparePlans(
       parser.parsePlan("DROP TEMPORARY FUNCTION a"),
       DropFunctionCommand(Seq("a").asFunctionIdentifier, false, true))
     comparePlans(
       parser.parsePlan("DROP FUNCTION IF EXISTS a.b.c"),
-      DropFunction(createFuncPlan(Seq("a", "b", "c")), true))
+      DropFunction(UnresolvedIdentifier(Seq("a", "b", "c")), true))
     comparePlans(
       parser.parsePlan("DROP TEMPORARY FUNCTION IF EXISTS a"),
       DropFunctionCommand(Seq("a").asFunctionIdentifier, true, true))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 3d4da878869e..42410a6cca52 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -2250,23 +2250,15 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
       exception = intercept[AnalysisException] {
         sql("REFRESH FUNCTION md5")
       },
-      condition = "_LEGACY_ERROR_TEMP_1017",
-      parameters = Map(
-        "name" -> "md5",
-        "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+      condition = "_LEGACY_ERROR_TEMP_1256",
+      parameters = Map("functionName" -> "md5"),
       context = ExpectedContext(fragment = "md5", start = 17, stop = 19))
     checkError(
       exception = intercept[AnalysisException] {
         sql("REFRESH FUNCTION default.md5")
       },
-      condition = "UNRESOLVED_ROUTINE",
-      parameters = Map(
-        "routineName" -> "`default`.`md5`",
-        "searchPath" -> "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]"),
-      context = ExpectedContext(
-        fragment = "default.md5",
-        start = 17,
-        stop = 27))
+      condition = "ROUTINE_NOT_FOUND",
+      parameters = Map("routineName" -> "`default`.`md5`"))
 
     withUserDefinedFunction("func1" -> true) {
      sql("CREATE TEMPORARY FUNCTION func1 AS 'test.org.apache.spark.sql.MyDoubleAvg'")
@@ -2274,8 +2266,8 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
         exception = intercept[AnalysisException] {
           sql("REFRESH FUNCTION func1")
         },
-        condition = "_LEGACY_ERROR_TEMP_1017",
-        parameters = Map("name" -> "func1", "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+        condition = "_LEGACY_ERROR_TEMP_1257",
+        parameters = Map("functionName" -> "func1"),
         context = ExpectedContext(
           fragment = "func1",
           start = 17,
@@ -2290,11 +2282,8 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
         exception = intercept[AnalysisException] {
           sql("REFRESH FUNCTION func1")
         },
-        condition = "UNRESOLVED_ROUTINE",
-        parameters = Map(
-          "routineName" -> "`func1`",
-          "searchPath" -> "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]"),
-        context = ExpectedContext(fragment = "func1", start = 17, stop = 21)
+        condition = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`default`.`func1`")
       )
       assert(!spark.sessionState.catalog.isRegisteredFunction(func))
 
@@ -2306,14 +2295,8 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
         exception = intercept[AnalysisException] {
           sql("REFRESH FUNCTION func2")
         },
-        condition = "UNRESOLVED_ROUTINE",
-        parameters = Map(
-          "routineName" -> "`func2`",
-          "searchPath" -> "[`system`.`builtin`, `system`.`session`, `spark_catalog`.`default`]"),
-        context = ExpectedContext(
-          fragment = "func2",
-          start = 17,
-          stop = 21))
+        condition = "ROUTINE_NOT_FOUND",
+        parameters = Map("routineName" -> "`default`.`func2`"))
       assert(spark.sessionState.catalog.isRegisteredFunction(func))
 
       spark.sessionState.catalog.externalCatalog.dropFunction("default", 
"func1")
@@ -2354,8 +2337,8 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
         exception = intercept[AnalysisException] {
           sql("REFRESH FUNCTION rand")
         },
-        condition = "_LEGACY_ERROR_TEMP_1017",
-        parameters = Map("name" -> "rand", "cmd" -> "REFRESH FUNCTION", "hintStr" -> ""),
+        condition = "_LEGACY_ERROR_TEMP_1256",
+        parameters = Map("functionName" -> "rand"),
         context = ExpectedContext(fragment = "rand", start = 17, stop = 20)
       )
       assert(!spark.sessionState.catalog.isRegisteredFunction(rand))


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

