This is an automated email from the ASF dual-hosted git repository.

wenchen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2829fb9304d0 [SPARK-54682][SQL] Support showing parameters in DESCRIBE PROCEDURE
2829fb9304d0 is described below

commit 2829fb9304d0b4783ba1ca307295c58b802bd3a4
Author: Allison Wang <[email protected]>
AuthorDate: Wed Jan 7 00:35:15 2026 +0800

    [SPARK-54682][SQL] Support showing parameters in DESCRIBE PROCEDURE
    
    ### What changes were proposed in this pull request?
    
    This PR updates the DESCRIBE PROCEDURE command to correctly resolve
    V2 procedures and display detailed parameter information.
    Previously, DESCRIBE PROCEDURE did not provide parameter details.
    This change enhances the command to:
    - Properly resolve procedure identifiers using the ResolveProcedures
      analyzer rule.
    - Bind the procedure to retrieve its schema.
    - Display a comprehensive list of input parameters, including their
      mode (IN/OUT), name, data type, default value, and comment, as
      illustrated in the sketch below.
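
    For illustration, a sketch of the new output as exercised by the
    updated ProcedureSuite (the catalog `cat`, namespace `ns`, and the
    `simple_sum` procedure are test fixtures from that suite):

        spark.sql("DESCRIBE PROCEDURE cat.ns.foo").collect().foreach(println)
        // [Procedure:   simple_sum]
        // [Description: simple sum integers]
        // [Parameters:  IN in1 INT]
        // [             IN in2 INT]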
    
    ### Why are the changes needed?
    
    Users need to know the parameter signatures of stored procedures in
    order to call them correctly. Without this change, DESCRIBE PROCEDURE
    provided too little information to invoke a procedure. This change
    also aligns the command with DESCRIBE FUNCTION.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, DESCRIBE PROCEDURE output now includes a "Parameters" section
    listing all arguments with their modes, types, default values, and
    comments.
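
    For a hypothetical parameter declared with a default value and a
    comment (the names and values below are illustrative, not taken from
    this patch), the new formatting logic renders lines such as:

        Parameters:  IN in1 INT
                     IN in2 INT DEFAULT 0 'an optional addend'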
    
    ### How was this patch tested?
    
    Existing tests, plus new ProcedureSuite cases covering parameter
    display, binding failures, and zero-parameter procedures.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    Yes
    
    Closes #53437 from allisonwang-db/desc-procedure.
    
    Lead-authored-by: Allison Wang <[email protected]>
    Co-authored-by: Wenchen Fan <[email protected]>
    Signed-off-by: Wenchen Fan <[email protected]>
---
 .../spark/sql/catalyst/analysis/Analyzer.scala     |  4 +-
 .../spark/sql/execution/SparkSqlParser.scala       |  5 +-
 .../command/DescribeProcedureCommand.scala         | 59 +++++++++++++++-------
 .../spark/sql/connector/ProcedureSuite.scala       | 54 +++++++++++++++++---
 4 files changed, 93 insertions(+), 29 deletions(-)
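
For context, a minimal sketch of the connector-side shape the updated
command reads, modeled on the suite's SimpleZeroParameterProcedure further
down in this diff. The ProcedureParameter.in(...).build() builder calls
are an assumption about the V2 procedure API, not something this patch adds:

    import java.util.Collections

    import org.apache.spark.sql.catalyst.InternalRow
    import org.apache.spark.sql.connector.catalog.procedures.{ProcedureParameter, SimpleProcedure}
    import org.apache.spark.sql.connector.read.Scan
    import org.apache.spark.sql.types.DataTypes

    object SketchSum extends SimpleProcedure {
      override def name: String = "simple_sum"
      override def description: String = "simple sum integers"
      override def isDeterministic: Boolean = true
      // These parameters are what DESCRIBE PROCEDURE now surfaces
      // (mode, name, type, optional default and comment).
      override def parameters: Array[ProcedureParameter] = Array(
        ProcedureParameter.in("in1", DataTypes.IntegerType).build(),  // assumed builder API
        ProcedureParameter.in("in2", DataTypes.IntegerType).build())  // assumed builder API
      override def call(input: InternalRow): java.util.Iterator[Scan] =
        Collections.emptyIterator
    }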

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 980d07f86ecd..23ee8bc7c9eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -2285,10 +2285,10 @@ class Analyzer(
   object ResolveProcedures extends Rule[LogicalPlan] {
     def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsWithPruning(
       _.containsPattern(UNRESOLVED_PROCEDURE), ruleId) {
-      case Call(UnresolvedProcedure(CatalogAndIdentifier(catalog, ident)), args, execute) =>
+      case UnresolvedProcedure(CatalogAndIdentifier(catalog, ident)) =>
         val procedureCatalog = catalog.asProcedureCatalog
         val procedure = load(procedureCatalog, ident)
-        Call(ResolvedProcedure(procedureCatalog, ident, procedure), args, execute)
+        ResolvedProcedure(procedureCatalog, ident, procedure)
     }
 
     private def load(catalog: ProcedureCatalog, ident: Identifier): UnboundProcedure = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index be4dcc550a83..9af2a82cdd9e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -29,7 +29,8 @@ import org.apache.spark.SparkException
 import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{CurrentNamespace, GlobalTempView, LocalTempView,
   PersistedView, PlanWithUnresolvedIdentifier, SchemaEvolution, SchemaTypeEvolution,
-  UnresolvedAttribute, UnresolvedFunctionName, UnresolvedIdentifier, UnresolvedNamespace}
+  UnresolvedAttribute, UnresolvedFunctionName, UnresolvedIdentifier, UnresolvedNamespace,
+  UnresolvedProcedure}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
 import org.apache.spark.sql.catalyst.parser._
@@ -1399,7 +1400,7 @@ class SparkSqlAstBuilder extends AstBuilder {
   override def visitDescribeProcedure(
       ctx: DescribeProcedureContext): LogicalPlan = withOrigin(ctx) {
     withIdentClause(ctx.identifierReference(), procIdentifier =>
-      DescribeProcedureCommand(UnresolvedIdentifier(procIdentifier)))
+      DescribeProcedureCommand(UnresolvedProcedure(procIdentifier)))
   }
 
   override def visitCreatePipelineInsertIntoFlow(
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/DescribeProcedureCommand.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/DescribeProcedureCommand.scala
index ef7a538307bf..39749f705f6d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/DescribeProcedureCommand.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/DescribeProcedureCommand.scala
@@ -19,15 +19,12 @@ package org.apache.spark.sql.execution.command
 
 import scala.collection.mutable.ArrayBuffer
 
-import org.apache.spark.{SparkException, SparkThrowable}
+import org.apache.spark.SparkException
 import org.apache.spark.sql.{Row, SparkSession}
-import org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier
+import org.apache.spark.sql.catalyst.analysis.ResolvedProcedure
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.connector.catalog.{Identifier, ProcedureCatalog}
-import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
-import org.apache.spark.sql.connector.catalog.procedures.UnboundProcedure
-import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.connector.catalog.procedures.{ProcedureParameter, SimpleProcedure, UnboundProcedure}
 import org.apache.spark.sql.types.StringType
 
 /**
@@ -45,34 +42,60 @@ case class DescribeProcedureCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     child match {
-      case ResolvedIdentifier(catalog, ident) =>
-        val procedure = load(catalog.asProcedureCatalog, ident)
+      case ResolvedProcedure(catalog, ident, procedure: UnboundProcedure) =>
         describeV2Procedure(procedure)
       case _ =>
         throw SparkException.internalError(s"Invalid procedure identifier: ${child.getClass}")
     }
   }
 
-  private def load(catalog: ProcedureCatalog, ident: Identifier): UnboundProcedure = {
-    try {
-      catalog.loadProcedure(ident)
-    } catch {
-      case e: Exception if !e.isInstanceOf[SparkThrowable] =>
-        val nameParts = catalog.name +: ident.asMultipartIdentifier
-        throw QueryCompilationErrors.failedToLoadRoutineError(nameParts, e)
-    }
-  }
-
   private def describeV2Procedure(procedure: UnboundProcedure): Seq[Row] = {
     val buffer = new ArrayBuffer[(String, String)]
     append(buffer, "Procedure:", procedure.name())
     append(buffer, "Description:", procedure.description())
 
+    procedure match {
+      case p: SimpleProcedure =>
+        val params = p.parameters()
+        if (params != null && params.nonEmpty) {
+          val formattedParams = formatProcedureParameters(params)
+          append(buffer, "Parameters:", formattedParams.head)
+          formattedParams.tail.foreach(s => append(buffer, "", s))
+        } else {
+          append(buffer, "Parameters:", "()")
+        }
+      case _ =>
+        // Do not show parameters for non-simple procedures
+    }
+
     val keys = tabulate(buffer.map(_._1).toSeq)
     val values = buffer.map(_._2)
     keys.zip(values).map { case (key, value) => Row(s"$key $value") }
   }
 
+  private def formatProcedureParameters(params: Array[ProcedureParameter]): Seq[String] = {
+    val paramsStrings = params.map { p =>
+      val mode = p.mode().toString
+      val name = p.name()
+      val dataType = p.dataType().sql
+      val comment = if (p.comment() != null) s" '${p.comment()}'" else ""
+      val defaultVal = if (p.defaultValue() != null) p.defaultValue().getSql else null
+      val default = if (defaultVal != null) s" DEFAULT $defaultVal" else ""
+      (mode, name, dataType, default, comment)
+    }
+
+    val modeLen = paramsStrings.map(_._1.length).max
+    val nameLen = paramsStrings.map(_._2.length).max
+    val dataTypeLen = paramsStrings.map(_._3.length).max
+
+    paramsStrings.map { case (mode, name, dataType, default, comment) =>
+      val paddedMode = mode.padTo(modeLen, " ").mkString
+      val paddedName = name.padTo(nameLen, " ").mkString
+      val paddedDataType = dataType.padTo(dataTypeLen, " ").mkString
+      s"$paddedMode $paddedName $paddedDataType$default$comment"
+    }.toSeq
+  }
+
   private def append(buffer: ArrayBuffer[(String, String)], key: String, value: String): Unit = {
     buffer += (key -> value)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/ProcedureSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/ProcedureSuite.scala
index bcf288056471..dc35bd75b569 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/ProcedureSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/ProcedureSuite.scala
@@ -493,7 +493,7 @@ class ProcedureSuite extends QueryTest with SharedSparkSession with BeforeAndAft
   }
 
   test("SPARK-51780: Implement DESC PROCEDURE") {
-    catalog.createProcedure(Identifier.of(Array("ns"), "foo"), UnboundSum)
+    catalog.createProcedure(Identifier.of(Array("ns"), "foo"), SimpleSum)
     catalog.createProcedure(Identifier.of(Array("ns", "db"), "abc"), 
UnboundLongSum)
     catalog.createProcedure(Identifier.of(Array(""), "xyz"), 
UnboundComplexProcedure)
     catalog.createProcedure(Identifier.of(Array(), "xxx"), 
UnboundStructProcedure)
@@ -517,20 +517,26 @@ class ProcedureSuite extends QueryTest with SharedSparkSession with BeforeAndAft
 
       checkAnswer(
         sql("DESC PROCEDURE cat.ns.foo"),
-        Row("Procedure:   sum") ::
-          Row("Description: sum integers") :: Nil)
+        Row("Procedure:   simple_sum") ::
+          Row("Description: simple sum integers") ::
+          Row("Parameters:  IN in1 INT") ::
+          Row("             IN in2 INT") :: Nil)
 
       checkAnswer(
         // use DESCRIBE instead of DESC
         sql("DESCRIBE PROCEDURE cat.ns.foo"),
-        Row("Procedure:   sum") ::
-          Row("Description: sum integers") :: Nil)
+        Row("Procedure:   simple_sum") ::
+          Row("Description: simple sum integers") ::
+          Row("Parameters:  IN in1 INT") ::
+          Row("             IN in2 INT") :: Nil)
 
       checkAnswer(
         // use default catalog
         sql("DESC PROCEDURE ns.foo"),
-        Row("Procedure:   sum") ::
-          Row("Description: sum integers") :: Nil)
+        Row("Procedure:   simple_sum") ::
+          Row("Description: simple sum integers") ::
+          Row("Parameters:  IN in1 INT") ::
+          Row("             IN in2 INT") :: Nil)
 
       checkAnswer(
         // use multi-part namespace
@@ -562,6 +568,32 @@ class ProcedureSuite extends QueryTest with SharedSparkSession with BeforeAndAft
     }
   }
 
+  test("SPARK-51780: DESC PROCEDURE with binding failure") {
+    catalog.createProcedure(Identifier.of(Array("ns"), "bind_fail"), 
UnboundBindFailProcedure)
+    checkAnswer(
+      sql("DESC PROCEDURE cat.ns.bind_fail"),
+      Row("Procedure:   bind_fail") ::
+      Row("Description: bind fail procedure") :: Nil)
+  }
+
+  test("SPARK-51780: DESC PROCEDURE with zero parameters") {
+    catalog.createProcedure(
+      Identifier.of(Array("ns"), "zero_params"), SimpleZeroParameterProcedure)
+    checkAnswer(
+      sql("DESC PROCEDURE cat.ns.zero_params"),
+      Row("Procedure:   zero_params") ::
+      Row("Description: zero parameter procedure") ::
+      Row("Parameters:  ()") :: Nil)
+  }
+
+  object UnboundBindFailProcedure extends UnboundProcedure {
+    override def name: String = "bind_fail"
+    override def description: String = "bind fail procedure"
+    override def bind(inputType: StructType): BoundProcedure = {
+      throw new UnsupportedOperationException("Cannot bind")
+    }
+  }
+
   object UnboundVoidProcedure extends UnboundProcedure {
     override def name: String = "void"
     override def description: String = "void procedure"
@@ -909,4 +941,12 @@ class ProcedureSuite extends QueryTest with SharedSparkSession with BeforeAndAft
 
     override def description: String = "simple sum integers"
   }
+
+  object SimpleZeroParameterProcedure extends SimpleProcedure {
+    override def name: String = "zero_params"
+    override def description: String = "zero parameter procedure"
+    override def isDeterministic: Boolean = true
+    override def parameters: Array[ProcedureParameter] = Array.empty
+    override def call(input: InternalRow): java.util.Iterator[Scan] = Collections.emptyIterator
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
