cloud-fan commented on a change in pull request #31791:
URL: https://github.com/apache/spark/pull/31791#discussion_r600757993



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala
##########
@@ -17,146 +17,45 @@
 
 package org.apache.spark.sql.catalyst.analysis
 
-import java.util.Locale
-
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions.{Alias, Expression}
-import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Range}
+import org.apache.spark.sql.catalyst.catalog.SessionCatalog
+import org.apache.spark.sql.catalyst.expressions.Alias
+import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
 import org.apache.spark.sql.catalyst.rules._
-import org.apache.spark.sql.types.{DataType, IntegerType, LongType}
 
 /**
  * Rule that resolves table-valued function references.
  */
-object ResolveTableValuedFunctions extends Rule[LogicalPlan] {
-  /**
-   * List of argument names and their types, used to declare a function.
-   */
-  private case class ArgumentList(args: (String, DataType)*) {
-    /**
-     * Try to cast the expressions to satisfy the expected types of this argument list. If there
-     * are any types that cannot be casted, then None is returned.
-     */
-    def implicitCast(values: Seq[Expression]): Option[Seq[Expression]] = {
-      if (args.length == values.length) {
-        val casted = values.zip(args).map { case (value, (_, expectedType)) =>
-          TypeCoercion.implicitCast(value, expectedType)
-        }
-        if (casted.forall(_.isDefined)) {
-          return Some(casted.map(_.get))
-        }
-      }
-      None
-    }
-
-    override def toString: String = {
-      args.map { a =>
-        s"${a._1}: ${a._2.typeName}"
-      }.mkString(", ")
-    }
-  }
-
-  /**
-   * A TVF maps argument lists to resolver functions that accept those arguments. Using a map
-   * here allows for function overloading.
-   */
-  private type TVF = Map[ArgumentList, Seq[Any] => LogicalPlan]
-
-  /**
-   * TVF builder.
-   */
-  private def tvf(args: (String, DataType)*)(pf: PartialFunction[Seq[Any], LogicalPlan])
-      : (ArgumentList, Seq[Any] => LogicalPlan) = {
-    (ArgumentList(args: _*),
-     pf orElse {
-       case arguments =>
-         // This is caught again by the apply function and rethrow with richer information about
-         // position, etc, for a better error message.
-         throw new AnalysisException(
-           "Invalid arguments for resolved function: " + arguments.mkString(", 
"))
-     })
-  }
-
-  /**
-   * Internal registry of table-valued functions.
-   */
-  private val builtinFunctions: Map[String, TVF] = Map(
-    "range" -> Map(
-      /* range(end) */
-      tvf("end" -> LongType) { case Seq(end: Long) =>
-        Range(0, end, 1, None)
-      },
-
-      /* range(start, end) */
-      tvf("start" -> LongType, "end" -> LongType) { case Seq(start: Long, end: 
Long) =>
-        Range(start, end, 1, None)
-      },
-
-      /* range(start, end, step) */
-      tvf("start" -> LongType, "end" -> LongType, "step" -> LongType) {
-        case Seq(start: Long, end: Long, step: Long) =>
-          Range(start, end, step, None)
-      },
-
-      /* range(start, end, step, numPartitions) */
-      tvf("start" -> LongType, "end" -> LongType, "step" -> LongType,
-          "numPartitions" -> IntegerType) {
-        case Seq(start: Long, end: Long, step: Long, numPartitions: Int) =>
-          Range(start, end, step, Some(numPartitions))
-      })
-  )
+case class ResolveTableValuedFunctions(catalog: SessionCatalog) extends Rule[LogicalPlan] {
 
   override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
    case u: UnresolvedTableValuedFunction if u.functionArgs.forall(_.resolved) =>
      // The whole resolution is somewhat difficult to understand here due to too much abstractions.
      // We should probably rewrite the following at some point. Reynold was just here to improve
      // error messages and didn't have time to do a proper rewrite.

Review comment:
       seems we can remove this comment now :)
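
       For context, here is a minimal usage sketch (not part of this PR) of the table-valued function whose registry is removed above; `spark` is assumed to be an existing `SparkSession`:

       ```scala
       // Hypothetical illustration: the SQL `range` table-valued function that this
       // rule resolves. With the old registry shown in the diff, the literal
       // arguments were implicitly cast to LongType and dispatched to the matching
       // overload, yielding a Range(0, 10, 2, None) logical plan (ids 0, 2, 4, 6, 8).
       spark.sql("SELECT id FROM range(0, 10, 2)").show()
       ```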



