spark git commit: Revert "[SPARK-20311][SQL] Support aliases for table value functions"
Repository: spark Updated Branches: refs/heads/branch-2.2 9e8d23b3a -> d191b962d Revert "[SPARK-20311][SQL] Support aliases for table value functions" This reverts commit 714811d0b5bcb5d47c39782ff74f898d276ecc59. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d191b962 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d191b962 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d191b962 Branch: refs/heads/branch-2.2 Commit: d191b962dc81c015fa92a38d882a8c7ea620ef06 Parents: 9e8d23b Author: Yin Huai Authored: Tue May 9 14:47:45 2017 -0700 Committer: Yin Huai Committed: Tue May 9 14:49:02 2017 -0700 -- .../apache/spark/sql/catalyst/parser/SqlBase.g4 | 20 ++ .../analysis/ResolveTableValuedFunctions.scala | 22 +++- .../sql/catalyst/analysis/unresolved.scala | 10 ++--- .../spark/sql/catalyst/parser/AstBuilder.scala | 17 --- .../sql/catalyst/analysis/AnalysisSuite.scala | 14 + .../sql/catalyst/parser/PlanParserSuite.scala | 13 +--- 6 files changed, 17 insertions(+), 79 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/d191b962/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 -- diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index 15e4dd4..1ecb3d1 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -472,23 +472,15 @@ identifierComment ; relationPrimary -: tableIdentifier sample? (AS? strictIdentifier)? #tableName -| '(' queryNoWith ')' sample? (AS? strictIdentifier)? #aliasedQuery -| '(' relation ')' sample? (AS? strictIdentifier)? #aliasedRelation -| inlineTable #inlineTableDefault2 -| functionTable #tableValuedFunction +: tableIdentifier sample? (AS? strictIdentifier)?
#tableName +| '(' queryNoWith ')' sample? (AS? strictIdentifier)? #aliasedQuery +| '(' relation ')' sample? (AS? strictIdentifier)? #aliasedRelation +| inlineTable #inlineTableDefault2 +| identifier '(' (expression (',' expression)*)? ')' #tableValuedFunction ; inlineTable -: VALUES expression (',' expression)* tableAlias -; - -functionTable -: identifier '(' (expression (',' expression)*)? ')' tableAlias -; - -tableAlias -: (AS? identifier identifierList?)? +: VALUES expression (',' expression)* (AS? identifier identifierList?)? ; rowFormat http://git-wip-us.apache.org/repos/asf/spark/blob/d191b962/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala index dad1340..de6de24 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.analysis import java.util.Locale -import org.apache.spark.sql.catalyst.expressions.{Alias, Expression} -import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Range} +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Range} import org.apache.spark.sql.catalyst.rules._ import org.apache.spark.sql.types.{DataType, IntegerType, LongType} @@ -105,7 +105,7 @@ object ResolveTableValuedFunctions extends Rule[LogicalPlan] { override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators { case u: UnresolvedTableValuedFunction if u.functionArgs.forall(_.resolved) => - val resolvedFunc = builtinFunctions.get(u.functionName.toLowerCase(Locale.ROOT)) match { + 
builtinFunctions.get(u.functionName.toLowerCase(Locale.ROOT)) match { case Some(tvf) => val resolved = tvf.flatMap { case (argList, resolver) => argList.implicitCast(u.functionArgs) match { @@ -125,21 +125,5 @@ object ResolveTableValuedFunctions extends Rule[LogicalPlan]
spark git commit: Revert "[SPARK-20311][SQL] Support aliases for table value functions"
Repository: spark Updated Branches: refs/heads/master ac1ab6b9d -> f79aa285c Revert "[SPARK-20311][SQL] Support aliases for table value functions" This reverts commit 714811d0b5bcb5d47c39782ff74f898d276ecc59. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f79aa285 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f79aa285 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f79aa285 Branch: refs/heads/master Commit: f79aa285cf115963ba06a9cacb3dbd7e3cbf7728 Parents: ac1ab6b Author: Yin Huai Authored: Tue May 9 14:47:45 2017 -0700 Committer: Yin Huai Committed: Tue May 9 14:47:45 2017 -0700 -- .../apache/spark/sql/catalyst/parser/SqlBase.g4 | 20 ++ .../analysis/ResolveTableValuedFunctions.scala | 22 +++- .../sql/catalyst/analysis/unresolved.scala | 10 ++--- .../spark/sql/catalyst/parser/AstBuilder.scala | 17 --- .../sql/catalyst/analysis/AnalysisSuite.scala | 14 + .../sql/catalyst/parser/PlanParserSuite.scala | 13 +--- 6 files changed, 17 insertions(+), 79 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/f79aa285/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 -- diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 index 41daf58..14c511f 100644 --- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 +++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 @@ -472,23 +472,15 @@ identifierComment ; relationPrimary -: tableIdentifier sample? (AS? strictIdentifier)? #tableName -| '(' queryNoWith ')' sample? (AS? strictIdentifier)? #aliasedQuery -| '(' relation ')' sample? (AS? strictIdentifier)? #aliasedRelation -| inlineTable #inlineTableDefault2 -| functionTable #tableValuedFunction +: tableIdentifier sample? (AS? strictIdentifier)? #tableName +| '(' queryNoWith ')' sample? (AS?
strictIdentifier)? #aliasedQuery +| '(' relation ')' sample? (AS? strictIdentifier)? #aliasedRelation +| inlineTable #inlineTableDefault2 +| identifier '(' (expression (',' expression)*)? ')' #tableValuedFunction ; inlineTable -: VALUES expression (',' expression)* tableAlias -; - -functionTable -: identifier '(' (expression (',' expression)*)? ')' tableAlias -; - -tableAlias -: (AS? identifier identifierList?)? +: VALUES expression (',' expression)* (AS? identifier identifierList?)? ; rowFormat http://git-wip-us.apache.org/repos/asf/spark/blob/f79aa285/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala index dad1340..de6de24 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveTableValuedFunctions.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.catalyst.analysis import java.util.Locale -import org.apache.spark.sql.catalyst.expressions.{Alias, Expression} -import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project, Range} +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Range} import org.apache.spark.sql.catalyst.rules._ import org.apache.spark.sql.types.{DataType, IntegerType, LongType} @@ -105,7 +105,7 @@ object ResolveTableValuedFunctions extends Rule[LogicalPlan] { override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators { case u: UnresolvedTableValuedFunction if u.functionArgs.forall(_.resolved) => - val resolvedFunc = builtinFunctions.get(u.functionName.toLowerCase(Locale.ROOT)) match { + 
builtinFunctions.get(u.functionName.toLowerCase(Locale.ROOT)) match { case Some(tvf) => val resolved = tvf.flatMap { case (argList, resolver) => argList.implicitCast(u.functionArgs) match { @@ -125,21 +125,5 @@ object ResolveTableValuedFunctions extends Rule[LogicalPlan] {