imback82 commented on a change in pull request #26369: [SPARK-29678][SQL] ALTER TABLE (ADD PARTITION) should look up catalog/table like v2 commands URL: https://github.com/apache/spark/pull/26369#discussion_r341818796
########## File path: sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala ########## @@ -426,41 +426,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) { Option(ctx.partitionSpec).map(visitNonOptionalPartitionSpec)) } - /** - * Create an [[AlterTableAddPartitionCommand]] command. - * - * For example: - * {{{ - * ALTER TABLE table ADD [IF NOT EXISTS] PARTITION spec [LOCATION 'loc1'] - * ALTER VIEW view ADD [IF NOT EXISTS] PARTITION spec - * }}} - * - * ALTER VIEW ... ADD PARTITION ... is not supported because the concept of partitioning - * is associated with physical tables - */ - override def visitAddTablePartition( - ctx: AddTablePartitionContext): LogicalPlan = withOrigin(ctx) { - if (ctx.VIEW != null) { - operationNotAllowed("ALTER VIEW ... ADD PARTITION", ctx) - } - // Create partition spec to location mapping. - val specsAndLocs = if (ctx.partitionSpec.isEmpty) { - ctx.partitionSpecLocation.asScala.map { - splCtx => - val spec = visitNonOptionalPartitionSpec(splCtx.partitionSpec) - val location = Option(splCtx.locationSpec).map(visitLocationSpec) - spec -> location - } - } else { - // Alter View: the location clauses are not allowed. Review comment: I also removed this check in `AstBuilder.scala` since view check is already done at the top. ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: users@infra.apache.org With regards, Apache Git Services --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org