gengliangwang commented on code in PR #36745: URL: https://github.com/apache/spark/pull/36745#discussion_r890519223
########## sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala: ########## @@ -41,21 +41,29 @@ import org.apache.spark.sql.types.{MetadataBuilder, StructField, StructType} * * We can remove this rule once we implement all the catalog functionality in `V2SessionCatalog`. */ -class ResolveSessionCatalog(val catalogManager: CatalogManager) +class ResolveSessionCatalog(val analyzer: Analyzer) extends Rule[LogicalPlan] with LookupCatalog { import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ import org.apache.spark.sql.connector.catalog.CatalogV2Util._ import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Implicits._ + override val catalogManager = analyzer.catalogManager + override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp { - case AddColumns(ResolvedV1TableIdentifier(ident), cols) => + case AddColumns( + ResolvedTable(catalog, ident, v1Table: V1Table, _), cols) + if isSessionCatalog(catalog) => cols.foreach { c => assertTopLevelColumn(c.name, "AlterTableAddColumnsCommand") if (!c.nullable) { throw QueryCompilationErrors.addColumnWithV1TableCannotSpecifyNotNullError } } - AlterTableAddColumnsCommand(ident.asTableIdentifier, cols.map(convertToStructField)) + val prevSchema = StructType(cols.map(convertToStructField)) + val newSchema: StructType = + DefaultCols.constantFoldCurrentDefaultsToExistDefaults( Review Comment: `constantFoldCurrentDefaultsToExistDefaults` is called in `AlterTableAddColumnsCommand` too. Seems duplicated here? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org