cloud-fan commented on a change in pull request #31368: URL: https://github.com/apache/spark/pull/31368#discussion_r565880329
########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala ########## @@ -844,18 +844,41 @@ class SessionCatalog( } } + def getTempViewSchema(plan: LogicalPlan): StructType = { + plan match { + case viewInfo: TemporaryViewRelation => viewInfo.tableMeta.schema + case v => v.schema + } + } + private def fromCatalogTable(metadata: CatalogTable, isTempView: Boolean): View = { - val viewText = metadata.viewText.getOrElse(sys.error("Invalid view without text.")) + val viewText = metadata.viewText.getOrElse { + throw new IllegalStateException("Invalid view without text.") + } val viewConfigs = metadata.viewSQLConfigs - val viewPlan = + val parsedPlan = SQLConf.withExistingConf(View.effectiveSQLConf(viewConfigs, isTempView = isTempView)) { parser.parsePlan(viewText) } - View( - desc = metadata, - isTempView = isTempView, - output = metadata.schema.toAttributes, - child = viewPlan) + val viewColumnNames = metadata.viewQueryColumnNames + val viewPlan = if (viewColumnNames.nonEmpty) { + assert(viewColumnNames.length == metadata.schema.length) + // For view queries like `SELECT * FROM t`, the schema of the referenced table/view may + // change after the view has been created. We need to add an extra SELECT to pick the columns + // according to the recorded column names (to get the correct view column ordering and omit + // the extra columns that we don't require), add UpCast (to make sure the type change is + // safe) and Alias according to the schema in the catalog. + val projectList = viewColumnNames.zip(metadata.schema).map { case (col, field) => + Alias(UpCast(UnresolvedAttribute.quoted(col), field.dataType), field.name)( + explicitMetadata = Some(field.metadata)) + } + Project(projectList, parsedPlan) + } else { + // For views created before Spark 2.2.0, the view text is already fully qualified, and the plan + // output is the same as the view output. 
+ parsedPlan Review comment: Before Spark 2.2.0, we generated SQL from the logical plan, and the logical plan already has an extra `Project` to add aliases; see https://github.com/apache/spark/blob/branch-2.1/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala#L214 ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org