imback82 commented on a change in pull request #30229: URL: https://github.com/apache/spark/pull/30229#discussion_r516433011
########## File path: sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala ########## @@ -175,7 +175,10 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils { }.getMessage assert(e2.contains("SHOW CREATE TABLE is not supported on a temporary view")) assertNoSuchTable(s"SHOW PARTITIONS $viewName") - assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") + val e3 = intercept[AnalysisException] { + sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS") + }.getMessage + assert(e3.contains("ANALYZE TABLE is not supported on a temporary view")) assertNoSuchTable(s"ANALYZE TABLE $viewName COMPUTE STATISTICS FOR COLUMNS id") Review comment: Weirdly, `AnalyzeColumnCommand` throws `NoSuchTableException` if the temp view is not cached (even if it exists): https://github.com/apache/spark/blob/3959f0d9879fa7fa9e8f2e8ed8c8b12003d21788/sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzeColumnCommand.scala#L72-L78 So, this line doesn't need to be updated. ########## File path: sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala ########## @@ -255,19 +254,11 @@ class ResolveSessionCatalog( case RenameTableStatement(TempViewOrV1Table(oldName), newName, isView) => AlterTableRenameCommand(oldName.asTableIdentifier, newName.asTableIdentifier, isView) - case DescribeRelation(r @ ResolvedTable(_, ident, _: V1Table), partitionSpec, isExtended) - if isSessionCatalog(r.catalog) => - DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended) - // Use v1 command to describe (temp) view, as v2 catalog doesn't support view yet. 
- case DescribeRelation(ResolvedView(ident, _), partitionSpec, isExtended) => + case DescribeRelation(ResolvedV1TableOrViewIdentifier(ident), partitionSpec, isExtended) => DescribeTableCommand(ident.asTableIdentifier, partitionSpec, isExtended) - case DescribeColumn(r @ ResolvedTable(_, _, _: V1Table), colNameParts, isExtended) - if isSessionCatalog(r.catalog) => - DescribeColumnCommand(r.identifier.asTableIdentifier, colNameParts, isExtended) - - case DescribeColumn(ResolvedView(ident, _), colNameParts, isExtended) => + case DescribeColumn(ResolvedV1TableOrViewIdentifier(ident), colNameParts, isExtended) => DescribeColumnCommand(ident.asTableIdentifier, colNameParts, isExtended) Review comment: Applying `ResolvedV1TableOrViewIdentifier` extractor to existing code to simplify. ########## File path: sql/core/src/main/scala/org/apache/spark/sql/execution/command/AnalyzePartitionCommand.scala ########## @@ -75,6 +75,9 @@ case class AnalyzePartitionCommand( override def run(sparkSession: SparkSession): Seq[Row] = { val sessionState = sparkSession.sessionState + if (sessionState.catalog.getTempView(tableIdent.identifier).isDefined) { + throw new AnalysisException("ANALYZE TABLE is not supported on a temporary view.") + } Review comment: `ANALYZE TABLE` supports both table and permanent view (if cached), but not a temporary view. Thus, we need to check this in commands to make sure temporary views are resolved first. Another approach is to introduce `UnresolvedTableOrPermanentView` such that if a temporary view is found first, it fails at the `Analyzer`. WDYT @cloud-fan? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org