Github user yhuai commented on a diff in the pull request: https://github.com/apache/spark/pull/13315#discussion_r64793761 --- Diff: sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala --- @@ -289,37 +289,53 @@ case class TruncateTableCommand( val catalog = sparkSession.sessionState.catalog if (!catalog.tableExists(tableName)) { throw new AnalysisException(s"Table '$tableName' in TRUNCATE TABLE does not exist.") - } else if (catalog.isTemporaryTable(tableName)) { + } + if (catalog.isTemporaryTable(tableName)) { throw new AnalysisException( s"Operation not allowed: TRUNCATE TABLE on temporary tables: '$tableName'") + } + val table = catalog.getTableMetadata(tableName) + if (table.tableType == CatalogTableType.EXTERNAL) { + throw new AnalysisException( + s"Operation not allowed: TRUNCATE TABLE on external tables: '$tableName'") + } + if (table.tableType == CatalogTableType.VIEW) { + throw new AnalysisException( + s"Operation not allowed: TRUNCATE TABLE on views: '$tableName'") + } + if (DDLUtils.isDatasourceTable(table) && partitionSpec.isDefined) { + throw new AnalysisException( + s"Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported " + + s"for tables created using the data sources API: '$tableName'") + } + val locations = if (partitionSpec.isDefined) { + catalog.listPartitions(tableName, partitionSpec).map(_.storage.locationUri) } else { - val locations = if (partitionSpec.isDefined) { - catalog.listPartitions(tableName, partitionSpec).map(_.storage.locationUri) + if (table.partitionColumnNames.nonEmpty) { + catalog.listPartitions(tableName).map(_.storage.locationUri) --- End diff -- This is for hive table, right?
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org