cloud-fan commented on code in PR #44190: URL: https://github.com/apache/spark/pull/44190#discussion_r1418295715
########## sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2Suite.scala: ########## @@ -723,6 +724,158 @@ class DataSourceV2Suite extends QueryTest with SharedSparkSession with AdaptiveS } } } + + test("SPARK-46272: create table as select") { + val cls = classOf[SupportsExternalMetadataDataSource] + withTable("test") { + sql( + s""" + |CREATE TABLE test USING ${cls.getName} + |AS VALUES (0, 1), (1, 2) + |""".stripMargin) + checkAnswer(sql("SELECT * FROM test"), Seq(Row(0, 1), Row(1, 2))) + sql( + s""" + |CREATE OR REPLACE TABLE test USING ${cls.getName} + |AS VALUES (2, 3), (4, 5) + |""".stripMargin) + checkAnswer(sql("SELECT * FROM test"), Seq(Row(2, 3), Row(4, 5))) + sql( + s""" + |CREATE TABLE IF NOT EXISTS test USING ${cls.getName} + |AS VALUES (3, 4), (4, 5) + |""".stripMargin) + checkAnswer(sql("SELECT * FROM test"), Seq(Row(2, 3), Row(4, 5))) + } + } + + test("SPARK-46272: create table as select - error cases") { + val cls = classOf[SupportsExternalMetadataDataSource] + // CTAS with too many columns + withTable("test") { + checkError( + exception = intercept[AnalysisException] { + sql( + s""" + |CREATE TABLE test USING ${cls.getName} + |AS VALUES (0, 1, 2), (1, 2, 3) + |""".stripMargin) + }, + errorClass = "INSERT_COLUMN_ARITY_MISMATCH.TOO_MANY_DATA_COLUMNS", Review Comment: I'm a bit confused here. So for this data source, we probably don't check anything when creating the table. But when we load the table and get the `Table` instance in `V2SessionCatalog`, shall we check if `Table#schema` matches what we stored in HMS? -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org