cloud-fan commented on code in PR #37588: URL: https://github.com/apache/spark/pull/37588#discussion_r1380283326
########## sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowTablesSuiteBase.scala: ########## @@ -126,4 +158,258 @@ trait ShowTablesSuiteBase extends QueryTest with DDLCommandTestUtils { } } } + + test("show table in a not existing namespace") { + checkError( + exception = intercept[AnalysisException] { + sql(s"SHOW TABLES IN $catalog.nonexist") + }, + errorClass = "SCHEMA_NOT_FOUND", + parameters = Map("schemaName" -> "`nonexist`")) + } + + test("show table extended in a not existing namespace") { + checkError( + exception = intercept[AnalysisException] { + sql(s"SHOW TABLE EXTENDED IN $catalog.nonexist LIKE '*tbl*'") + }, + errorClass = "SCHEMA_NOT_FOUND", + parameters = Map("schemaName" -> "`nonexist`")) + } + + test("show table extended in a not existing table") { + val namespace = "ns1" + val table = "nonexist" + withNamespaceAndTable(namespace, table, catalog) { _ => + val result = sql(s"SHOW TABLE EXTENDED IN $catalog.$namespace LIKE '*$table*'") + assert(result.schema.fieldNames === + Seq("namespace", "tableName", "isTemporary", "information")) + assert(result.collect().isEmpty) + } + } + + test("show table extended in a not existing partition") { + val namespace = "ns1" + val table = "tbl" + withNamespaceAndTable(namespace, table, catalog) { tbl => + sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing PARTITIONED BY (id)") + sql(s"ALTER TABLE $tbl ADD PARTITION (id = 1)") + checkError( + exception = intercept[AnalysisException] { + sql(s"SHOW TABLE EXTENDED IN $catalog.$namespace LIKE '$table' PARTITION(id = 2)") + }, + errorClass = "PARTITIONS_NOT_FOUND", + parameters = Map( + "partitionList" -> "PARTITION (`id` = 2)", + "tableName" -> "`ns1`.`tbl`" + ) + ) + } + } + + test("show table extended in non-partitioned table") { + val namespace = "ns1" + val table = "tbl" + withNamespaceAndTable(namespace, table, catalog) { tbl => + sql(s"CREATE TABLE $tbl (id bigint, data string) $defaultUsing") + val e = 
intercept[AnalysisException] { + sql(s"SHOW TABLE EXTENDED IN $catalog.$namespace LIKE '$table' PARTITION(id = 1)") + } + val (errorClass, parameters) = extendedPartInNonPartedTableError(catalog, namespace, table) + checkError(exception = e, errorClass = errorClass, parameters = parameters) + } + } + + test("show table extended in multi partition key - " + + "the command's partition parameters are complete") { + val namespace = "ns1" + val table = "tbl" + withNamespaceAndTable(namespace, table, catalog) { tbl => + sql(s"CREATE TABLE $tbl (id1 bigint, id2 bigint, data string) " + + s"$defaultUsing PARTITIONED BY (id1, id2)") Review Comment: nit: I think tests in the base suite should always partition by the ending columns. We can add a new simple test in v1 suite to prove that partition columns are always at the end, and a new simple test in v2 to prove that we respect the original table schema. I think the v1 behavior is probably a bug, but we may never fix it as it becomes a feature :( -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org