Github user yhuai commented on a diff in the pull request: https://github.com/apache/spark/pull/12871#discussion_r62296164 --- Diff: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala --- @@ -488,6 +491,79 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac assert(catalog.listFunctions("db2", "func*").toSet == Set("func1", "func2")) } + // -------------------------------------------------------------------------- + // File System operations + // -------------------------------------------------------------------------- + + private def exists(uri: String, children: String*): Boolean = { + val base = new File(new URI(uri)) + logWarning(base.listFiles().toSeq.map(_.getAbsolutePath).mkString("\n###")) + val f = children.foldLeft(base) { + case (parent, child) => new File(parent, child) + } + logWarning("exists? " + f.getAbsolutePath) + f.exists() + } + + test("create/drop database should create/delete the directory") { + val catalog = newBasicCatalog() + val db = newDb("mydb") + catalog.createDatabase(db, ignoreIfExists = false) + assert(exists(db.locationUri)) + + catalog.dropDatabase("mydb", ignoreIfNotExists = false, cascade = false) + assert(!exists(db.locationUri)) + } + + test("create/drop/rename table should create/delete/rename the directory") { + val catalog = newBasicCatalog() + val db = catalog.getDatabase("db1") + val table = CatalogTable( + identifier = TableIdentifier("myTable", Some("db1")), + tableType = CatalogTableType.MANAGED, + storage = CatalogStorageFormat(None, None, None, None, false, Map.empty), + schema = Seq(CatalogColumn("a", "int"), CatalogColumn("b", "string")) + ) + + catalog.createTable("db1", table, ignoreIfExists = false) + assert(exists(db.locationUri, "myTable")) + + catalog.renameTable("db1", "myTable", "yourTable") + assert(!exists(db.locationUri, "myTable")) + assert(exists(db.locationUri, "yourTable")) + + catalog.dropTable("db1", "yourTable", 
ignoreIfNotExists = false) + assert(!exists(db.locationUri, "yourTable")) + } + + test("create/drop/rename partitions should create/delete/rename the directory") { + val catalog = newBasicCatalog() + val databaseDir = catalog.getDatabase("db1").locationUri + val table = CatalogTable( + identifier = TableIdentifier("myTable", Some("db1")), + tableType = CatalogTableType.MANAGED, + storage = CatalogStorageFormat(None, None, None, None, false, Map.empty), + schema = Seq( + CatalogColumn("col1", "int"), + CatalogColumn("col2", "string"), + CatalogColumn("a", "int"), + CatalogColumn("b", "string")), + partitionColumnNames = Seq("a", "b") + ) + catalog.createTable("db1", table, ignoreIfExists = false) + + catalog.createPartitions("db1", "myTable", Seq(part1, part2), ignoreIfExists = false) + assert(exists(databaseDir, "myTable", "a=1", "b=2")) + assert(exists(databaseDir, "myTable", "a=3", "b=4")) + + catalog.renamePartitions("db1", "myTable", Seq(part1.spec), Seq(part3.spec)) + assert(!exists(databaseDir, "myTable", "a=1", "b=2")) + assert(exists(databaseDir, "myTable", "a=5", "b=6")) + + catalog.dropPartitions("db1", "myTable", Seq(part2.spec, part3.spec), ignoreIfNotExists = false) + assert(!exists(databaseDir, "myTable", "a=3", "b=4")) + assert(!exists(databaseDir, "myTable", "a=5", "b=6")) + } --- End diff -- Also test the case when partition has a user-specified location?
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org