GitHub user attilapiros commented on a diff in the pull request: https://github.com/apache/spark/pull/20249#discussion_r161487862 --- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala --- @@ -1869,6 +1869,65 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils { } } + test("SPARK-23057: SET LOCATION for managed table with partition") { + withTable("tbl_partition") { + withTempDir { dir => + sql("CREATE TABLE tbl_partition(col1 INT, col2 INT) USING parquet PARTITIONED BY (col1)") + sql("INSERT INTO tbl_partition PARTITION(col1=1) SELECT 11") + sql("INSERT INTO tbl_partition PARTITION(col1=2) SELECT 22") + checkAnswer(spark.table("tbl_partition"), Seq(Row(11, 1), Row(22, 2))) + val defaultTablePath = spark.sessionState.catalog + .getTableMetadata(TableIdentifier("tbl_partition")).storage.locationUri.get + try { + // before set location of partition col1 =1 and 2 + checkPath(defaultTablePath.toString, Map("col1" -> "1"), "tbl_partition") + checkPath(defaultTablePath.toString, Map("col1" -> "2"), "tbl_partition") + val path = dir.getCanonicalPath + + // set location of partition col1 =1 + sql(s"ALTER TABLE tbl_partition PARTITION (col1='1') SET LOCATION '$path'") + checkPath(dir.getCanonicalPath, Map("col1" -> "1"), "tbl_partition") + checkPath(defaultTablePath.toString, Map("col1" -> "2"), "tbl_partition") + + // set location of partition col1 =2 + sql(s"ALTER TABLE tbl_partition PARTITION (col1='2') SET LOCATION '$path'") + checkPath(dir.getCanonicalPath, Map("col1" -> "1"), "tbl_partition") + checkPath(dir.getCanonicalPath, Map("col1" -> "2"), "tbl_partition") + + spark.catalog.refreshTable("tbl_partition") + // SET LOCATION won't move data from previous table path to new table path. + assert(spark.table("tbl_partition").count() == 0) --- End diff -- To me, this assert verifies that the new location is empty (i.e., the old data was left behind at the previous path), which is not what the comment above it describes.
I would suggest checking directly whether the new location directory is empty: ```scala assert(dir.listFiles().isEmpty) ```
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org