Github user gatorsmile commented on a diff in the pull request: https://github.com/apache/spark/pull/15998#discussion_r91006034 --- Diff: sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalogSuite.scala --- @@ -346,6 +346,31 @@ abstract class ExternalCatalogSuite extends SparkFunSuite with BeforeAndAfterEac assert(new Path(partitionLocation) == defaultPartitionLocation) } + test("list partition names") { + val catalog = newBasicCatalog() + val newPart = CatalogTablePartition(Map("a" -> "1", "b" -> "%="), storageFormat) + catalog.createPartitions("db2", "tbl2", Seq(newPart), ignoreIfExists = false) + + val partitionNames = catalog.listPartitionNames("db2", "tbl2") + assert(partitionNames == Seq("a=1/b=%25%3D", "a=1/b=2", "a=3/b=4")) + } + + test("list partition names with partial partition spec") { + val catalog = newBasicCatalog() + val newPart = CatalogTablePartition(Map("a" -> "1", "b" -> "%="), storageFormat) + catalog.createPartitions("db2", "tbl2", Seq(newPart), ignoreIfExists = false) + + val partitionNames1 = catalog.listPartitionNames("db2", "tbl2", Some(Map("a" -> "1"))) + assert(partitionNames1 == Seq("a=1/b=%25%3D", "a=1/b=2")) --- End diff -- Yeah, I tried Hive 1.2. It actually returns the weird value. ``` hive> create table partTab (col1 int, col2 int) partitioned by (pcol1 String, pcol2 String); OK hive> insert into table partTab partition(pcol1='1', pcol2='2') select 3, 4 from dummy; OK hive> insert into table partTab partition(pcol1='1', pcol2='%=') select 3, 4 from dummy; OK hive> show partitions partTab; OK pcol1=1/pcol2=%25%3D pcol1=1/pcol2=2 hive> show partitions partTab PARTITION(pcol1=1); OK pcol1=1/pcol2=2 pcol1=1/pcol2=%25%3D ```
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org