Github user jinxing64 commented on a diff in the pull request: https://github.com/apache/spark/pull/19602#discussion_r192312477 --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala --- @@ -207,65 +271,68 @@ class HiveClientSuite(version: String) } private def testMetastorePartitionFiltering( - filterString: String, + table: String, + filterExpr: Expression, expectedDs: Seq[Int], expectedH: Seq[Int], expectedChunks: Seq[String]): Unit = { testMetastorePartitionFiltering( - filterString, - (expectedDs, expectedH, expectedChunks) :: Nil, + table, + filterExpr, + Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) :: Nil, identity) } private def testMetastorePartitionFiltering( - filterString: String, + table: String, + filterExpr: Expression, expectedDs: Seq[Int], expectedH: Seq[Int], expectedChunks: Seq[String], transform: Expression => Expression): Unit = { testMetastorePartitionFiltering( - filterString, - (expectedDs, expectedH, expectedChunks) :: Nil, + table, + filterExpr, + Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) :: Nil, identity) } private def testMetastorePartitionFiltering( - filterString: String, - expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])]): Unit = { - testMetastorePartitionFiltering(filterString, expectedPartitionCubes, identity) + table: String, + filterExpr: Expression, + expectedPartitionCubes: Seq[Map[String, Seq[Any]]]): Unit = { + testMetastorePartitionFiltering(table, filterExpr, expectedPartitionCubes, identity) } private def testMetastorePartitionFiltering( - filterString: String, - expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])], + table: String, + filterExpr: Expression, + expectedPartitionCubes: Seq[Map[String, Seq[Any]]], --- End diff -- With this change, the number of partition names in `expectedPartitionCubes` no longer has to be 3. The schema of `expectedPartitionCubes` is now `Seq[Map[partition name, partition values]]`.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org