Github user jinxing64 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19602#discussion_r192550679
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala ---
    @@ -207,65 +271,68 @@ class HiveClientSuite(version: String)
       }
     
       private def testMetastorePartitionFiltering(
    -      filterString: String,
    +      table: String,
    +      filterExpr: Expression,
           expectedDs: Seq[Int],
           expectedH: Seq[Int],
           expectedChunks: Seq[String]): Unit = {
         testMetastorePartitionFiltering(
    -      filterString,
    -      (expectedDs, expectedH, expectedChunks) :: Nil,
    +      table,
    +      filterExpr,
    +      Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) 
:: Nil,
           identity)
       }
     
       private def testMetastorePartitionFiltering(
    -      filterString: String,
    +      table: String,
    +      filterExpr: Expression,
           expectedDs: Seq[Int],
           expectedH: Seq[Int],
           expectedChunks: Seq[String],
           transform: Expression => Expression): Unit = {
         testMetastorePartitionFiltering(
    -      filterString,
    -      (expectedDs, expectedH, expectedChunks) :: Nil,
    +      table,
    +      filterExpr,
    +      Map("ds" -> expectedDs, "h" -> expectedH, "chunk" -> expectedChunks) 
:: Nil,
           identity)
       }
     
       private def testMetastorePartitionFiltering(
    -      filterString: String,
    -      expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])]): Unit = {
    -    testMetastorePartitionFiltering(filterString, expectedPartitionCubes, identity)
    +      table: String,
    +      filterExpr: Expression,
    +      expectedPartitionCubes: Seq[Map[String, Seq[Any]]]): Unit = {
    +    testMetastorePartitionFiltering(table, filterExpr, expectedPartitionCubes, identity)
       }
     
       private def testMetastorePartitionFiltering(
    -      filterString: String,
    -      expectedPartitionCubes: Seq[(Seq[Int], Seq[Int], Seq[String])],
    +      table: String,
    +      filterExpr: Expression,
    +      expectedPartitionCubes: Seq[Map[String, Seq[Any]]],
           transform: Expression => Expression): Unit = {
    -    val filteredPartitions = client.getPartitionsByFilter(client.getTable("default", "test"),
    +    val filteredPartitions = client.getPartitionsByFilter(client.getTable("default", table),
           Seq(
    -        transform(parseExpression(filterString))
    +        transform(filterExpr)
           ))
     
    -    val expectedPartitionCount = expectedPartitionCubes.map {
    -      case (expectedDs, expectedH, expectedChunks) =>
    -        expectedDs.size * expectedH.size * expectedChunks.size
    -    }.sum
    -
    -    val expectedPartitions = expectedPartitionCubes.map {
    -      case (expectedDs, expectedH, expectedChunks) =>
    -        for {
    -          ds <- expectedDs
    -          h <- expectedH
    -          chunk <- expectedChunks
    -        } yield Set(
    -          "ds" -> ds.toString,
    -          "h" -> h.toString,
    -          "chunk" -> chunk
    -        )
    -    }.reduce(_ ++ _)
    +    val expectedPartitionCount = expectedPartitionCubes.map(_.map(_._2.size).product).sum
    +
    +    val expectedPartitions = expectedPartitionCubes.map(getPartitionsFromCube(_)).reduce(_ ++ _)
     
         val actualFilteredPartitionCount = filteredPartitions.size
     
         assert(actualFilteredPartitionCount == expectedPartitionCount,
           s"Expected $expectedPartitionCount partitions but got 
$actualFilteredPartitionCount")
    -    assert(filteredPartitions.map(_.spec.toSet).toSet == 
expectedPartitions.toSet)
    +    assert(filteredPartitions.map(_.spec).toSet == 
expectedPartitions.toSet)
    +  }
    +
    +  private def getPartitionsFromCube(cube: Map[String, Seq[Any]]): Seq[Map[String, String]] = {
    +    cube.map {
    +      case (k: String, pts: Seq[Any]) => pts.map(pt => (k, pt.toString))
    +    }.foldLeft(Seq(Seq[(String, String)]()))((seq0, seq1) => {
    --- End diff ---
    
    Hmm, it's a recursion problem. I tried using a loop with mutable state directly, but that didn't seem to make it any more readable.
    
    In the current change, I extracted a `PartitionSpec` type and added a comment. I think it's better now?
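    
    For anyone following along, here is a minimal, self-contained sketch of the cross-product expansion this helper performs. It assumes the cube shape from the diff above; the `CubeExpansionSketch` object, the `PartitionSpec` alias, and the sample values are illustrative only, not the exact code in this PR:
    
    ```scala
    object CubeExpansionSketch {
      // Hypothetical alias mirroring the PartitionSpec type mentioned above.
      type PartitionSpec = Map[String, String]
    
      // Expand a "cube" (each key mapped to its candidate values) into all
      // concrete partition specs, i.e. the cross product that picks one
      // (key, value) pair per key.
      def getPartitionsFromCube(cube: Map[String, Seq[Any]]): Seq[PartitionSpec] = {
        cube
          .map { case (k, pts) => pts.map(pt => (k, pt.toString)) } // one axis per key
          .foldLeft(Seq(Seq.empty[(String, String)])) { (acc, axis) =>
            // Cross every partial spec built so far with every value on this axis.
            for {
              partial <- acc
              kv <- axis
            } yield partial :+ kv
          }
          .map(_.toMap)
      }
    
      def main(args: Array[String]): Unit = {
        val cube = Map("ds" -> Seq(20170101, 20170102), "h" -> Seq(0, 1))
        getPartitionsFromCube(cube).foreach(println) // prints 2 x 2 = 4 specs
      }
    }
    ```
    
    The fold threads the accumulated partial specs through each key's value list, which is the same shape as the recursion discussed here, just expressed iteratively.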


---
