Github user jinxing64 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19602#discussion_r192550486
  
    --- Diff: sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala ---
    @@ -59,38 +61,62 @@ class HiveClientSuite(version: String)
             "h" -> h.toString,
             "chunk" -> chunk
           ), storageFormat)
    -    assert(partitions.size == testPartitionCount)
    +    assert(partitions0.size == testPartitionCount0)
     
         client.createPartitions(
    -      "default", "test", partitions, ignoreIfExists = false)
    +      "default", "test0", partitions0, ignoreIfExists = false)
    +
    +    val partitions1 =
    +      for {
    +        pt <- 0 until 10
    +        chunk <- Seq("aa", "ab", "ba", "bb")
    +      } yield CatalogTablePartition(Map(
    +        "pt" -> pt.toString,
    +        "chunk" -> chunk
    +      ), storageFormat)
    +    assert(partitions1.size == testPartitionCount1)
    +
    +    client.createPartitions(
    +      "default", "test1", partitions1, ignoreIfExists = false)
    +
         client
       }
     
    +  private def pAttr(table: String, name: String): Attribute = {
    +    val partTypes = client.getTable("default", table).partitionSchema.fields
    +        .map(field => (field.name, field.dataType)).toMap
    +    partTypes.get(name) match {
    +      case Some(dt) => AttributeReference(name, dt)()
    +      case None =>
    +        fail(s"Illegal name of partition attribute: $name")
    +    }
    +  }
    +
       override def beforeAll() {
         super.beforeAll()
         client = init(true)
       }
     
       test(s"getPartitionsByFilter returns all partitions when 
$tryDirectSqlKey=false") {
         val client = init(false)
    -    val filteredPartitions = client.getPartitionsByFilter(client.getTable("default", "test"),
    -      Seq(parseExpression("ds=20170101")))
    +    val filteredPartitions = client.getPartitionsByFilter(client.getTable("default", "test0"),
    +      Seq(EqualTo(pAttr("test0", "ds"), Literal(20170101, IntegerType))))
    --- End diff ---
    
    Thanks, with `org.apache.spark.sql.catalyst.dsl.expressions._`, the code can be much cleaner.
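
    For illustration, a minimal sketch (not part of this PR) of what the filter could look like with that import, assuming the `ds` partition column is an integer: `'ds.int` and `===` are the DSL shorthands for building an `AttributeReference` and an `EqualTo`, and the `Int` value is implicitly wrapped in a `Literal`.

    ```scala
    import org.apache.spark.sql.catalyst.dsl.expressions._
    import org.apache.spark.sql.catalyst.expressions.Expression

    // DSL equivalent of EqualTo(pAttr("test0", "ds"), Literal(20170101, IntegerType)),
    // assuming "ds" has IntegerType; `dsFilter` is just an illustrative name.
    val dsFilter: Expression = 'ds.int === 20170101
    // i.e. EqualTo(AttributeReference("ds", IntegerType)(), Literal(20170101))
    ```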

