Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22313#discussion_r214810021
  
    --- Diff: 
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilters.scala
 ---
    @@ -54,27 +55,27 @@ import org.apache.spark.sql.types._
      * builder methods mentioned above can only be found in test code, where 
all tested filters are
      * known to be convertible.
      */
    -private[orc] object OrcFilters {
    +private[sql] object OrcFilters {
    +  private[sql] def buildTree(filters: Seq[Filter]): Option[Filter] = {
    +    filters match {
    +      case Seq() => None
    +      case Seq(filter) => Some(filter)
    +      case Seq(filter1, filter2) => Some(And(filter1, filter2))
    +      case _ => // length > 2
    +        val (left, right) = filters.splitAt(filters.length / 2)
    +        Some(And(buildTree(left).get, buildTree(right).get))
    +    }
    +  }
     
       /**
        * Create ORC filter as a SearchArgument instance.
        */
       def createFilter(schema: StructType, filters: Seq[Filter]): 
Option[SearchArgument] = {
         val dataTypeMap = schema.map(f => f.name -> f.dataType).toMap
     
    -    // First, tries to convert each filter individually to see whether 
it's convertible, and then
    -    // collect all convertible ones to build the final `SearchArgument`.
    -    val convertibleFilters = for {
    -      filter <- filters
    -      _ <- buildSearchArgument(dataTypeMap, filter, 
SearchArgumentFactory.newBuilder())
    -    } yield filter
    -
    -    for {
    -      // Combines all convertible filters using `And` to produce a single 
conjunction
    -      conjunction <- 
convertibleFilters.reduceOption(org.apache.spark.sql.sources.And)
    -      // Then tries to build a single ORC `SearchArgument` for the 
conjunction predicate
    -      builder <- buildSearchArgument(dataTypeMap, conjunction, 
SearchArgumentFactory.newBuilder())
    -    } yield builder.build()
    +    buildTree(filters.filter(buildSearchArgument(dataTypeMap, _, 
newBuilder).isDefined))
    +      .flatMap(buildSearchArgument(dataTypeMap, _, newBuilder))
    +      .map(_.build)
    --- End diff --
    
    Ah, I see what you mean now. Can we restore the previous version? That 
seems better. Sorry for the back and forth!


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to