Github user dongjoon-hyun commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19651#discussion_r148932599
  
    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/orc/OrcFileOperator.scala ---
    @@ -87,15 +88,10 @@ private[hive] object OrcFileOperator extends Logging {
         getFileReader(path, conf).map(_.getObjectInspector.asInstanceOf[StructObjectInspector])
       }
     
    -  def listOrcFiles(pathStr: String, conf: Configuration): Seq[Path] = {
     -    // TODO: Check if the paths coming in are already qualified and simplify.
    -    val origPath = new Path(pathStr)
    -    val fs = origPath.getFileSystem(conf)
    -    val paths = SparkHadoopUtil.get.listLeafStatuses(fs, origPath)
    -      .filterNot(_.isDirectory)
    -      .map(_.getPath)
    -      .filterNot(_.getName.startsWith("_"))
    -      .filterNot(_.getName.startsWith("."))
    -    paths
    +  def setRequiredColumns(
    --- End diff --
    
    This is moved from [object `OrcFileFormat`](https://github.com/apache/spark/pull/19651/files#diff-01999ccbf13e95a0ea2d223f69d8ae23L265) inside `sql/hive`.
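    
    For context, the diff above is truncated right after the new `setRequiredColumns(` signature, so its body is not shown here. A minimal sketch of what such a helper typically does (ORC column pruning by recording the requested column ids and names in the Hadoop `Configuration` via the standard Hive read-column properties); the object name below is made up for illustration, and the actual body added by this PR may differ, e.g. by going through Spark's `HiveShim`:
    
    ```scala
    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.sql.types.StructType
    
    // Sketch only: illustrates the usual shape of such a column-pruning helper.
    object OrcColumnPruningSketch {
      def setRequiredColumns(
          conf: Configuration,
          physicalSchema: StructType,
          requestedSchema: StructType): Unit = {
        // Map each requested column to its ordinal in the physical (file) schema.
        val ids = requestedSchema.map(f => physicalSchema.fieldIndex(f.name))
        val (sortedIds, sortedNames) = ids.zip(requestedSchema.fieldNames).sorted.unzip
        // Standard Hive read-column properties consumed by the ORC record reader.
        conf.set("hive.io.file.readcolumn.ids", sortedIds.mkString(","))
        conf.set("hive.io.file.readcolumn.names", sortedNames.mkString(","))
      }
    }
    ```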

