dongjoon-hyun commented on a change in pull request #29427: URL: https://github.com/apache/spark/pull/29427#discussion_r470396482
########## File path: sql/core/v1.2/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcFilterSuite.scala ########## @@ -513,5 +513,98 @@ class OrcFilterSuite extends OrcTest with SharedSparkSession { ).get.toString } } + + test("SPARK-25557: case sensitivity in predicate pushdown") { + withTempPath { dir => + val count = 10 + val tableName = "spark_25557" + val tableDir1 = dir.getAbsoluteFile + "/table1" + + // Physical ORC files have both `A` and `a` fields. + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { + spark.range(count).repartition(count).selectExpr("id - 1 as A", "id as a") + .write.mode("overwrite").orc(tableDir1) + } + + // Metastore table has both `A` and `a` fields too. + withTable(tableName) { + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { + sql( + s""" + |CREATE TABLE $tableName (A LONG, a LONG) USING ORC LOCATION '$tableDir1' + """.stripMargin) + + checkAnswer(sql(s"select a, A from $tableName"), (0 until count).map(c => Row(c, c - 1))) + + val actual1 = stripSparkFilter(sql(s"select A from $tableName where A < 0")) + assert(actual1.count() == 1) + + val actual2 = stripSparkFilter(sql(s"select A from $tableName where a < 0")) + assert(actual2.count() == 0) + } + + // Exception thrown for ambiguous case. + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + val e = intercept[AnalysisException] { + sql(s"select a from $tableName where a < 0").collect() + } + assert(e.getMessage.contains( + "Reference 'a' is ambiguous")) + } + } + + // Metastore table has only `A` field. 
+ withTable(tableName) { + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + sql( + s""" + |CREATE TABLE $tableName (A LONG) USING ORC LOCATION '$tableDir1' + """.stripMargin) + + val e = intercept[SparkException] { + sql(s"select A from $tableName where A < 0").collect() + } + assert(e.getCause.isInstanceOf[RuntimeException] && e.getCause.getMessage.contains( + """Found duplicate field(s) "A": [A, a] in case-insensitive mode""")) + } + } + + // Physical ORC files have only `A` field. + val tableDir2 = dir.getAbsoluteFile + "/table2" + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { + spark.range(count).repartition(count).selectExpr("id - 1 as A") + .write.mode("overwrite").orc(tableDir2) + } + + withTable(tableName) { + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + sql( + s""" + |CREATE TABLE $tableName (a LONG) USING ORC LOCATION '$tableDir2' + """.stripMargin) + + checkAnswer(sql(s"select a from $tableName"), (0 until count).map(c => Row(c - 1))) + + val actual = stripSparkFilter(sql(s"select a from $tableName where a < 0")) + // TODO: ORC predicate pushdown should work under case-insensitive analysis. + // assert(actual.count() == 1) Review comment: Can we have this non-nested test case on `branch-3.0`, too? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org