sujith71955 commented on a change in pull request #24903: [SPARK-28084][SQL] Resolving the partition column name based on the resolver in sql load command URL: https://github.com/apache/spark/pull/24903#discussion_r305624456
########## File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala ########## @@ -290,6 +291,44 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto checkAnswer( sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '1'"), sql("SELECT * FROM non_part_table").collect()) + + // check for case insensitive property of partition column name in load command. + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + withInputFile { f => + sql(s"""$loadQuery INPATH "${f.toURI}" INTO TABLE part_table PARTITION(C="1", D="2")""") + } + checkAnswer( + sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '1'"), + sql("SELECT * FROM non_part_table").collect()) + } + } + } + + test("SPARK-28084 case insensitive property of partition column name in insert command") { + // check for case insensitive property of partition column name in insert command. + withTable("part_table") { + withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { + sql("CREATE TABLE part_table (price int, qty int) partitioned by (year int, month int)") + sql("INSERT INTO part_table PARTITION(YEar = 2015, month = 1) SELECT 1, 1") + checkAnswer( + sql("SELECT * FROM part_table"), + Row(1, 1, 2015, 1)) + } + } + } + + test("SPARK-28084 case insensitive property of partition column name " + + "in insert command - dynamic partition") { Review comment: you are right, thanks for the suggestion :) ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: users@infra.apache.org With regards, Apache Git Services --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org