This is an automated email from the ASF dual-hosted git repository. gurwls223 pushed a commit to branch branch-3.0 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push: new 43d8d54 [SPARK-31361][SQL][FOLLOWUP] Use LEGACY_PARQUET_REBASE_DATETIME_IN_READ instead of avro config in ParquetIOSuite 43d8d54 is described below commit 43d8d54e8beab25dbdf75ca93943f774b93297ea Author: Max Gekk <max.g...@gmail.com> AuthorDate: Thu May 7 09:46:42 2020 +0900 [SPARK-31361][SQL][FOLLOWUP] Use LEGACY_PARQUET_REBASE_DATETIME_IN_READ instead of avro config in ParquetIOSuite ### What changes were proposed in this pull request? Replace the Avro SQL config `LEGACY_AVRO_REBASE_DATETIME_IN_READ` with `LEGACY_PARQUET_REBASE_DATETIME_IN_READ` in `ParquetIOSuite`. ### Why are the changes needed? The Avro config is not relevant to the Parquet tests. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? By running `ParquetIOSuite` via ``` ./build/sbt "test:testOnly *ParquetIOSuite" ``` Closes #28461 from MaxGekk/fix-conf-in-ParquetIOSuite. Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: HyukjinKwon <gurwls...@apache.org> (cherry picked from commit 3d38bc2605ab01d61127c09e1bf6ed6a6683ed3e) Signed-off-by: HyukjinKwon <gurwls...@apache.org> --- .../spark/sql/execution/datasources/parquet/ParquetIOSuite.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala index 239db7d..7f0a228 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetIOSuite.scala @@ -955,7 +955,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSparkSession // The file metadata indicates if it needs rebase or not, so we can always get the // correct result regardless of the "rebaseInRead" config. 
Seq(true, false).foreach { rebase => - withSQLConf(SQLConf.LEGACY_AVRO_REBASE_DATETIME_IN_READ.key -> rebase.toString) { + withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_DATETIME_IN_READ.key -> rebase.toString) { checkAnswer(spark.read.parquet(path), Row(Timestamp.valueOf(tsStr))) } } @@ -984,7 +984,7 @@ class ParquetIOSuite extends QueryTest with ParquetTest with SharedSparkSession // The file metadata indicates if it needs rebase or not, so we can always get the correct // result regardless of the "rebaseInRead" config. Seq(true, false).foreach { rebase => - withSQLConf(SQLConf.LEGACY_AVRO_REBASE_DATETIME_IN_READ.key -> rebase.toString) { + withSQLConf(SQLConf.LEGACY_PARQUET_REBASE_DATETIME_IN_READ.key -> rebase.toString) { checkAnswer(spark.read.parquet(path), Row(Date.valueOf("1001-01-01"))) } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org