sadikovi commented on a change in pull request #34495:
URL: https://github.com/apache/spark/pull/34495#discussion_r744323273



##########
File path: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetQuerySuite.scala
##########
@@ -158,6 +158,48 @@ abstract class ParquetQuerySuite extends QueryTest with ParquetTest with SharedS
     }
   }
 
+  test("SPARK-36182: writing and reading TimestampNTZType column") {
+    withTable("ts") {
+      sql("create table ts (c1 timestamp_ntz) using parquet")
+      sql("insert into ts values (timestamp_ntz'2016-01-01 10:11:12.123456')")
+      sql("insert into ts values (null)")
+      sql("insert into ts values (timestamp_ntz'1965-01-01 10:11:12.123456')")
+      val expectedSchema = new StructType().add(StructField("c1", TimestampNTZType))
+      assert(spark.table("ts").schema == expectedSchema)
+      val expected = Seq(
+        ("2016-01-01 10:11:12.123456"),
+        (null),
+        ("1965-01-01 10:11:12.123456"))
+        .toDS().select($"value".cast("timestamp_ntz"))
+      checkAnswer(sql("select * from ts"), expected)
+    }
+  }
+
+  test("SPARK-36182: can't read TimestampLTZ as TimestampNTZ") {

Review comment:
       Can we add a test for reading TimestampNTZ as TimestampLTZ?
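       Something like this maybe (just a rough sketch, assuming the suite's existing `withTempPath` helper; the exact assertion depends on whether we want a reinterpreted value or an error):
       ```scala
       test("SPARK-36182: reading TimestampNTZ column as TimestampLTZ") {
         withTempPath { dir =>
           val path = dir.getCanonicalPath
           // Write a file with a TimestampNTZ column.
           sql("select timestamp_ntz'2016-01-01 10:11:12.123456' as c1").write.parquet(path)
           // Read it back with a TimestampLTZ (TimestampType) schema.
           val ltzSchema = new StructType().add("c1", TimestampType)
           val df = spark.read.schema(ltzSchema).parquet(path)
           // Placeholder check: replace with checkAnswer(...) or intercept[...] once the
           // intended semantics (session-time-zone reinterpretation vs. error) is settled.
           df.collect()
         }
       }
       ```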

##########
File path: sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
##########
@@ -179,6 +183,21 @@ boolean isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit unit) {
      ((TimestampLogicalTypeAnnotation) logicalTypeAnnotation).getUnit() == unit;
   }
 
+  void validateTimestampType(DataType sparkType) {
+    assert(logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation);
+    // Throw an exception if the Parquet type is TimestampLTZ and the Catalyst type is TimestampNTZ.
+    // This is to avoid mistakes in reading the timestamp values.
+    if (((TimestampLogicalTypeAnnotation) logicalTypeAnnotation).isAdjustedToUTC() &&
+      sparkType == DataTypes.TimestampNTZType) {
+      converterErrorForTimestampNTZ("int64 time(" + logicalTypeAnnotation + ")");
+    }
+  }
+
+  void converterErrorForTimestampNTZ(String parquetType) {

Review comment:
       nit: `convertErrorForTimestampNTZ`?

##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala
##########
@@ -370,6 +370,31 @@ private[parquet] class ParquetRowConverter(
           }
         }
 
+      // The converter doesn't support the TimestampLTZ Parquet type and TimestampNTZ Catalyst type.
+      // This is to avoid mistakes in reading the timestamp values.
+      case TimestampNTZType
+        if parquetType.asPrimitiveType().getPrimitiveTypeName == INT64 &&
+          parquetType.getLogicalTypeAnnotation.isInstanceOf[TimestampLogicalTypeAnnotation] &&
+          !parquetType.getLogicalTypeAnnotation
+            .asInstanceOf[TimestampLogicalTypeAnnotation].isAdjustedToUTC &&
+          parquetType.getLogicalTypeAnnotation
+            .asInstanceOf[TimestampLogicalTypeAnnotation].getUnit == TimeUnit.MICROS =>
+        new ParquetPrimitiveConverter(updater)
+
+      case TimestampNTZType
+        if parquetType.asPrimitiveType().getPrimitiveTypeName == INT64 &&
+          parquetType.getLogicalTypeAnnotation.isInstanceOf[TimestampLogicalTypeAnnotation] &&
+          !parquetType.getLogicalTypeAnnotation
+            .asInstanceOf[TimestampLogicalTypeAnnotation].isAdjustedToUTC &&
+          parquetType.getLogicalTypeAnnotation
+            .asInstanceOf[TimestampLogicalTypeAnnotation].getUnit == TimeUnit.MILLIS =>

Review comment:
       I think this one is for millisecond conversion, while the one above is for microsecond conversion.
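       For what it's worth, the practical difference is only the scaling into Spark's internal microsecond representation; a tiny illustration (not the actual converter code, names made up):
       ```scala
       // TIMESTAMP(MILLIS) int64 values need rescaling to microseconds,
       // TIMESTAMP(MICROS) values can be stored as-is.
       def millisToMicros(millis: Long): Long = Math.multiplyExact(millis, 1000L)

       val fromMillisColumn = millisToMicros(1451643072123L) // -> 1451643072123000L
       val fromMicrosColumn = 1451643072123456L              // already in microseconds
       ```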




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


