GitHub user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21667#discussion_r201753187
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/FileBasedDataSourceSuite.scala ---
    @@ -276,44 +334,31 @@ class FileBasedDataSourceSuite extends QueryTest with SharedSQLContext with Befo
             }.getMessage
             assert(msg.contains("Cannot save interval data type into external storage."))
     
    -        msg = intercept[UnsupportedOperationException] {
    +        msg = intercept[AnalysisException] {
               spark.udf.register("testType", () => new IntervalData())
               sql("select testType()").write.format(format).mode("overwrite").save(tempDir)
             }.getMessage
             assert(msg.toLowerCase(Locale.ROOT)
    -          .contains(s"$format data source does not support calendarinterval data type."))
    +          .contains(s"$format data source does not support interval data type."))
           }
     
           // read path
           Seq("parquet", "csv").foreach { format =>
    -        var msg = intercept[UnsupportedOperationException] {
    +        var msg = intercept[AnalysisException] {
               val schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil)
               spark.range(1).write.format(format).mode("overwrite").save(tempDir)
               spark.read.schema(schema).format(format).load(tempDir).collect()
             }.getMessage
             assert(msg.toLowerCase(Locale.ROOT)
               .contains(s"$format data source does not support calendarinterval data type."))
     
    -        msg = intercept[UnsupportedOperationException] {
    +        msg = intercept[AnalysisException] {
               val schema = StructType(StructField("a", new IntervalUDT(), true) :: Nil)
               spark.range(1).write.format(format).mode("overwrite").save(tempDir)
               spark.read.schema(schema).format(format).load(tempDir).collect()
             }.getMessage
             assert(msg.toLowerCase(Locale.ROOT)
    -          .contains(s"$format data source does not support calendarinterval data type."))
    -      }
    -
    -      // We expect the types below should be passed for backward-compatibility
    -      Seq("orc", "json").foreach { format =>
    -        // Interval type
    -        var schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil)
    -        spark.range(1).write.format(format).mode("overwrite").save(tempDir)
    -        spark.read.schema(schema).format(format).load(tempDir).collect()
    --- End diff ---
    
    When the user-specified schema doesn't match the physical schema,
    the behavior is undefined. So I don't think this is about backward
    compatibility; +1 to forbidding the interval type.
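
    Concretely, the mismatch in question looks like the minimal sketch
    below, which mirrors the deleted test (assuming a running SparkSession
    named `spark`; the "orc" format and the /tmp path are illustrative
    placeholders, not part of this PR):

    import org.apache.spark.sql.types._

    // The physical schema on disk is a single LongType column written by range().
    spark.range(1).write.format("orc").mode("overwrite").save("/tmp/interval_test")

    // A user-specified schema whose column type does not match the physical
    // LongType column. Reading with it is undefined behavior; with this PR the
    // interval type is rejected up front with an AnalysisException instead.
    val schema = StructType(StructField("a", CalendarIntervalType, true) :: Nil)
    spark.read.schema(schema).format("orc").load("/tmp/interval_test").collect()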

