MaxGekk commented on a change in pull request #31979:
URL: https://github.com/apache/spark/pull/31979#discussion_r602906304



##########
File path: sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationSuite.scala
##########
@@ -528,4 +530,65 @@ class HiveScriptTransformationSuite extends BaseScriptTransformationSuite with T
     checkAnswer(query2, identity, Row("\\N,\\N,\\N") :: Nil)
 
   }
+
+  test("SPARK-34879: HiveInspector supports DayTimeIntervalType and 
YearMonthIntervalType") {
+    assume(TestUtils.testCommandAvailable("/bin/bash"))
+    withTempView("v") {
+      val df = Seq(
+        (Duration.ofDays(1),
+          Duration.ofSeconds(100).plusNanos(123456),
+          Duration.ofSeconds(Long.MaxValue / DateTimeConstants.MICROS_PER_SECOND),
+          Period.ofMonths(10)),
+        (Duration.ofDays(1),
+          Duration.ofSeconds(100).plusNanos(1123456789),
+          Duration.ofSeconds(Long.MaxValue / DateTimeConstants.MICROS_PER_SECOND),
+          Period.ofMonths(10))
+      ).toDF("a", "b", "c", "d")
+        .select('a, 'b, 'c.cast(DayTimeIntervalType).as("c_1"), 'd)
+      df.createTempView("v")
+
+      // Hive serde supports DayTimeIntervalType/YearMonthIntervalType as input and output data type
+      checkAnswer(
+        df,
+        (child: SparkPlan) => createScriptTransformationExec(
+          input = Seq(
+            df.col("a").expr,
+            df.col("b").expr,
+            df.col("c_1").expr,
+            df.col("d").expr),
+          script = "cat",
+          output = Seq(
+            AttributeReference("a", DayTimeIntervalType)(),
+            AttributeReference("b", DayTimeIntervalType)(),
+            AttributeReference("c_1", DayTimeIntervalType)(),
+            AttributeReference("d", YearMonthIntervalType)()),
+          child = child,
+          ioschema = hiveIOSchema),
+        df.select('a, 'b, 'c_1, 'd).collect())
+    }
+  }
+
+  test("SPARK-34879: HiveInceptor throw overflow when" +
+    " HiveIntervalDayTime overflow then DayTimeIntervalType") {
+    withTempView("v") {
+      val df = Seq(
+        ("579025220 15:30:06.000001000")
+      ).toDF("a")
+      df.createTempView("v")
+
+      val e = intercept[Exception] {
+        checkAnswer(
+          df,
+          (child: SparkPlan) => createScriptTransformationExec(
+            input = Seq(
+              df.col("a").expr),

Review comment:
       nit:
   ```suggestion
               input = Seq(df.col("a").expr),
   ```
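
   For context, here is a minimal sketch (not part of the PR) of why the literal in the overflow test cannot fit into DayTimeIntervalType, assuming that type is backed by a signed 64-bit count of microseconds; the object and constant names below are illustrative only:

   ```scala
   // Illustrative only: shows why "579025220 15:30:06.000001000" overflows a
   // microsecond-backed Long, under the assumption that DayTimeIntervalType
   // stores the interval as Long microseconds.
   object IntervalOverflowSketch {
     private val MicrosPerSecond = 1000000L
     private val SecondsPerDay = 86400L

     def main(args: Array[String]): Unit = {
       // Largest whole number of days representable as Long microseconds.
       val maxDays = Long.MaxValue / MicrosPerSecond / SecondsPerDay
       println(s"max representable days: $maxDays")   // roughly 106,751,991
       println("days in the test input:  579025220")  // far beyond the maximum,
                                                      // so the conversion from
                                                      // HiveIntervalDayTime must
                                                      // overflow
     }
   }
   ```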



