This is an automated email from the ASF dual-hosted git repository.

nicholasjiang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 8f2b735  [KYUUBI #2186] Manage test failures with kyuubi spark nightly 
build - execute statement - select interval
8f2b735 is described below

commit 8f2b7358e47bb5f01989d61bb4eb16a4af9b64ee
Author: Kent Yao <[email protected]>
AuthorDate: Wed Mar 23 23:58:55 2022 +0800

    [KYUUBI #2186] Manage test failures with kyuubi spark nightly build - 
execute statement - select interval
    
    ### _Why are the changes needed?_
    
    In this PR, we handle the newly added day-time interval type
    
    TTypeId.INTERVAL_DAY_TIME_TYPE is used for compatibility with the Hive thrift type.
    The corresponding JDBC type is java.sql.Types.OTHER.
    
    The data is converted from a java.time.Duration to a Hive-compatible
string representation.
    
    ### _How was this patch tested?_
    - [x] Add some test cases that check the changes thoroughly including 
negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [x] [Run 
test](https://kyuubi.apache.org/docs/latest/develop_tools/testing.html#running-tests)
 locally before making a pull request
    
    Closes #2199 from yaooqinn/2186.
    
    Closes #2186
    
    408954ec [Kent Yao] fix
    11ad6311 [Kent Yao] interval day
    8c329250 [Kent Yao] interval day
    
    Authored-by: Kent Yao <[email protected]>
    Signed-off-by: SteNicholas <[email protected]>
---
 .../engine/spark/schema/IntervalQualifier.scala    | 66 ++++++++++++++++++++++
 .../apache/kyuubi/engine/spark/schema/RowSet.scala | 14 ++++-
 .../kyuubi/engine/spark/schema/SchemaHelper.scala  |  2 +
 .../apache/kyuubi/operation/SparkQueryTests.scala  | 50 ++++++++--------
 4 files changed, 107 insertions(+), 25 deletions(-)

diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/IntervalQualifier.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/IntervalQualifier.scala
new file mode 100644
index 0000000..13f5ff3
--- /dev/null
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/IntervalQualifier.scala
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kyuubi.engine.spark.schema
+
+import java.time.Duration
+import java.util.concurrent.TimeUnit
+
+object IntervalQualifier extends Enumeration {
+
+  type IntervalQualifier = Value
+
+  val DAY = new Val(0, "interval day")
+  val HOUR = new Val(1, "interval day to hour")
+  val MINUTE = new Val(2, "interval day to minute")
+  val SECOND = new Val(3, "interval day to second")
+  final private val SECOND_PER_MINUTE: Long = 60L
+  final private val SECOND_PER_HOUR: Long = SECOND_PER_MINUTE * 60L
+  final private val SECOND_PER_DAY: Long = SECOND_PER_HOUR * 24L
+
+  def toDayTimeIntervalString(d: Duration, iq: IntervalQualifier): String = {
+    var sign = ""
+    var rest = d.getSeconds
+    if (d.getSeconds < 0) {
+      sign = "-"
+      rest = -rest
+    }
+    iq match {
+      case DAY =>
+        val days = TimeUnit.SECONDS.toDays(rest)
+        s"$sign$days 00:00:00.000000000"
+      case HOUR =>
+        val days = TimeUnit.SECONDS.toDays(rest)
+        val hours = TimeUnit.SECONDS.toHours(rest % SECOND_PER_DAY)
+        f"$sign$days $hours%02d:00:00.000000000"
+      case MINUTE =>
+        val days = TimeUnit.SECONDS.toDays(rest)
+        rest %= SECOND_PER_DAY
+        val hours = TimeUnit.SECONDS.toHours(rest)
+        val minutes = TimeUnit.SECONDS.toMinutes(rest % SECOND_PER_HOUR)
+        f"$sign$days $hours%02d:$minutes%02d:00.000000000"
+      case SECOND =>
+        val days = TimeUnit.SECONDS.toDays(rest)
+        rest %= SECOND_PER_DAY
+        val hours = TimeUnit.SECONDS.toHours(rest)
+        rest %= SECOND_PER_HOUR
+        val minutes = TimeUnit.SECONDS.toMinutes(rest)
+        val seconds = rest % SECOND_PER_MINUTE
+        f"$sign$days $hours%02d:$minutes%02d:$seconds%02d.${d.getNano}%09d"
+    }
+  }
+}
diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
index 9823703..57367a8 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/RowSet.scala
@@ -20,7 +20,7 @@ package org.apache.kyuubi.engine.spark.schema
 import java.nio.ByteBuffer
 import java.nio.charset.StandardCharsets
 import java.sql.Timestamp
-import java.time.{Instant, LocalDate, ZoneId}
+import java.time.{Duration, Instant, LocalDate, ZoneId}
 import java.util.Date
 
 import scala.collection.JavaConverters._
@@ -29,6 +29,7 @@ import org.apache.hive.service.rpc.thrift._
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.types._
 
+import org.apache.kyuubi.engine.spark.schema.IntervalQualifier.{DAY, HOUR, 
MINUTE, SECOND}
 import org.apache.kyuubi.util.RowSetUtils._
 
 object RowSet {
@@ -254,6 +255,17 @@ object RowSet {
         // Only match string in nested type values
         "\"" + s + "\""
 
+      case (d: Duration, dt) =>
+        if (dt.simpleString == DAY.toString()) {
+          IntervalQualifier.toDayTimeIntervalString(d, DAY)
+        } else if (dt.simpleString == HOUR.toString()) {
+          IntervalQualifier.toDayTimeIntervalString(d, HOUR)
+        } else if (dt.simpleString == MINUTE.toString()) {
+          IntervalQualifier.toDayTimeIntervalString(d, MINUTE)
+        } else {
+          IntervalQualifier.toDayTimeIntervalString(d, SECOND)
+        }
+
       case (seq: scala.collection.Seq[_], ArrayType(typ, _)) =>
         seq.map(v => (v, typ)).map(e => toHiveString(e, 
timeZone)).mkString("[", ",", "]")
 
diff --git 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
index 4133b57..62cb7b1 100644
--- 
a/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
+++ 
b/externals/kyuubi-spark-sql-engine/src/main/scala/org/apache/kyuubi/engine/spark/schema/SchemaHelper.scala
@@ -41,6 +41,8 @@ object SchemaHelper {
     case TimestampType => TTypeId.TIMESTAMP_TYPE
     case BinaryType => TTypeId.BINARY_TYPE
     case CalendarIntervalType => TTypeId.STRING_TYPE
+    case dt if dt.simpleString.startsWith("interval day") =>
+      TTypeId.INTERVAL_DAY_TIME_TYPE
     case _: ArrayType => TTypeId.ARRAY_TYPE
     case _: MapType => TTypeId.MAP_TYPE
     case _: StructType => TTypeId.STRUCT_TYPE
diff --git 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
index ac33b7c..ed4fe76 100644
--- 
a/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
+++ 
b/kyuubi-common/src/test/scala/org/apache/kyuubi/operation/SparkQueryTests.scala
@@ -24,12 +24,10 @@ import scala.collection.JavaConverters._
 import org.apache.commons.lang3.StringUtils
 import org.apache.hive.service.rpc.thrift.{TExecuteStatementReq, 
TFetchResultsReq, TOpenSessionReq, TStatusCode}
 
-import org.apache.kyuubi.{KYUUBI_VERSION, Utils}
+import org.apache.kyuubi.KYUUBI_VERSION
 
 trait SparkQueryTests extends HiveJDBCTestHelper {
 
-  protected lazy val SPARK_ENGINE_MAJOR_MINOR_VERSION: (Int, Int) = 
sparkEngineMajorMinorVersion
-
   test("execute statement - select null") {
     withJdbcStatement() { statement =>
       val resultSet = statement.executeQuery("SELECT NULL AS col")
@@ -174,17 +172,32 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
     }
   }
 
-  test("execute statement - select interval") {
-    // FIXME: [KYUUBI #1250]
-    assume(SPARK_ENGINE_MAJOR_MINOR_VERSION !== ((3, 2)))
+  test("execute statement - select daytime interval") {
     withJdbcStatement() { statement =>
-      val resultSet = statement.executeQuery("SELECT interval '1' day AS col")
-      assert(resultSet.next())
-      assert(resultSet.getString("col") === "1 days")
-      assert(resultSet.getMetaData.getColumnType(1) === java.sql.Types.VARCHAR)
-      val metaData = resultSet.getMetaData
-      assert(metaData.getPrecision(1) === Int.MaxValue)
-      assert(metaData.getScale(1) === 0)
+      Map(
+        "-interval 2 day" -> "-2 00:00:00.000000000",
+        "-interval 200 day" -> "-200 00:00:00.000000000",
+        "interval 1 day 1 hour" -> "1 01:00:00.000000000",
+        "interval 1 day 1 hour -60 minutes" -> "1 00:00:00.000000000",
+        "interval 1 day 1 hour -60 minutes 30 seconds" -> "1 
00:00:30.000000000",
+        "interval 1 day 1 hour 59 minutes 30 seconds 12345 milliseconds" ->
+          "1 01:59:42.345000000").foreach { kv => // value -> result pair
+        val resultSet = statement.executeQuery(s"SELECT ${kv._1} AS col")
+        assert(resultSet.next())
+        val result = resultSet.getString("col")
+        val metaData = resultSet.getMetaData
+        if (result.contains("days")) {
+          // for spark 3.1 and backwards
+          assert(result.split("days").head.trim === kv._2.split(" ").head)
+          assert(metaData.getPrecision(1) === Int.MaxValue)
+          assert(resultSet.getMetaData.getColumnType(1) === 
java.sql.Types.VARCHAR)
+        } else {
+          assert(result === kv._2)
+          assert(metaData.getPrecision(1) === 29)
+          assert(resultSet.getMetaData.getColumnType(1) === 
java.sql.Types.OTHER)
+        }
+        assert(metaData.getScale(1) === 0)
+      }
     }
   }
 
@@ -532,15 +545,4 @@ trait SparkQueryTests extends HiveJDBCTestHelper {
       assert(foundOperationLangItem)
     }
   }
-
-  def sparkEngineMajorMinorVersion: (Int, Int) = {
-    var sparkRuntimeVer = ""
-    withJdbcStatement() { stmt =>
-      val result = stmt.executeQuery("SELECT version()")
-      assert(result.next())
-      sparkRuntimeVer = result.getString(1)
-      assert(!result.next())
-    }
-    Utils.majorMinorVersion(sparkRuntimeVer)
-  }
 }

Reply via email to