This is an automated email from the ASF dual-hosted git repository.

jingzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 207975b416c [FLINK-30452][hive] fix wrong argument type of null 
literal used when calling Hive's function
207975b416c is described below

commit 207975b416c324c634f0620e049669afea8cb8db
Author: luoyuxia <luoyu...@alumni.sjtu.edu.cn>
AuthorDate: Mon Dec 19 20:15:30 2022 +0800

    [FLINK-30452][hive] fix wrong argument type of null literal used 
when calling Hive's function
    
    This closes #21528
---
 .../functions/hive/conversion/HiveInspectors.java  | 14 ++++++--
 .../connectors/hive/HiveDialectQueryITCase.java    | 39 ++++++++++++++++++++++
 2 files changed, 51 insertions(+), 2 deletions(-)

diff --git 
a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
 
b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
index d26bb0dbd5e..8cbaa825520 100644
--- 
a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
+++ 
b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
@@ -85,6 +85,7 @@ import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantL
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantShortObjectInspector;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantStringObjectInspector;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableConstantTimestampObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableVoidObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -549,9 +550,18 @@ public class HiveInspectors {
                 className = 
WritableConstantBinaryObjectInspector.class.getName();
                 return HiveReflectionUtils.createConstantObjectInspector(
                         className, BytesWritable.class, value);
-            case UNKNOWN:
             case VOID:
-                // If type is null, we use the Constant String to replace
+                try {
+                    Constructor<WritableVoidObjectInspector> constructor =
+                            
WritableVoidObjectInspector.class.getDeclaredConstructor();
+                    constructor.setAccessible(true);
+                    return constructor.newInstance();
+                } catch (Exception e) {
+                    throw new FlinkHiveUDFException(
+                            "Failed to create writable constant object 
inspector", e);
+                }
+            case UNKNOWN:
+                // If type is unknown, we fall back to the Constant String inspector
                 className = 
WritableConstantStringObjectInspector.class.getName();
                 return HiveReflectionUtils.createConstantObjectInspector(
                         className, Text.class, value == null ? null : 
value.toString());
diff --git 
a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
 
b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
index 869f3fcb486..9955b4386c6 100644
--- 
a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
+++ 
b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
@@ -43,7 +43,9 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaConstantLongObjectInspector;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.TimestampObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.BeforeClass;
@@ -58,6 +60,7 @@ import java.io.FileReader;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.sql.Timestamp;
+import java.time.LocalDateTime;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -938,6 +941,42 @@ public class HiveDialectQueryITCase {
         }
     }
 
+    @Test
+    public void testNullLiteralAsArgument() throws Exception {
+        tableEnv.executeSql("create table test_ts(ts timestamp)");
+        tableEnv.executeSql("create table t_bigint(ts bigint)");
+        Long testTimestamp = 1671058803926L;
+        // timestamp's behavior is different between hive2 and hive3, so
+        // use HiveShim in this test to hide such difference
+        HiveShim hiveShim = 
HiveShimLoader.loadHiveShim(HiveShimLoader.getHiveVersion());
+        LocalDateTime expectDateTime =
+                hiveShim.toFlinkTimestamp(
+                        PrimitiveObjectInspectorUtils.getTimestamp(
+                                testTimestamp, new 
JavaConstantLongObjectInspector(testTimestamp)));
+        try {
+            tableEnv.executeSql(
+                            String.format(
+                                    "insert into table t_bigint values (%s), 
(null)",
+                                    testTimestamp))
+                    .await();
+            // the return data type for expression if(ts = 0, null ,ts) should 
be bigint instead of
+            // string. otherwise, all values in table t_bigint will be null 
since
+            // cast("1671058803926" as timestamp) will return null
+            tableEnv.executeSql(
+                            "insert into table test_ts select if(ts = 0, null 
,ts) from t_bigint")
+                    .await();
+            List<Row> result =
+                    CollectionUtil.iteratorToList(
+                            tableEnv.executeSql("select * from 
test_ts").collect());
+            // verify it can cast to timestamp value correctly
+            assertThat(result.toString())
+                    .isEqualTo(String.format("[+I[%s], +I[null]]", 
expectDateTime));
+        } finally {
+            tableEnv.executeSql("drop table test_ts");
+            tableEnv.executeSql("drop table t_bigint");
+        }
+    }
+
     private void runQFile(File qfile) throws Exception {
         QTest qTest = extractQTest(qfile);
         for (int i = 0; i < qTest.statements.size(); i++) {

Reply via email to