This is an automated email from the ASF dual-hosted git repository.

dkuzmenko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new c98456d737b HIVE-28358: Enable JDBC getClob retrieval from String columns (Valentino Pinna, reviewed by Denys Kuzmenko)
c98456d737b is described below

commit c98456d737bcacd20c97c7dd6abae163028fbc23
Author: Valentino Pinna <vpinn...@users.noreply.github.com>
AuthorDate: Wed Aug 21 09:02:45 2024 +0200

    HIVE-28358: Enable JDBC getClob retrieval from String columns (Valentino Pinna, reviewed by Denys Kuzmenko)
    
    Closes #5336
---
 .../org/apache/hive/jdbc/HiveBaseResultSet.java    |  8 +++-
 .../apache/hive/jdbc/TestHiveBaseResultSet.java    | 48 ++++++++++++++++++++++
 2 files changed, 54 insertions(+), 2 deletions(-)

diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
index 0ee0027d8cb..3988b02fd32 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveBaseResultSet.java
@@ -46,6 +46,8 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import javax.sql.rowset.serial.SerialClob;
+
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.TimestampTZUtil;
@@ -309,12 +311,14 @@ public abstract class HiveBaseResultSet implements ResultSet {
 
   @Override
   public Clob getClob(int i) throws SQLException {
-    throw new SQLFeatureNotSupportedException("Method not supported");
+    String str = getString(i);
+    return str == null ? null : new SerialClob(str.toCharArray());
   }
 
   @Override
   public Clob getClob(String colName) throws SQLException {
-    throw new SQLFeatureNotSupportedException("Method not supported");
+    String str = getString(colName);
+    return str == null ? null : new SerialClob(str.toCharArray());
   }
 
   @Override
diff --git a/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java b/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
index bca26f336f3..5a2eecd6ea9 100644
--- a/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
+++ b/jdbc/src/test/org/apache/hive/jdbc/TestHiveBaseResultSet.java
@@ -20,12 +20,17 @@ package org.apache.hive.jdbc;
 
 import static org.mockito.Mockito.when;
 
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.Reader;
 import java.lang.reflect.Field;
 import java.nio.charset.StandardCharsets;
+import java.sql.Clob;
 import java.sql.SQLException;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.stream.Collectors;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hive.service.cli.TableSchema;
@@ -240,6 +245,49 @@ public class TestHiveBaseResultSet {
     Assert.assertFalse(resultSet.wasNull());
   }
 
+  /**
+   * HIVE-28358 getClob(int) != null
+   */
+  @Test
+  public void testGetClobString() throws SQLException, IOException {
+    FieldSchema fieldSchema = new FieldSchema();
+    fieldSchema.setType("varchar(64)");
+
+    List<FieldSchema> fieldSchemas = Arrays.asList(fieldSchema);
+    TableSchema schema = new TableSchema(fieldSchemas);
+
+    HiveBaseResultSet resultSet = Mockito.spy(HiveBaseResultSet.class);
+    resultSet.row = new Object[] {"ABC"};
+
+    when(resultSet.getSchema()).thenReturn(schema);
+    
+    Clob clob = resultSet.getClob(1);
+    try (Reader clobReader = clob.getCharacterStream()) {
+      Assert.assertEquals("ABC", new BufferedReader(clobReader).lines().collect(Collectors.joining(System.lineSeparator())));
+    }
+    Assert.assertFalse(resultSet.wasNull());
+  }
+
+  /**
+   * HIVE-28358 getClob(int) == null 
+   */
+  @Test
+  public void testGetClobNull() throws SQLException {
+    FieldSchema fieldSchema = new FieldSchema();
+    fieldSchema.setType("varchar(64)");
+
+    List<FieldSchema> fieldSchemas = Arrays.asList(fieldSchema);
+    TableSchema schema = new TableSchema(fieldSchemas);
+
+    HiveBaseResultSet resultSet = Mockito.spy(HiveBaseResultSet.class);
+    resultSet.row = new Object[] {null};
+
+    when(resultSet.getSchema()).thenReturn(schema);
+
+    Assert.assertNull(resultSet.getClob(1));
+    Assert.assertTrue(resultSet.wasNull());
+  }
+
   @Test
   public void testFindColumnUnqualified() throws Exception {
     FieldSchema fieldSchema1 = new FieldSchema();

Reply via email to