This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 3e41782d7c23e9e76a4e2f5f0876b4fd6e6b9a8d
Author: Naveen Gangam <ngan...@cloudera.com>
AuthorDate: Tue Dec 1 01:02:29 2020 -0500

    HIVE-24396: Unhandled longvarchar and integer types for derby
---
 .../apache/hadoop/hive/metastore/HiveMetaStore.java   | 17 ++++++++---------
 .../dataconnector/jdbc/DerbySQLConnectorProvider.java | 19 ++++++++++++++++++-
 .../dataconnector/jdbc/MySQLConnectorProvider.java    |  1 +
 .../jdbc/PostgreSQLConnectorProvider.java             |  3 +++
 4 files changed, 30 insertions(+), 10 deletions(-)

diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 88261e2..7288ca3 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -2366,15 +2366,9 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       List<String> processorCapabilities = req.getProcessorCapabilities();
       String processorId = req.getProcessorIdentifier();
 
-      Database db = null;
-      try {
-        db = ms.getDatabase(tbl.getCatName(), tbl.getDbName());
-      } catch (Exception e) {
-        LOG.info("Database {} does exist, exception: {}", tbl.getDbName(), 
e.getMessage());
-        return;
-      }
+      Database db = get_database_core(tbl.getCatName(), tbl.getDbName());
       if (db != null && db.getType().equals(DatabaseType.REMOTE)) {
-        boolean success = DataConnectorProviderFactory.getDataConnectorProvider(db).createTable(tbl);
+        DataConnectorProviderFactory.getDataConnectorProvider(db).createTable(tbl);
         return;
       }
 
@@ -4533,7 +4527,12 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       Database db = null;
       try {
         ms.openTransaction();
-        db = ms.getDatabase(catName, dbName);
+        try {
+          db = ms.getDatabase(catName, dbName);
+        } catch (NoSuchObjectException notExists) {
+          throw new InvalidObjectException("Unable to add partitions because "
+              + "database or table " + dbName + "." + tblName + " does not exist");
+        }
         if (db.getType() == DatabaseType.REMOTE)
           throw new MetaException("Operation add_partitions_pspec not supported on tables in REMOTE database");
         tbl = ms.getTable(catName, dbName, tblName, null);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
index 1cf90bc..c212098 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/DerbySQLConnectorProvider.java
@@ -1,5 +1,6 @@
 package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
 
+import org.apache.hadoop.hive.metastore.ColumnType;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -63,7 +64,23 @@ public class DerbySQLConnectorProvider extends AbstractJDBCConnectorProvider {
 
   protected String getDataType(String dbDataType, int size) {
     String mappedType = super.getDataType(dbDataType, size);
-    // map any db specific types here. or return
+    if (!mappedType.equalsIgnoreCase(ColumnType.VOID_TYPE_NAME)) {
+      return mappedType;
+    }
+
+    // map any db specific types here.
+    switch (dbDataType.toLowerCase())
+    {
+    case "integer":
+      mappedType = ColumnType.INT_TYPE_NAME;
+      break;
+    case "long varchar":
+      mappedType = ColumnType.STRING_TYPE_NAME;
+      break;
+    default:
+      mappedType = ColumnType.VOID_TYPE_NAME;
+      break;
+    }
     return mappedType;
   }
 }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
index cb80c4f..17d5a8b 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
@@ -80,6 +80,7 @@ public class MySQLConnectorProvider extends AbstractJDBCConnectorProvider {
     {
     default:
       mappedType = ColumnType.VOID_TYPE_NAME;
+      break;
     }
     return mappedType;
   }
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java
index 1e7c65a..51bc97e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/PostgreSQLConnectorProvider.java
@@ -57,10 +57,13 @@ public class PostgreSQLConnectorProvider extends AbstractJDBCConnectorProvider {
     {
     case "bpchar":
       mappedType = ColumnType.CHAR_TYPE_NAME + wrapSize(size);
+      break;
     case "int8":
       mappedType = ColumnType.BIGINT_TYPE_NAME;
+      break;
     default:
       mappedType = ColumnType.VOID_TYPE_NAME;
+      break;
     }
     return mappedType;
   }
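
For reference, below is a minimal standalone sketch of the switch-based type mapping this patch adds to DerbySQLConnectorProvider.getDataType(). It is an illustration only: the class name, constant values, and main() driver are assumptions made for this sketch, while the real provider first delegates to the superclass mapping and uses the org.apache.hadoop.hive.metastore.ColumnType constants shown in the diff.

// Minimal sketch of the Derby-specific type mapping introduced above.
// The constant values here are assumed; the actual code uses
// ColumnType.INT_TYPE_NAME, ColumnType.STRING_TYPE_NAME and
// ColumnType.VOID_TYPE_NAME.
public class DerbyTypeMappingSketch {
  static final String INT_TYPE_NAME = "int";
  static final String STRING_TYPE_NAME = "string";
  static final String VOID_TYPE_NAME = "void";

  // Maps a Derby column type name to a Hive type name; unrecognized types
  // map to VOID, mirroring the default branch in the patch. Each case exits
  // the switch, which is also why the MySQL and PostgreSQL providers gained
  // the missing break statements in this commit.
  static String mapDerbyType(String dbDataType) {
    switch (dbDataType.toLowerCase()) {
      case "integer":
        return INT_TYPE_NAME;
      case "long varchar":
        return STRING_TYPE_NAME;
      default:
        return VOID_TYPE_NAME;
    }
  }

  public static void main(String[] args) {
    System.out.println(mapDerbyType("INTEGER"));      // int
    System.out.println(mapDerbyType("LONG VARCHAR")); // string
    System.out.println(mapDerbyType("XML"));          // void (unmapped)
  }
}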
