This is an automated email from the ASF dual-hosted git repository.

ngangam pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit 80013b401ce13311d923896eff7e5fff96d5ec5e
Author: Naveen Gangam <ngan...@cloudera.com>
AuthorDate: Mon Nov 16 14:13:38 2020 -0500

    Deleted commented-out code; fixed the table location and the input/output format classes
---
 .../apache/hadoop/hive/metastore/Warehouse.java    |  6 +--
 .../AbstractDataConnectorProvider.java             |  8 +--
 .../jdbc/AbstractJDBCConnectorProvider.java        | 60 ++++++----------------
 .../dataconnector/jdbc/MySQLConnectorProvider.java | 57 --------------------
 4 files changed, 22 insertions(+), 109 deletions(-)
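
For context, the net effect of this commit: AbstractDataConnectorProvider no
longer hard-codes the JDBC input/output formats and a dummy location, but
defers all three to subclass hooks. A minimal sketch of the resulting
contract, with invented names (these are not the actual Hive types):

    // Illustrative only: the base class fills the storage descriptor from
    // three abstract hooks instead of hard-coded values.
    abstract class ProviderContractSketch {
      abstract protected String getInputClass();
      abstract protected String getOutputClass();
      abstract protected String getTableLocation(String tblName);

      void fillStorageDescriptor(SdSketch sd, String tableName) {
        sd.inputFormat = getInputClass();
        sd.outputFormat = getOutputClass();
        sd.location = getTableLocation(tableName);
      }
    }

    class SdSketch { String inputFormat, outputFormat, location; }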

diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
index ef52ed9..608c56f 100755
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/Warehouse.java
@@ -227,7 +227,7 @@ public class Warehouse {
    */
   public Path determineDatabasePath(Catalog cat, Database db) throws MetaException {
     if (db.getType() == DatabaseType.REMOTE) {
-      return getRemoteDatabasePath();
+      return getDefaultDatabasePath(db.getName(), true);
     }
     if (db.isSetLocationUri()) {
       return getDnsPath(new Path(db.getLocationUri()));
@@ -334,10 +334,6 @@ public class Warehouse {
     }
   }
 
-  public Path getRemoteDatabasePath() throws MetaException {
-    return new Path(getWhRootExternal(), "dummy_path_for_remote_database.db");
-  }
-
   private boolean hasExternalWarehouseRoot() {
     return !StringUtils.isBlank(whRootExternalString);
   }
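
The Warehouse change above drops the shared dummy path for REMOTE databases
in favor of the regular per-database default. Assuming
getDefaultDatabasePath(name, true) follows Hive's usual "<dbname>.db under
the external warehouse root" convention, the effect can be illustrated with
plain java.nio paths:

    import java.nio.file.Path;
    import java.nio.file.Paths;

    // Illustration only (java.nio, not Hadoop's Path): each REMOTE database
    // now resolves to its own "<dbname>.db" directory instead of the removed
    // shared "dummy_path_for_remote_database.db".
    class DefaultDbPathSketch {
      static Path defaultDatabasePath(Path whRootExternal, String dbName) {
        return whRootExternal.resolve(dbName.toLowerCase() + ".db");
      }

      public static void main(String[] args) {
        System.out.println(defaultDatabasePath(Paths.get("/warehouse/external"), "sales_db"));
        // -> /warehouse/external/sales_db.db
      }
    }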
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
index 74ddbdd..04a5842 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/AbstractDataConnectorProvider.java
@@ -89,9 +89,9 @@ public abstract class AbstractDataConnectorProvider implements IDataConnectorPro
     serdeInfo.setParameters(serdeParams);
 
     sd.setSerdeInfo(serdeInfo);
-    sd.setInputFormat("org.apache.hive.storage.jdbc.JdbcInputFormat"); // TODO
-    sd.setOutputFormat("org.apache.hive.storage.jdbc.JdbcOutputFormat"); // TODO
-    sd.setLocation("/tmp/some_dummy_path"); // TODO
+    sd.setInputFormat(getInputClass());
+    sd.setOutputFormat(getOutputClass());
+    sd.setLocation(getTableLocation(tableName));
     sd.setBucketCols(new ArrayList<String>());
     sd.setSortCols(new ArrayList<Order>());
 
@@ -113,4 +113,6 @@ public abstract class AbstractDataConnectorProvider implements IDataConnectorPro
   abstract protected String getInputClass();
 
   abstract protected String getOutputClass();
+
+  abstract protected String getTableLocation(String tblName);
 }
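
A hypothetical concrete provider satisfying the new hook alongside the two
existing ones (names invented for illustration; the real providers live under
dataconnector/jdbc/ and inherit getTableLocation from
AbstractJDBCConnectorProvider, shown in the next diff):

    // Sketch of a subclass of the abstract provider contract.
    abstract class ProviderBaseSketch {
      abstract protected String getInputClass();
      abstract protected String getOutputClass();
      abstract protected String getTableLocation(String tblName);
    }

    class ExampleJdbcProviderSketch extends ProviderBaseSketch {
      @Override protected String getInputClass() {
        return "org.apache.hive.storage.jdbc.JdbcInputFormat";
      }

      @Override protected String getOutputClass() {
        return "org.apache.hive.storage.jdbc.JdbcOutputFormat";
      }

      @Override protected String getTableLocation(String tblName) {
        // a real provider derives this from Warehouse.getDefaultTablePath(...)
        return "/warehouse/external/remote_db.db/" + tblName;
      }
    }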
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
index d436123..62a7786 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/AbstractJDBCConnectorProvider.java
@@ -1,5 +1,6 @@
 package org.apache.hadoop.hive.metastore.dataconnector.jdbc;
 
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.DataConnector;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -22,6 +23,7 @@ import java.util.Map;
 
 public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnectorProvider {
   private static Logger LOG = LoggerFactory.getLogger(AbstractJDBCConnectorProvider.class);
+  protected static Warehouse warehouse = null;
 
   // duplicate constants from Constants.java to avoid a dependency on hive-common
   public static final String JDBC_HIVE_STORAGE_HANDLER_ID =
@@ -72,6 +74,10 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
         LOG.warn("Could not read key value from keystore");
       }
     }
+
+    try {
+      warehouse = new Warehouse(MetastoreConf.newMetastoreConf());
+    } catch (MetaException e) { /* ignore; getTableLocation() falls back to a placeholder when warehouse is null */ }
   }
 
   @Override public void open() throws ConnectException {
@@ -165,48 +171,17 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
     try {
       // rs = fetchTableMetadata(tableName);
       rs = fetchTableViaDBMetaData(tableName);
-      /*
-      Statement stmt = getConnection().createStatement();
-      rs = stmt.executeQuery(
-          "SELECT table_name, column_name, is_nullable, data_type, 
character_maximum_length FROM INFORMATION_SCHEMA.Columns where table_schema='"
-              + scoped_db + "' and table_name='" + tableName + "'");
-       */
       List<FieldSchema> cols = new ArrayList<>();
       // TODO throw exception if RS is empty
       while (rs.next()) {
         FieldSchema fs = new FieldSchema();
         fs.setName(rs.getString("COLUMN_NAME"));
-        // fs.setType(getDataType(rs.getString("DATA_TYPE"), rs.getInt("CHARACTER_MAXIMUM_LENGTH")));
         fs.setType(getDataType(rs.getString("TYPE_NAME"), rs.getInt("COLUMN_SIZE")));
         fs.setComment("inferred column type");
         cols.add(fs);
       }
 
       table = buildTableFromColsList(tableName, cols);
-      /*
-      //Setting the storage descriptor.
-      StorageDescriptor sd = new StorageDescriptor();
-      sd.setCols(cols);
-      // sd.se
-      SerDeInfo serdeInfo = new SerDeInfo();
-      serdeInfo.setName(tableName);
-      serdeInfo.setSerializationLib("org.apache.hive.storage.jdbc.JdbcSerDe");
-      Map<String, String> serdeParams = new HashMap<String, String>();
-      serdeParams.put("serialization.format", "1");
-      serdeInfo.setParameters(serdeParams);
-
-      // StorageHandler
-
-      // serdeInfo.setDeserializerClass();
-      sd.setSerdeInfo(serdeInfo);
-      // sd.getSerdeInfo().setName(tableName);
-      sd.setInputFormat("org.apache.hive.storage.jdbc.JdbcInputFormat"); // TODO
-      sd.setOutputFormat("org.apache.hive.storage.jdbc.JdbcOutputFormat"); // TODO
-      sd.setLocation("/tmp/some_dummy_path"); // TODO
-      sd.setBucketCols(new ArrayList<String>());
-      sd.setSortCols(new ArrayList<Order>());
-       */
-
       //Setting the table properties.
       table.getParameters().put(JDBC_DATABASE_TYPE, this.type);
       table.getParameters().put(JDBC_DRIVER, this.driverClassName);
@@ -220,19 +195,6 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
           table.getParameters().put(param, connectorParams.get(param));
         }
       }
-      // TODO: Need to include schema, catalog info in the parameters list.
-
-      //Setting the required table information
-      // Table table = new Table();
-      // table.setTableName(tableName);
-      // table.setTableType(TableType.EXTERNAL_TABLE.toString());
-      // table.setDbName(scoped_db);
-      // table.setSd(sd);
-      // set partition keys to empty
-      // table.setPartitionKeys(new
-          // ArrayList<FieldSchema>());
-
-      // table.setParameters(tblProps);
       return table;
     } catch (Exception e) {
       LOG.warn("Exception retrieving remote table " + scoped_db + "." + 
tableName + " via data connector "
@@ -335,4 +297,14 @@ public abstract class AbstractJDBCConnectorProvider extends AbstractDataConnecto
   @Override protected String getOutputClass() {
     return JDBC_OUTPUTFORMAT_CLASS;
   }
+  @Override protected String getTableLocation(String tableName) {
+    if (warehouse != null) {
+      try {
+        return warehouse.getDefaultTablePath(scoped_db, tableName, true).toString();
+      } catch (MetaException e) {
+        LOG.info("Error determining default table path, cause:" + 
e.getMessage());
+      }
+    }
+    return "some_dummy_path";
+  }
 }
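
The new getTableLocation above degrades gracefully: if the static Warehouse
could not be constructed (the ignored MetaException in the constructor) or the
path lookup fails, it falls back to a placeholder string. The pattern,
sketched with stand-in types rather than the actual Hive API:

    // WarehouseLike is a stand-in; this shows only the null-check-then-
    // fallback shape of the new method.
    interface WarehouseLike {
      String getDefaultTablePath(String db, String table, boolean external) throws Exception;
    }

    class TableLocationSketch {
      static String tableLocation(WarehouseLike warehouse, String db, String table) {
        if (warehouse != null) {
          try {
            return warehouse.getDefaultTablePath(db, table, true);
          } catch (Exception e) {
            // log and fall through to the placeholder, as the commit does
          }
        }
        return "some_dummy_path";
      }
    }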
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
index cdeb6fa..f4f9b62 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/dataconnector/jdbc/MySQLConnectorProvider.java
@@ -61,63 +61,6 @@ public class MySQLConnectorProvider extends AbstractJDBCConnectorProvider {
       ResultSet rs = stmt.executeQuery(
           "SELECT table_name, column_name, is_nullable, data_type, 
character_maximum_length FROM INFORMATION_SCHEMA.Columns where table_schema='"
               + scoped_db + "' and table_name='" + tableName + "'");
-      /*
-      List<FieldSchema> cols = new ArrayList<>();
-      // TODO throw exception is RS is empty
-      while (rs.next()) {
-        FieldSchema fs = new FieldSchema();
-        fs.setName(rs.getString("COLUMN_NAME"));
-        fs.setType(getDataType(rs.getString("DATA_TYPE"), rs.getInt("CHARACTER_MAXIMUM_LENGTH")));
-        fs.setComment("inferred column type");
-        cols.add(fs);
-      }
-
-      //Setting the storage descriptor.
-      StorageDescriptor sd = new StorageDescriptor();
-      sd.setCols(cols);
-      // sd.se
-      SerDeInfo serdeInfo = new SerDeInfo();
-      serdeInfo.setName(tableName);
-      serdeInfo.setSerializationLib("org.apache.hive.storage.jdbc.JdbcSerDe");
-      Map<String, String> serdeParams = new HashMap<String, String>();
-      serdeParams.put("serialization.format", "1");
-      serdeInfo.setParameters(serdeParams);
-
-      // StorageHandler
-
-      // serdeInfo.setDeserializerClass();
-      sd.setSerdeInfo(serdeInfo);
-      // sd.getSerdeInfo().setName(tableName);
-      sd.setInputFormat("org.apache.hive.storage.jdbc.JdbcInputFormat"); // TODO
-      sd.setOutputFormat("org.apache.hive.storage.jdbc.JdbcOutputFormat"); // TODO
-      sd.setLocation("/tmp/some_dummy_path"); // TODO
-      sd.setBucketCols(new ArrayList<String>());
-      sd.setSortCols(new ArrayList<Order>());
-
-      //Setting the table properties.
-      Map<String, String> tblProps = new HashMap<>();
-      tblProps.put(Constants.JDBC_DATABASE_TYPE, this.type);
-      tblProps.put(Constants.JDBC_DRIVER, this.driverClassName);
-      tblProps.put(Constants.JDBC_URL, this.jdbcUrl); // "jdbc://localhost:3306/hive"
-      tblProps.put(Constants.JDBC_USERNAME, this.username);
-      tblProps.put(Constants.JDBC_PASSWORD, this.password);
-      tblProps.put(Constants.JDBC_TABLE, tableName);
-      tblProps.put(hive_metastoreConstants.META_TABLE_STORAGE, Constants.JDBC_HIVE_STORAGE_HANDLER_ID);
-      tblProps.put("EXTERNAL", "TRUE");
-      // TODO: Need to include schema, catalog info in the paramters list.
-
-      //Setting the required table information
-      Table table = new Table();
-      table.setTableName(tableName);
-      table.setTableType(TableType.EXTERNAL_TABLE.toString());
-      table.setDbName(scoped_db);
-      table.setSd(sd);
-      table.setParameters(tblProps);
-      // set partition keys to empty
-      table.setPartitionKeys(new ArrayList<FieldSchema>());
-
-      return table;
-       */
       return rs;
     } catch (Exception e) {
       LOG.warn("Exception retrieving remote table " + scoped_db + "." + 
tableName + " via data connector "
