Github user ppadma commented on a diff in the pull request:

    https://github.com/apache/drill/pull/592#discussion_r83324591
  
    --- Diff: 
contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/schema/HiveDatabaseSchema.java
 ---
    @@ -78,32 +79,49 @@ public String getTypeName() {
       }
     
       @Override
    -  public List<Pair<String, ? extends Table>> 
getTablesByNamesByBulkLoad(final List<String> tableNames) {
    +  public List<Pair<String, ? extends Table>> 
getTablesByNamesByBulkLoad(final List<String> tableNames,
    +      final int bulkSize) {
    +    final int totalTables = tableNames.size();
         final String schemaName = getName();
    -    final List<Pair<String, ? extends Table>> tableNameToTable = 
Lists.newArrayList();
    -    List<org.apache.hadoop.hive.metastore.api.Table> tables;
    -    try {
    -      tables = 
DrillHiveMetaStoreClient.getTableObjectsByNameHelper(mClient, schemaName, 
tableNames);
    -    } catch (TException e) {
    -      logger.warn("Exception occurred while trying to list tables by names 
from {}: {}", schemaName, e.getCause());
    -      return tableNameToTable;
    +    final List<org.apache.hadoop.hive.metastore.api.Table> tables = 
Lists.newArrayList();
    +
    +    // In each round, Drill asks for a sub-list of all the requested tables
    +    for (int fromIndex = 0; fromIndex < totalTables; fromIndex += 
bulkSize) {
    +      final int toIndex = Math.min(fromIndex + bulkSize, totalTables);
    +      final List<String> eachBulkofTableNames = 
tableNames.subList(fromIndex, toIndex);
    +      List<org.apache.hadoop.hive.metastore.api.Table> eachBulkofTables;
    +      // Retries once if the first call to fetch the metadata fails
    +      synchronized (mClient) {
    +        try {
    +          eachBulkofTables = mClient.getTableObjectsByName(schemaName, 
eachBulkofTableNames);
    +        } catch (TException tException) {
    +          try {
    +            mClient.reconnect();
    +            eachBulkofTables = mClient.getTableObjectsByName(schemaName, 
eachBulkofTableNames);
    +          } catch (Exception e) {
    +            logger.warn("Exception occurred while trying to read tables 
from {}: {}", schemaName,
    +                e.getCause());
    +            return ImmutableList.of();
    +          }
    +        }
    +        tables.addAll(eachBulkofTables);
    +      }
         }
     
    -    for(final org.apache.hadoop.hive.metastore.api.Table table : tables) {
    -      if(table == null) {
    +    final List<Pair<String, ? extends Table>> tableNameToTable = 
Lists.newArrayList();
    +    for (final org.apache.hadoop.hive.metastore.api.Table table : tables) {
    +      if (table == null) {
    --- End diff --
    
    Can this table be null?


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and you would like it to be, or if the feature is enabled but not
working, please contact infrastructure at infrastruct...@apache.org or file
a JIRA ticket with INFRA.
---

Reply via email to