Repository: hive
Updated Branches:
  refs/heads/llap a7b0ca733 -> 79c1c691e


HIVE-13318: Cache the result of getTable from metastore (Pengcheng Xiong, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/255069e4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/255069e4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/255069e4

Branch: refs/heads/llap
Commit: 255069e4f1bb1ac874f5a3472ebed1abf26e8187
Parents: 4e9f95a
Author: Pengcheng Xiong <pxi...@apache.org>
Authored: Wed Mar 30 14:43:44 2016 -0700
Committer: Pengcheng Xiong <pxi...@apache.org>
Committed: Wed Mar 30 14:43:44 2016 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/ParseContext.java      |  7 +++++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 29 +++++++++++---------
 .../hadoop/hive/ql/parse/TaskCompiler.java      |  2 +-
 3 files changed, 24 insertions(+), 14 deletions(-)
----------------------------------------------------------------------
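
For context, the patch memoizes metastore table lookups during semantic
analysis: the first getTable() call for a given name goes to the metastore,
and repeat lookups within the same query compilation are answered from a
per-analyzer map (tabNameToTabObject) that is cleared on reset() and shared
with downstream compilation via ParseContext. A minimal standalone sketch of
the pattern follows; TableObjectCache, MetastoreClient, and the Object value
type are illustrative placeholders, not Hive's actual API.

  import java.util.HashMap;
  import java.util.Map;

  public class TableObjectCache {

    /** Placeholder for the metastore-backed lookup; not Hive's real client. */
    interface MetastoreClient {
      Object getTable(String tableName) throws Exception;
    }

    private final Map<String, Object> tabNameToTabObject = new HashMap<>();
    private final MetastoreClient db;

    public TableObjectCache(MetastoreClient db) {
      this.db = db;
    }

    /** First lookup per name hits the metastore; repeats hit the map. */
    public Object getTableObjectByName(String tableName) throws Exception {
      Object table = tabNameToTabObject.get(tableName);
      if (table == null) {
        table = db.getTable(tableName); // one round trip per distinct name
        tabNameToTabObject.put(tableName, table);
      }
      return table;
    }

    /** Like SemanticAnalyzer.reset(): the cache lives for one compilation. */
    public void reset() {
      tabNameToTabObject.clear();
    }
  }

Scoping the cache to a single compilation keeps the speedup without risking
stale metadata across queries, which is why the diff below clears the map on
reset and threads it through ParseContext to TaskCompiler rather than making
it global.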


http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
index 95c254c..1bccf20 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
@@ -87,6 +87,7 @@ public class ParseContext {
   // reducer
   private Map<String, PrunedPartitionList> prunedPartitions;
   private Map<String, ReadEntity> viewAliasToInput;
+  private Map<String, Table> tabNameToTabObject;
 
   /**
    * The lineage information.
@@ -162,6 +163,7 @@ public class ParseContext {
       Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
      UnionProcContext uCtx, List<AbstractMapJoinOperator<? extends MapJoinDesc>> listMapJoinOpsNoReducer,
       Map<String, PrunedPartitionList> prunedPartitions,
+      Map<String, Table> tabNameToTabObject,
       HashMap<TableScanOperator, SampleDesc> opToSamplePruner,
       GlobalLimitCtx globalLimitCtx,
       HashMap<String, SplitSample> nameToSplitSample,
@@ -185,6 +187,7 @@ public class ParseContext {
     this.uCtx = uCtx;
     this.listMapJoinOpsNoReducer = listMapJoinOpsNoReducer;
     this.prunedPartitions = prunedPartitions;
+    this.tabNameToTabObject = tabNameToTabObject;
     this.opToSamplePruner = opToSamplePruner;
     this.nameToSplitSample = nameToSplitSample;
     this.globalLimitCtx = globalLimitCtx;
@@ -577,4 +580,8 @@ public class ParseContext {
  public void setNeedViewColumnAuthorization(boolean needViewColumnAuthorization) {
     this.needViewColumnAuthorization = needViewColumnAuthorization;
   }
+
+  public Map<String, Table> getTabNameToTabObject() {
+    return tabNameToTabObject;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index adee14b..e81d46e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -324,7 +324,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
   protected AnalyzeRewriteContext analyzeRewrite;
 
   // A mapping from a tableName to a table object in metastore.
-  Map<String, Table> tableNameToMetaDataTableObject;
+  Map<String, Table> tabNameToTabObject;
 
   // The tokens we should ignore when we are trying to do table masking.
  private final Set<Integer> ignoredTokens = Sets.newHashSet(HiveParser.TOK_GROUPBY,
@@ -359,6 +359,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    listMapJoinOpsNoReducer = new ArrayList<AbstractMapJoinOperator<? extends MapJoinDesc>>();
     groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
     prunedPartitions = new HashMap<String, PrunedPartitionList>();
+    tabNameToTabObject = new HashMap<String, Table>();
     unparseTranslator = new UnparseTranslator(conf);
     autogenColAliasPrfxLbl = HiveConf.getVar(conf,
         HiveConf.ConfVars.HIVE_AUTOGEN_COLUMNALIAS_PREFIX_LABEL);
@@ -371,7 +372,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     viewAliasToInput = new HashMap<String, ReadEntity>();
     noscan = partialscan = false;
     tableMask = new TableMask(this, conf);
-    tableNameToMetaDataTableObject = new HashMap<>();
+    tabNameToTabObject = new HashMap<>();
   }
 
   @Override
@@ -380,6 +381,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     if(clearPartsCache) {
       prunedPartitions.clear();
     }
+    tabNameToTabObject.clear();
     loadTableWork.clear();
     loadFileWork.clear();
     topOps.clear();
@@ -429,6 +431,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     uCtx = pctx.getUCtx();
     listMapJoinOpsNoReducer = pctx.getListMapJoinOpsNoReducer();
     prunedPartitions = pctx.getPrunedPartitions();
+    tabNameToTabObject = pctx.getTabNameToTabObject();
     fetchTask = pctx.getFetchTask();
     setLineageInfo(pctx.getLineageInfo());
   }
@@ -440,7 +443,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
-        listMapJoinOpsNoReducer, prunedPartitions,
+        listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject,
         opToSamplePruner, globalLimitCtx, nameToSplitSample, inputs, rootTasks,
        opToPartToSkewedPruner, viewAliasToInput, reduceSinkOperatorsAddedByEnforceBucketingSorting,
         analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema);
@@ -1606,7 +1609,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
           }
           Table table = null;
           try {
-            table = db.getTable(tableName);
+            table = this.getTableObjectByName(tableName);
           } catch (HiveException ex) {
             throw new SemanticException(ex);
           }
@@ -10344,13 +10347,13 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     }
   }
 
-  private Table getMetaDataTableObjectByName(String tableName) throws HiveException {
-    if (!tableNameToMetaDataTableObject.containsKey(tableName)) {
+  private Table getTableObjectByName(String tableName) throws HiveException {
+    if (!tabNameToTabObject.containsKey(tableName)) {
       Table table = db.getTable(tableName);
-      tableNameToMetaDataTableObject.put(tableName, table);
+      tabNameToTabObject.put(tableName, table);
       return table;
     } else {
-      return tableNameToMetaDataTableObject.get(tableName);
+      return tabNameToTabObject.get(tableName);
     }
   }
 
@@ -10400,7 +10403,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         String replacementText = null;
         Table table = null;
         try {
-          table = getMetaDataTableObjectByName(tabIdName);
+          table = getTableObjectByName(tabIdName);
         } catch (HiveException e) {
           throw new SemanticException("Table " + tabIdName + " is not found.");
         }
@@ -10636,7 +10639,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         new HashSet<JoinOperator>(joinContext.keySet()),
         new HashSet<SMBMapJoinOperator>(smbMapJoinContext.keySet()),
         loadTableWork, loadFileWork, ctx, idToTableNameMap, destTableId, uCtx,
-        listMapJoinOpsNoReducer, prunedPartitions, opToSamplePruner,
+        listMapJoinOpsNoReducer, prunedPartitions, tabNameToTabObject, opToSamplePruner,
        globalLimitCtx, nameToSplitSample, inputs, rootTasks, opToPartToSkewedPruner,
         viewAliasToInput, reduceSinkOperatorsAddedByEnforceBucketingSorting,
         analyzeRewrite, tableDesc, queryProperties, viewProjectToTableSchema);
@@ -11671,7 +11674,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       Set<String> tableAliases = qb.getTabAliases();
       for (String alias : tableAliases) {
         try {
-          Table table = db.getTable(qb.getTabNameForAlias(alias));
+          Table table = this.getTableObjectByName(qb.getTabNameForAlias(alias));
           if (table.isTemporary()) {
             throw new SemanticException("View definition references temporary 
table " + alias);
           }
@@ -11874,7 +11877,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
     Table tbl;
     try {
-      tbl = db.getTable(tableName);
+      tbl = this.getTableObjectByName(tableName);
     } catch (InvalidTableException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
     }
@@ -11903,7 +11906,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
    String tableName = getUnescapedName((ASTNode) tree.getChild(0).getChild(0));
     Table tbl;
     try {
-      tbl = db.getTable(tableName);
+      tbl = this.getTableObjectByName(tableName);
     } catch (InvalidTableException e) {
       throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName), e);
     } catch (HiveException e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/255069e4/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
index 8e64a0b..f7d7a40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/TaskCompiler.java
@@ -399,7 +399,7 @@ public abstract class TaskCompiler {
         pCtx.getLoadTableWork(), pCtx.getLoadFileWork(), pCtx.getContext(),
         pCtx.getIdToTableNameMap(), pCtx.getDestTableId(), pCtx.getUCtx(),
         pCtx.getListMapJoinOpsNoReducer(),
-        pCtx.getPrunedPartitions(), pCtx.getOpToSamplePruner(), pCtx.getGlobalLimitCtx(),
+        pCtx.getPrunedPartitions(), pCtx.getTabNameToTabObject(), pCtx.getOpToSamplePruner(), pCtx.getGlobalLimitCtx(),
         pCtx.getNameToSplitSample(), pCtx.getSemanticInputs(), rootTasks,
         pCtx.getOpToPartToSkewedPruner(), pCtx.getViewAliasToInput(),
         pCtx.getReduceSinkOperatorsAddedByEnforceBucketingSorting(),
