Repository: hive
Updated Branches:
  refs/heads/master 0538e5102 -> 84cf8beab


HIVE-17578 : Create a TableRef object for Table/Partition (Gergely Hajos via Ashutosh Chauhan)

Signed-off-by: Ashutosh Chauhan <hashut...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/84cf8bea
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/84cf8bea
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/84cf8bea

Branch: refs/heads/master
Commit: 84cf8beab2279cd8b9e00b6101c7be6ec70f979b
Parents: 077a9e5
Author: Gergely Hajós <rogoz...@gmail.com>
Authored: Fri Oct 6 08:19:00 2017 -0700
Committer: Ashutosh Chauhan <hashut...@apache.org>
Committed: Thu Oct 19 18:11:14 2017 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/TestAutoPurgeTables.java     | 15 ++++++-----
 .../hive/ql/parse/TestReplicationScenarios.java |  4 ++-
 .../TestHiveAuthorizerCheckInvocation.java      |  8 +++---
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 23 +++++++++-------
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java | 12 ++++-----
 .../hadoop/hive/ql/exec/StatsNoJobTask.java     |  4 +--
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |  8 +++---
 .../apache/hadoop/hive/ql/hooks/ATSHook.java    |  2 +-
 .../hadoop/hive/ql/hooks/LineageInfo.java       |  9 ++++---
 .../hadoop/hive/ql/hooks/LineageLogger.java     |  7 ++---
 .../ql/hooks/UpdateInputAccessTimeHook.java     |  4 +--
 .../hive/ql/index/IndexMetadataChangeTask.java  |  2 +-
 .../ql/index/bitmap/BitmapIndexHandler.java     | 12 +++++----
 .../ql/index/compact/CompactIndexHandler.java   | 13 ++++-----
 .../apache/hadoop/hive/ql/metadata/Hive.java    | 28 ++++++++++++++------
 .../ql/metadata/SessionHiveMetaStoreClient.java | 11 ++++----
 .../apache/hadoop/hive/ql/metadata/Table.java   |  5 ++++
 .../hive/ql/optimizer/SharedWorkOptimizer.java  | 17 ++++++------
 .../ql/optimizer/lineage/OpProcFactory.java     |  3 ++-
 .../hive/ql/optimizer/ppr/PartitionPruner.java  |  2 +-
 .../hive/ql/parse/BaseSemanticAnalyzer.java     |  9 ++++---
 .../hadoop/hive/ql/parse/CalcitePlanner.java    |  2 +-
 .../ql/parse/ColumnStatsAutoGatherContext.java  |  2 +-
 .../ql/parse/ColumnStatsSemanticAnalyzer.java   |  4 +--
 .../ql/parse/ReplicationSemanticAnalyzer.java   |  3 ++-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  2 +-
 .../repl/load/message/RenameTableHandler.java   |  5 ++--
 .../apache/hadoop/hive/ql/plan/TableDesc.java   |  5 ++++
 .../hive/ql/txn/compactor/CompactorThread.java  |  3 ++-
 .../hadoop/hive/ql/txn/compactor/Worker.java    |  8 ++++--
 .../exec/TestMsckCreatePartitionsInBatches.java |  3 ++-
 .../hadoop/hive/ql/metadata/TestHive.java       |  3 ++-
 32 files changed, 145 insertions(+), 93 deletions(-)
----------------------------------------------------------------------
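
The recurring change in this patch is replacing hand-assembled "dbName + "." + tableName" strings with a single accessor on Table or the shared StatsUtils helper. A minimal before/after sketch of the pattern (variable names here are illustrative, not taken from any one file):

    // Before: every call site concatenated the qualified name by hand.
    String oldStyle = tbl.getDbName() + "." + tbl.getTableName();
    // After: the ql.metadata.Table object exposes the name directly,
    String viaTable = tbl.getFullyQualifiedName();
    // and call sites holding only the two strings use the shared helper.
    String viaHelper = StatsUtils.getFullyQualifiedTableName(dbName, tableName);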


http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java
index abf9769..62d0109 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAutoPurgeTables.java
@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.LocatedFileStatus;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.jdbc.miniHS2.MiniHS2;
 import org.junit.AfterClass;
@@ -79,26 +80,28 @@ public class TestAutoPurgeTables {
     } else {
       createTablePrefix = "create table ";
     }
+    String qualifiedTableName = StatsUtils.getFullyQualifiedTableName(testDbName, testTableName);
     if (isPartitioned) {
       // create a partitioned table
-      stmt.execute(createTablePrefix + testDbName + "." + testTableName + " (id int, value string) "
+      stmt.execute(
+          createTablePrefix + qualifiedTableName + " (id int, value string) "
           + " partitioned by (" + partitionedColumnName + " STRING)");
       // load data
-      stmt.execute("insert into " + testDbName + "." + testTableName + " 
PARTITION ("
+      stmt.execute("insert into " + qualifiedTableName + " PARTITION ("
           + partitionedColumnName + "=" + partitionedColumnValue1
           + ") values (1, \"dummy1\"), (2, \"dummy2\"), (3, \"dummy3\")");
-      stmt.execute("insert into " + testDbName + "." + testTableName + " 
PARTITION ("
+      stmt.execute("insert into " + qualifiedTableName + " PARTITION ("
           + partitionedColumnName + "=" + partitionedColumnValue2
           + ") values (4, \"dummy4\"), (5, \"dummy5\"), (6, \"dummy6\")");
     } else {
       // create a table
-      stmt.execute(createTablePrefix + testDbName + "." + testTableName + " (id int, value string)");
+      stmt.execute(createTablePrefix + qualifiedTableName + " (id int, value string)");
       // load data
-      stmt.execute("insert into " + testDbName + "." + testTableName
+      stmt.execute("insert into " + qualifiedTableName
           + " values (1, \"dummy1\"), (2, \"dummy2\"), (3, \"dummy3\")");
     }
     if (isAutopurge != null) {
-      stmt.execute("alter table " + testDbName + "." + testTableName
+      stmt.execute("alter table " + qualifiedTableName
           + " set tblproperties (\"auto.purge\"=\"" + isAutopurge + "\")");
     }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index cec8490..c1aac83 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -53,6 +53,7 @@ import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.shims.Utils;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hive.hcatalog.api.repl.ReplicationV1CompatRule;
@@ -3157,7 +3158,8 @@ public class TestReplicationScenarios {
        String testName = "deleteStagingDir";
        String dbName = createDB(testName, driver);
        String tableName = "unptned";
-    run("CREATE TABLE " + dbName + "." + tableName + "(a string) STORED AS 
TEXTFILE", driver);
+    run("CREATE TABLE " + StatsUtils.getFullyQualifiedTableName(dbName, 
tableName) + "(a string) STORED AS TEXTFILE",
+        driver);
 
     String[] unptn_data = new String[] {"one", "two"};
     String unptn_locn = new Path(TEST_PATH , testName + "_unptn").toUri().getPath();

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
index 456e80d..19694b0 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerCheckInvocation.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
 import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator;
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -70,6 +71,7 @@ public class TestHiveAuthorizerCheckInvocation {
   private static final String acidTableName = tableName + "_acid";
   private static final String dbName = TestHiveAuthorizerCheckInvocation.class.getSimpleName()
       + "Db";
+  private static final String fullInTableName = StatsUtils.getFullyQualifiedTableName(dbName, inDbTableName);
   static HiveAuthorizer mockedAuthorizer;
 
   /**
@@ -105,7 +107,7 @@ public class TestHiveAuthorizerCheckInvocation {
         + " (i int, j int, k string) partitioned by (city string, `date` 
string) ");
     runCmd("create view " + viewName + " as select * from " + tableName);
     runCmd("create database " + dbName);
-    runCmd("create table " + dbName + "." + inDbTableName + "(i int)");
+    runCmd("create table " + fullInTableName + "(i int)");
     // Need a separate table for ACID testing since it has to be bucketed and it has to be Acid
     runCmd("create table " + acidTableName + " (i int, j int, k int) clustered 
by (k) into 2 buckets " +
         "stored as orc TBLPROPERTIES ('transactional'='true')");
@@ -122,7 +124,7 @@ public class TestHiveAuthorizerCheckInvocation {
     runCmd("drop table if exists " + acidTableName);
     runCmd("drop table if exists " + tableName);
     runCmd("drop table if exists " + viewName);
-    runCmd("drop table if exists " + dbName + "." + inDbTableName);
+    runCmd("drop table if exists " + fullInTableName);
     runCmd("drop database if exists " + dbName );
     driver.close();
   }
@@ -420,7 +422,7 @@ public class TestHiveAuthorizerCheckInvocation {
     assertEquals("db name", dbName.toLowerCase(), dbObj.getDbname());
 
     resetAuthorizer();
-    status = driver.compile("repl dump " + dbName + "." + inDbTableName);
+    status = driver.compile("repl dump " + fullInTableName);
     assertEquals(0, status);
     inputs = getHivePrivilegeObjectInputs().getLeft();
     dbObj = inputs.get(0);

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 1108934..fb7363a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.TableType;
 import org.apache.hadoop.hive.ql.exec.UDF;
 import org.apache.hadoop.hive.ql.processors.DfsProcessor;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hive.common.util.HiveVersionInfo;
 import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
 import org.apache.hive.service.cli.HiveSQLException;
@@ -1089,9 +1090,10 @@ public class TestJdbcDriver2 {
     assertNotNull("Statement is null", stmt);
 
     String tableNameInDbUnique = tableName + "_unique";
+    String fullTestTableName = StatsUtils.getFullyQualifiedTableName(testDbName, tableNameInDbUnique);
     // create a table with a unique name in testDb
-    stmt.execute("drop table if exists " + testDbName + "." + 
tableNameInDbUnique);
-    stmt.execute("create table " + testDbName + "." + tableNameInDbUnique
+    stmt.execute("drop table if exists " + fullTestTableName);
+    stmt.execute("create table " + fullTestTableName
         + " (under_col int comment 'the under column', value string) comment 
'" + tableComment
         + "'");
 
@@ -1106,7 +1108,7 @@ public class TestJdbcDriver2 {
     }
     assertTrue("table name " + tableNameInDbUnique
         + " not found in SHOW TABLES result set", testTableExists);
-    stmt.execute("drop table if exists " + testDbName + "." + 
tableNameInDbUnique);
+    stmt.execute("drop table if exists " + fullTestTableName);
     stmt.close();
   }
 
@@ -1226,11 +1228,11 @@ public class TestJdbcDriver2 {
     Set<String> viewOrTableArray = new HashSet<String>();
     viewOrTableArray.addAll(tableTypeNames);
     viewOrTableArray.add(viewTypeName);
-    String testTblWithDb = testDbName + "." + tableName;
-    String testPartTblWithDb = testDbName + "." + partitionedTableName;
-    String testDataTypeTblWithDb = testDbName + "." + dataTypeTableName;
-    String testViewWithDb = testDbName + "." + viewName;
-    String testExtTblWithDb = testDbName + "." + externalTableName;
+    String testTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, tableName);
+    String testPartTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, partitionedTableName);
+    String testDataTypeTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, dataTypeTableName);
+    String testViewWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, viewName);
+    String testExtTblWithDb = StatsUtils.getFullyQualifiedTableName(testDbName, externalTableName);
 
     Map<Object[], String[]> tests = new IdentityHashMap<Object[], String[]>();
     tests.put(new Object[] { null, "testjdbc%", ALL }, new String[] { testTblWithDb,
@@ -1273,8 +1275,9 @@ public class TestJdbcDriver2 {
       while (rs.next()) {
         String resultDbName = rs.getString("TABLE_SCHEM");
         String resultTableName = rs.getString("TABLE_NAME");
-        assertTrue("Invalid table " + resultDbName + "." + resultTableName + " 
for test "
-            + debugString, expectedTables.contains(resultDbName + "." + 
resultTableName));
+        String fullTableName = StatsUtils.getFullyQualifiedTableName(resultDbName, resultTableName);
+        assertTrue("Invalid table " + fullTableName + " for test " + debugString,
+            expectedTables.contains(fullTableName));
 
         String resultTableComment = rs.getString("REMARKS");
         assertTrue("Missing comment on the table.", 
resultTableComment.length() > 0);

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 3b2454d..88a2cdd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -1215,7 +1215,6 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
       throws HiveException {
 
     Table tbl = db.getTable(alterPartitionDesc.getTableName(), true);
-    String tabName = alterPartitionDesc.getTableName();
 
     // This is checked by DDLSemanticAnalyzer
     assert(tbl.isPartitioned());
@@ -1282,9 +1281,9 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     tbl.getTTable().setPartitionKeys(newPartitionKeys);
 
     try {
-      db.alterTable(tabName, tbl, null);
+      db.alterTable(tbl, null);
     } catch (InvalidOperationException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + tabName);
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, "Unable to alter " + tbl.getFullyQualifiedName());
     }
 
     work.getInputs().add(new ReadEntity(tbl));
@@ -1312,7 +1311,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
 
     if (touchDesc.getPartSpec() == null) {
       try {
-        db.alterTable(touchDesc.getTableName(), tbl, environmentContext);
+        db.alterTable(tbl, environmentContext);
       } catch (InvalidOperationException e) {
         throw new HiveException("Uable to update table");
       }
@@ -4610,8 +4609,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     List<SQLForeignKey> foreignKeys = crtTbl.getForeignKeys();
     List<SQLUniqueConstraint> uniqueConstraints = crtTbl.getUniqueConstraints();
     List<SQLNotNullConstraint> notNullConstraints = crtTbl.getNotNullConstraints();
-    LOG.info("creating table {}.{} on {}", tbl.getDbName(), tbl.getTableName(),
-      tbl.getDataLocation());
+    LOG.debug("creating table {} on {}",tbl.getFullyQualifiedName(),tbl.getDataLocation());
 
     if (crtTbl.getReplicationSpec().isInReplicationScope() && (!crtTbl.getReplaceMode())){
       // if this is a replication spec, then replace-mode semantics might apply.
@@ -4635,7 +4633,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     if (crtTbl.getReplaceMode()) {
       // replace-mode creates are really alters using CreateTableDesc.
       try {
-        db.alterTable(tbl.getDbName()+"."+tbl.getTableName(),tbl,null);
+        db.alterTable(tbl, null);
       } catch (InvalidOperationException e) {
         throw new HiveException("Unable to alter table. " + e.getMessage(), e);
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
index e5d4978..c333c49 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsNoJobTask.java
@@ -103,7 +103,7 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
       tableName = work.getTableSpecs().tableName;
       table = db.getTable(tableName);
       int numThreads = HiveConf.getIntVar(conf, ConfVars.HIVE_STATS_GATHER_NUM_THREADS);
-      tableFullName = table.getDbName() + "." + table.getTableName();
+      tableFullName = table.getFullyQualifiedName();
       threadPool = Executors.newFixedThreadPool(numThreads,
           new ThreadFactoryBuilder().setDaemon(true).setNameFormat("StatsNoJobTask-Thread-%d")
               .build());
@@ -283,7 +283,7 @@ public class StatsNoJobTask extends Task<StatsNoJobWork> implements Serializable
             EnvironmentContext environmentContext = new EnvironmentContext();
             environmentContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.TASK);
 
-            db.alterTable(tableFullName, new Table(tTable), environmentContext);
+            db.alterTable(table, environmentContext);
 
             String msg = "Table " + tableFullName + " stats: [" + 
toString(parameters) + ']';
             if (Utilities.FILE_OP_LOGGER.isTraceEnabled()) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
index ff46d3a..682b42c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
@@ -60,6 +60,7 @@ import org.apache.hadoop.hive.ql.stats.StatsAggregator;
 import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
 import org.apache.hadoop.hive.ql.stats.StatsFactory;
 import org.apache.hadoop.hive.ql.stats.StatsPublisher;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.util.StringUtils;
 
 import com.google.common.collect.Lists;
@@ -170,7 +171,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
       List<Partition> partitions = getPartitionsList(db);
       boolean atomic = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_STATS_ATOMIC);
 
-      String tableFullName = table.getDbName() + "." + table.getTableName();
+      String tableFullName = table.getFullyQualifiedName();
 
       if (partitions == null) {
         org.apache.hadoop.hive.metastore.api.Table tTable = table.getTTable();
@@ -215,7 +216,7 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
           }
         }
 
-        getHive().alterTable(tableFullName, new Table(tTable), environmentContext);
+        getHive().alterTable(table, environmentContext);
         if (conf.getBoolVar(ConfVars.TEZ_EXEC_SUMMARY)) {
           console.printInfo("Table " + tableFullName + " stats: [" + 
toString(parameters) + ']');
         }
@@ -356,7 +357,8 @@ public class StatsTask extends Task<StatsWork> implements Serializable {
       throws MetaException {
 
     // prefix is of the form dbName.tblName
-    String prefix = table.getDbName() + "." + org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(table.getTableName());
+    String prefix = StatsUtils.getFullyQualifiedTableName(table.getDbName(),
+        org.apache.hadoop.hive.metastore.utils.MetaStoreUtils.encodeTableName(table.getTableName()));
     if (partition != null) {
       return Utilities.join(prefix, Warehouse.makePartPath(partition.getSpec()));
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
index 84f992a..de036a3 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/ATSHook.java
@@ -314,7 +314,7 @@ public class ATSHook implements ExecuteWithHookContext {
     List<String> tableNames = new ArrayList<String>();
     for (Entity entity : entities) {
       if (entity.getType() == Entity.Type.TABLE) {
-        tableNames.add(entity.getTable().getDbName() + "." + entity.getTable().getTableName());
+        tableNames.add(entity.getTable().getFullyQualifiedName());
       }
     }
     return tableNames;

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java
index 05b7d71..2a42325 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageInfo.java
@@ -28,9 +28,11 @@ import java.util.Set;
 import org.apache.commons.collections.SetUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.common.StringInternUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 
 /**
  * This class contains the lineage information that is passed
@@ -120,8 +122,9 @@ public class LineageInfo implements Serializable {
     @Override
     public String toString() {
       return isPartition() ?
-          part.getDbName() + "." + part.getTableName() + "@" + part.getValues() :
-          tab.getDbName() + "." + tab.getTableName();
+        StatsUtils.getFullyQualifiedTableName(part.getDbName(), part.getTableName()) + "@"
+            + part.getValues()
+        : Warehouse.getQualifiedName(tab);
     }
   }
 
@@ -331,7 +334,7 @@ public class LineageInfo implements Serializable {
 
     @Override
     public String toString() {
-      return table.getDbName() + "." + table.getTableName() + "(" + alias + ")";
+      return Warehouse.getQualifiedName(table) + "(" + alias + ")";
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
index c1f6883..bdd98cf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/LineageLogger.java
@@ -27,6 +27,7 @@ import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -226,7 +227,7 @@ public class LineageLogger implements ExecuteWithHookContext {
       String destTableName = null;
       List<String> colNames = null;
       if (t != null) {
-        destTableName = t.getDbName() + "." + t.getTableName();
+        destTableName = t.getFullyQualifiedName();
         fieldSchemas = t.getCols();
       } else {
         // Based on the plan outputs, find out the target table name and column names.
@@ -235,7 +236,7 @@ public class LineageLogger implements ExecuteWithHookContext {
           if (entityType == Entity.Type.TABLE
               || entityType == Entity.Type.PARTITION) {
             t = output.getTable();
-            destTableName = t.getDbName() + "." + t.getTableName();
+            destTableName = t.getFullyQualifiedName();
             List<FieldSchema> cols = t.getCols();
             if (cols != null && !cols.isEmpty()) {
               colNames = Utilities.getColumnNamesFromFieldSchema(cols);
@@ -329,7 +330,7 @@ public class LineageLogger implements ExecuteWithHookContext {
           continue;
         }
         Vertex.Type type = Vertex.Type.TABLE;
-        String tableName = table.getDbName() + "." + table.getTableName();
+        String tableName = Warehouse.getQualifiedName(table);
         FieldSchema fieldSchema = col.getColumn();
         String label = tableName;
         if (fieldSchema != null) {

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java b/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
index 8a9a551..dd1c1e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/hooks/UpdateInputAccessTimeHook.java
@@ -60,7 +60,7 @@ public class UpdateInputAccessTimeHook {
         case TABLE: {
           Table t = db.getTable(re.getTable().getTableName());
           t.setLastAccessTime(lastAccessTime);
-          db.alterTable(t.getDbName() + "." + t.getTableName(), t, null);
+          db.alterTable(t, null);
           break;
         }
         case PARTITION: {
@@ -70,7 +70,7 @@ public class UpdateInputAccessTimeHook {
           p.setLastAccessTime(lastAccessTime);
           db.alterPartition(t.getTableName(), p, null);
           t.setLastAccessTime(lastAccessTime);
-          db.alterTable(t.getDbName() + "." + t.getTableName(), t, null);
+          db.alterTable(t, null);
           break;
         }
         default:

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
index 02f7f55..66b14e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/IndexMetadataChangeTask.java
@@ -77,7 +77,7 @@ public class IndexMetadataChangeTask extends Task<IndexMetadataChangeWork>{
         FileSystem fs = url.getFileSystem(conf);
         FileStatus fstat = fs.getFileStatus(url);
         tbl.getParameters().put(HiveIndex.INDEX_TABLE_CREATETIME, Long.toString(fstat.getModificationTime()));
-        db.alterTable(tbl.getDbName() + "." + tbl.getTableName(), tbl, null);
+        db.alterTable(tbl, null);
       }
     } catch (Exception e) {
       e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
index 5ddbd0b..7b067a0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/bitmap/BitmapIndexHandler.java
@@ -28,7 +28,6 @@ import java.util.Set;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
@@ -51,6 +50,7 @@ import org.apache.hadoop.hive.ql.optimizer.IndexUtils;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
@@ -233,8 +233,9 @@ public class BitmapIndexHandler extends TableBasedIndexHandler {
     StringBuilder command= new StringBuilder();
     LinkedHashMap<String, String> partSpec = indexTblPartDesc.getPartSpec();
 
-    command.append("INSERT OVERWRITE TABLE " +
-        HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(indexTableName ));
+    String fullIndexTableName = StatsUtils.getFullyQualifiedTableName(HiveUtils.unparseIdentifier(dbName),
+        HiveUtils.unparseIdentifier(indexTableName));
+    command.append("INSERT OVERWRITE TABLE " + fullIndexTableName);
     if (partitioned && indexTblPartDesc != null) {
       command.append(" PARTITION ( ");
       List<String> ret = getPartKVPairStringArray(partSpec);
@@ -248,6 +249,8 @@ public class BitmapIndexHandler extends TableBasedIndexHandler {
       command.append(" ) ");
     }
 
+    String fullBaseTableName = StatsUtils.getFullyQualifiedTableName(HiveUtils.unparseIdentifier(dbName),
+        HiveUtils.unparseIdentifier(baseTableName));
     command.append(" SELECT ");
     command.append(indexCols);
     command.append(",");
@@ -258,8 +261,7 @@ public class BitmapIndexHandler extends TableBasedIndexHandler {
     command.append("EWAH_BITMAP(");
     command.append(VirtualColumn.ROWOFFSET.getName());
     command.append(")");
-    command.append(" FROM " +
-        HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(baseTableName));
+    command.append(" FROM " + fullBaseTableName);
     LinkedHashMap<String, String> basePartSpec = baseTablePartDesc.getPartSpec();
     if(basePartSpec != null) {
       command.append(" WHERE ");

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
index 1d9e131..504b062 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/index/compact/CompactIndexHandler.java
@@ -27,7 +27,6 @@ import java.util.Set;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
@@ -60,6 +59,7 @@ import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqualOrLessThan;
@@ -68,7 +68,6 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
 
 public class CompactIndexHandler extends TableBasedIndexHandler {
 
-  private Configuration configuration;
   // The names of the partition columns
   private Set<String> partitionCols;
   // Whether or not the conditions have been met to use the fact the index is sorted
@@ -103,8 +102,9 @@ public class CompactIndexHandler extends TableBasedIndexHandler {
     StringBuilder command= new StringBuilder();
     LinkedHashMap<String, String> partSpec = indexTblPartDesc.getPartSpec();
 
-    command.append("INSERT OVERWRITE TABLE " +
-        HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(indexTableName ));
+    String fullIndexTableName = StatsUtils.getFullyQualifiedTableName(HiveUtils.unparseIdentifier(dbName),
+        HiveUtils.unparseIdentifier(indexTableName));
+    command.append("INSERT OVERWRITE TABLE " + fullIndexTableName);
     if (partitioned && indexTblPartDesc != null) {
       command.append(" PARTITION ( ");
       List<String> ret = getPartKVPairStringArray(partSpec);
@@ -118,6 +118,8 @@ public class CompactIndexHandler extends TableBasedIndexHandler {
       command.append(" ) ");
     }
 
+    String fullBaseTableName = StatsUtils.getFullyQualifiedTableName(HiveUtils.unparseIdentifier(dbName),
+        HiveUtils.unparseIdentifier(baseTableName));
     command.append(" SELECT ");
     command.append(indexCols);
     command.append(",");
@@ -127,8 +129,7 @@ public class CompactIndexHandler extends TableBasedIndexHandler {
     command.append(" collect_set (");
     command.append(VirtualColumn.BLOCKOFFSET.getName());
     command.append(") ");
-    command.append(" FROM " +
-        HiveUtils.unparseIdentifier(dbName) + "." + HiveUtils.unparseIdentifier(baseTableName));
+    command.append(" FROM " + fullBaseTableName);
     LinkedHashMap<String, String> basePartSpec = baseTablePartDesc.getPartSpec();
     if(basePartSpec != null) {
       command.append(" WHERE ");

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 793ce9e..4980381 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -607,10 +607,15 @@ public class Hive {
     createTable(tbl);
   }
 
+  public void alterTable(Table newTbl, EnvironmentContext environmentContext)
+      throws InvalidOperationException, HiveException {
+    alterTable(newTbl.getDbName(), newTbl.getTableName(), newTbl, false, environmentContext);
+  }
+
   /**
    * Updates the existing table metadata with the new metadata.
    *
-   * @param tblName
+   * @param fullyQlfdTblName
    *          name of the existing table
    * @param newTbl
    *          new name of the table. could be the old name
@@ -618,14 +623,21 @@ public class Hive {
    *           if the changes in metadata is not acceptable
    * @throws TException
    */
-  public void alterTable(String tblName, Table newTbl, EnvironmentContext environmentContext)
+  public void alterTable(String fullyQlfdTblName, Table newTbl, EnvironmentContext environmentContext)
       throws InvalidOperationException, HiveException {
-    alterTable(tblName, newTbl, false, environmentContext);
+    alterTable(fullyQlfdTblName, newTbl, false, environmentContext);
   }
 
-  public void alterTable(String tblName, Table newTbl, boolean cascade, EnvironmentContext environmentContext)
+  public void alterTable(String fullyQlfdTblName, Table newTbl, boolean cascade, EnvironmentContext environmentContext)
       throws InvalidOperationException, HiveException {
-    String[] names = Utilities.getDbTableName(tblName);
+    String[] names = Utilities.getDbTableName(fullyQlfdTblName);
+    alterTable(names[0], names[1], newTbl, cascade, environmentContext);
+  }
+
+  public void alterTable(String dbName, String tblName, Table newTbl, boolean cascade,
+      EnvironmentContext environmentContext)
+      throws InvalidOperationException, HiveException {
+
     try {
       // Remove the DDL_TIME so it gets refreshed
       if (newTbl.getParameters() != null) {
@@ -638,7 +650,7 @@ public class Hive {
       if (cascade) {
         environmentContext.putToProperties(StatsSetupConst.CASCADE, StatsSetupConst.TRUE);
       }
-      getMSC().alter_table_with_environmentContext(names[0], names[1], newTbl.getTTable(), environmentContext);
+      getMSC().alter_table_with_environmentContext(dbName, tblName, newTbl.getTTable(), environmentContext);
     } catch (MetaException e) {
       throw new HiveException("Unable to alter table. " + e.getMessage(), e);
     } catch (TException e) {
@@ -2306,7 +2318,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
       environmentContext.putToProperties(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
     }
     try {
-      alterTable(tableName, tbl, environmentContext);
+      alterTable(tbl, environmentContext);
     } catch (InvalidOperationException e) {
       throw new HiveException(e);
     }
@@ -2558,7 +2570,7 @@ private void constructOneLBLocationMap(FileStatus fSta,
     alterPartitionSpecInMemory(tbl, partSpec, tpart, inheritTableSpecs, partPath);
     String fullName = tbl.getTableName();
     if (!org.apache.commons.lang.StringUtils.isEmpty(tbl.getDbName())) {
-      fullName = tbl.getDbName() + "." + tbl.getTableName();
+      fullName = tbl.getFullyQualifiedName();
     }
     alterPartition(fullName, new Partition(tbl, tpart), null);
   }
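
The new alterTable(Table, EnvironmentContext) overload above lets callers pass the Table they already hold instead of re-deriving its name; a sketch of a typical call site, assuming a live session and an existing table (the table name is hypothetical):

    Hive db = Hive.get();                        // current session's Hive handle
    Table t = db.getTable("somedb.sometable");   // fetch the metadata object
    t.setProperty("comment", "updated");         // mutate the in-memory copy
    db.alterTable(t, null);                      // db/table names now come from t itself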

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
index 61f6a7c..78e83af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/SessionHiveMetaStoreClient.java
@@ -214,7 +214,6 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     for (String element : tablePatterns.split("\\|")) {
       tblPatternList.add(Pattern.compile(element.replaceAll("\\*", ".*")).matcher(""));
     }
-    StringBuilder builder = new StringBuilder();
     for (Map.Entry<String, Map<String, Table>> outer : tmpTables.entrySet()) {
       if (!matchesAny(outer.getKey(), dbPatternList)) {
         continue;
@@ -403,7 +402,7 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     SessionState ss = SessionState.get();
     if (ss == null) {
       throw new MetaException("No current SessionState, cannot create 
temporary table"
-          + tbl.getDbName() + "." + tbl.getTableName());
+          + Warehouse.getQualifiedName(tbl));
     }
 
     // We may not own the table object, create a copy
@@ -413,7 +412,8 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     String tblName = tbl.getTableName();
     Map<String, Table> tables = getTempTablesForDatabase(dbName);
     if (tables != null && tables.containsKey(tblName)) {
-      throw new MetaException("Temporary table " + dbName + "." + tblName + " 
already exists");
+      throw new MetaException(
+          "Temporary table " + StatsUtils.getFullyQualifiedTableName(dbName, 
tblName) + " already exists");
     }
 
     // Create temp table directory
@@ -626,7 +626,8 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     // Remove table entry from SessionState
     Map<String, Table> tables = getTempTablesForDatabase(dbName);
     if (tables == null || tables.remove(tableName) == null) {
-      throw new MetaException("Could not find temp table entry for " + dbName 
+ "." + tableName);
+      throw new MetaException(
+          "Could not find temp table entry for " + 
StatsUtils.getFullyQualifiedTableName(dbName, tableName));
     }
 
     // Delete table data
@@ -699,7 +700,7 @@ public class SessionHiveMetaStoreClient extends HiveMetaStoreClient implements I
     SessionState ss = SessionState.get();
     if (ss == null) {
       throw new MetaException("No current SessionState, cannot update 
temporary table stats for "
-          + dbName + "." + tableName);
+          + StatsUtils.getFullyQualifiedTableName(dbName, tableName));
     }
     Map<String, ColumnStatisticsObj> ssTableColStats =
         getTempTableColumnStatsForTable(dbName, tableName);

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 58181c3..a1cad9e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -41,6 +41,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Order;
@@ -909,6 +910,10 @@ public class Table implements Serializable {
       != null;
   }
 
+  public String getFullyQualifiedName() {
+    return Warehouse.getQualifiedName(tTable);
+  }
+
   /**
    * @return include the db name
    */
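
getFullyQualifiedName() simply delegates to the metastore-side Warehouse helper, so the two spellings in this sketch should be interchangeable (assuming a ql.metadata.Table instance t):

    String viaAccessor = t.getFullyQualifiedName();                  // new accessor
    String viaWarehouse = Warehouse.getQualifiedName(t.getTTable()); // what it delegates to
    assert viaAccessor.equals(viaWarehouse);                         // both yield "db.table"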

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedWorkOptimizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedWorkOptimizer.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedWorkOptimizer.java
index 37fdb00..d4ddb75 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedWorkOptimizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/SharedWorkOptimizer.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.exec.UnionOperator;
+import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.parse.GenTezUtils;
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.PrunedPartitionList;
@@ -779,14 +780,14 @@ public class SharedWorkOptimizer extends Transform {
       TableScanDesc op1Conf = tsOp1.getConf();
       TableScanDesc op2Conf = tsOp2.getConf();
 
-      if (StringUtils.equals(
-              op1Conf.getTableMetadata().getDbName() + "." + op1Conf.getTableMetadata().getTableName(),
-              op2Conf.getTableMetadata().getDbName() + "." + op2Conf.getTableMetadata().getTableName()) &&
-        op1Conf.getNeededColumns().equals(op2Conf.getNeededColumns()) &&
-        StringUtils.equals(op1Conf.getFilterExprString(), op2Conf.getFilterExprString()) &&
-        pctx.getPrunedPartitions(tsOp1).getPartitions().equals(
-              pctx.getPrunedPartitions(tsOp2).getPartitions()) &&
-        op1Conf.getRowLimit() == op2Conf.getRowLimit()) {
+      Table tableMeta1 = op1Conf.getTableMetadata();
+      Table tableMeta2 = op2Conf.getTableMetadata();
+      if (StringUtils.equals(tableMeta1.getFullyQualifiedName(), tableMeta2.getFullyQualifiedName())
+          && op1Conf.getNeededColumns().equals(op2Conf.getNeededColumns())
+          && StringUtils.equals(op1Conf.getFilterExprString(), op2Conf.getFilterExprString())
+          && pctx.getPrunedPartitions(tsOp1).getPartitions().equals(
+              pctx.getPrunedPartitions(tsOp2).getPartitions())
+          && op1Conf.getRowLimit() == op2Conf.getRowLimit()) {
         return true;
       } else {
         return false;

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
index 73f88e0..6ca08e5 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/lineage/OpProcFactory.java
@@ -27,6 +27,7 @@ import java.util.Map;
 import java.util.Set;
 import java.util.Stack;
 
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
@@ -455,7 +456,7 @@ public class OpProcFactory {
               BaseColumnInfo col = expr_dep.getBaseCols().iterator().next();
               Table t = col.getTabAlias().getTable();
               if (t != null) {
-                sb.append(t.getDbName()).append(".").append(t.getTableName()).append(".");
+                sb.append(Warehouse.getQualifiedName(t)).append(".");
               }
               sb.append(col.getColumn().getName());
             }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
index 8704b0d..ac37cc4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
@@ -177,7 +177,7 @@ public class PartitionPruner extends Transform {
       LOG.trace("prune Expression = " + (prunerExpr == null ? "" : 
prunerExpr));
     }
 
-    String key = tab.getDbName() + "." + tab.getTableName() + ";";
+    String key = tab.getFullyQualifiedName() + ";";
 
     if (!tab.isPartitioned()) {
       // If the table is not partitioned, return empty list.

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
index 3efeecd..840e4ce 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
@@ -81,6 +81,7 @@ import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
@@ -367,10 +368,10 @@ public abstract class BaseSemanticAnalyzer {
       String dbName = dbTablePair.getKey();
       String tableName = dbTablePair.getValue();
       if (dbName != null){
-        return dbName + "." + tableName;
+        return StatsUtils.getFullyQualifiedTableName(dbName, tableName);
       }
       if (currentDatabase != null) {
-        return currentDatabase + "." + tableName;
+        return StatsUtils.getFullyQualifiedTableName(currentDatabase, tableName);
       }
       return tableName;
     } else if (tokenType == HiveParser.StringLiteral) {
@@ -1118,7 +1119,7 @@ public abstract class BaseSemanticAnalyzer {
 
     public TableSpec(Table table) {
       tableHandle = table;
-      tableName = table.getDbName() + "." + table.getTableName();
+      tableName = table.getFullyQualifiedName();
       specType = SpecType.TABLE_ONLY;
     }
 
@@ -1127,7 +1128,7 @@ public abstract class BaseSemanticAnalyzer {
       Table table = db.getTable(tableName);
       final Partition partition = partSpec == null ? null : db.getPartition(table, partSpec, false);
       tableHandle = table;
-      this.tableName = table.getDbName() + "." + table.getTableName();
+      this.tableName = table.getFullyQualifiedName();
       if (partition == null) {
         specType = SpecType.TABLE_ONLY;
       } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index ae27d60..46493ac 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -881,7 +881,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
 
     ctx.addMaterializedTable(cteName, table);
     // For CalcitePlanner, store qualified name too
-    ctx.addMaterializedTable(table.getDbName() + "." + table.getTableName(), table);
+    ctx.addMaterializedTable(table.getFullyQualifiedName(), table);
 
     return table;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
index d72ff5cd..7dd31fb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsAutoGatherContext.java
@@ -247,7 +247,7 @@ public class ColumnStatsAutoGatherContext {
   }
 
   public String getCompleteName() {
-    return tbl.getDbName() + "." + tbl.getTableName();
+    return tbl.getFullyQualifiedName();
   }
 
   public boolean isInsertInto() {

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
index 5733909..3eb6a21 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ColumnStatsSemanticAnalyzer.java
@@ -398,7 +398,7 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
       qb.setAnalyzeRewrite(true);
       qbp = qb.getParseInfo();
       analyzeRewrite = new AnalyzeRewriteContext();
-      analyzeRewrite.setTableName(tbl.getDbName() + "." + tbl.getTableName());
+      analyzeRewrite.setTableName(tbl.getFullyQualifiedName());
       analyzeRewrite.setTblLvl(isTableLevel);
       analyzeRewrite.setColName(colNames);
       analyzeRewrite.setColType(colType);
@@ -451,7 +451,7 @@ public class ColumnStatsSemanticAnalyzer extends SemanticAnalyzer {
     rewrittenTree = genRewrittenTree(rewrittenQuery);
 
     context.analyzeRewrite = new AnalyzeRewriteContext();
-    context.analyzeRewrite.setTableName(tbl.getDbName() + "." + tbl.getTableName());
+    context.analyzeRewrite.setTableName(tbl.getFullyQualifiedName());
     context.analyzeRewrite.setTblLvl(isTableLevel);
     context.analyzeRewrite.setColName(colNames);
     context.analyzeRewrite.setColType(colType);

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index ade47ba..498b674 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 
 import java.io.FileNotFoundException;
 import java.io.Serializable;
@@ -459,7 +460,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
     AlterTableDesc alterTblDesc =  new AlterTableDesc(
             AlterTableDesc.AlterTableTypes.ADDPROPS, new ReplicationSpec(replState, replState));
     alterTblDesc.setProps(mapProp);
-    alterTblDesc.setOldName(dbName + "." + tableName);
+    alterTblDesc.setOldName(StatsUtils.getFullyQualifiedTableName(dbName, tableName));
     alterTblDesc.setPartSpec((HashMap<String, String>)partSpec);
 
     Task<? extends Serializable> updateReplIdTask = TaskFactory.get(

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index df5c6aa..cbb2075 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2033,7 +2033,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
         if (qb.getParseInfo().isAnalyzeCommand()) {
           throw new SemanticException(ErrorMsg.ANALYZE_VIEW.getMsg());
         }
-        String fullViewName = tab.getDbName() + "." + tab.getTableName();
+        String fullViewName = tab.getFullyQualifiedName();
         // Prevent view cycles
         if (viewsExpanded.contains(fullViewName)) {
           throw new SemanticException("Recursive view " + fullViewName +

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
index e30abad..83433d7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/RenameTableHandler.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 
 import java.io.Serializable;
 import java.util.Collections;
@@ -55,8 +56,8 @@ public class RenameTableHandler extends AbstractMessageHandler {
         }
       }
 
-      String oldName = oldDbName + "." + msg.getTableObjBefore().getTableName();
-      String newName = newDbName + "." + msg.getTableObjAfter().getTableName();
+      String oldName = StatsUtils.getFullyQualifiedTableName(oldDbName, msg.getTableObjBefore().getTableName());
+      String newName = StatsUtils.getFullyQualifiedTableName(newDbName, msg.getTableObjAfter().getTableName());
       AlterTableDesc renameTableDesc = new AlterTableDesc(
               oldName, newName, false, context.eventOnlyReplicationSpec());
       Task<DDLWork> renameTableTask = TaskFactory.get(

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
index 249aaff..c5d6d18 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
@@ -165,6 +165,11 @@ public class TableDesc implements Serializable, Cloneable {
     return properties.getProperty(hive_metastoreConstants.META_TABLE_NAME);
   }
 
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public String getDbName() {
+    return properties.getProperty(hive_metastoreConstants.META_TABLE_DB);
+  }
+
   @Explain(displayName = "input format")
   public String getInputFileFormatClassName() {
     return getInputFileFormatClass().getName();

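The new getter mirrors getTableName() above it: both read from the descriptor's Properties bag, keyed by hive_metastoreConstants entries, and the @Explain annotation surfaces the value at the USER, DEFAULT, and EXTENDED explain levels. A sketch of the underlying lookup, with a placeholder value standing in for what Hive populates:

  import java.util.Properties;
  import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;

  public class TableDescDbNameDemo {
    public static void main(String[] args) {
      // Simulate the properties a TableDesc carries; "default" is a placeholder.
      Properties props = new Properties();
      props.setProperty(hive_metastoreConstants.META_TABLE_DB, "default");
      // TableDesc.getDbName() is equivalent to this lookup.
      System.out.println(props.getProperty(hive_metastoreConstants.META_TABLE_DB));
    }
  }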
http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
index 3529fb9..029f882 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/CompactorThread.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreThread;
 import org.apache.hadoop.hive.metastore.RawStore;
 import org.apache.hadoop.hive.metastore.RawStoreProxy;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -212,6 +213,6 @@ abstract class CompactorThread extends Thread implements MetaStoreThread {
   }
 
   protected String tableName(Table t) {
-    return t.getDbName() + "." + t.getTableName();
+    return Warehouse.getQualifiedName(t);
   }
 }

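Unlike the ql-layer changes, CompactorThread works with the thrift-generated metastore Table, so it uses the static Warehouse.getQualifiedName(...) instead. A sketch with placeholder names:

  import org.apache.hadoop.hive.metastore.Warehouse;
  import org.apache.hadoop.hive.metastore.api.Table;

  public class CompactorNameDemo {
    public static void main(String[] args) {
      Table t = new Table();       // thrift metastore table, built by hand here
      t.setDbName("default");      // placeholder values
      t.setTableName("page_view");
      // Same string the reworked tableName(t) helper now returns.
      System.out.println(Warehouse.getQualifiedName(t)); // default.page_view
    }
  }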
http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
index 2d6cce9..d3be11a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/txn/compactor/Worker.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
@@ -258,12 +259,15 @@ public class Worker extends CompactorThread {
         return;
       }
       if(columnList.isEmpty()) {
-        LOG.debug("No existing stats for " + ci.dbname + "." + ci.tableName + 
" found.  Will not run analyze.");
+        LOG.debug("No existing stats for "
+            + StatsUtils.getFullyQualifiedTableName(ci.dbname, ci.tableName)
+            + " found.  Will not run analyze.");
         return;//nothing to do
       }
      //e.g. analyze table page_view partition(dt='10/15/2014',country=’US’)
       // compute statistics for columns viewtime
-      StringBuilder sb = new StringBuilder("analyze table 
").append(ci.dbname).append(".").append(ci.tableName);
+      StringBuilder sb = new StringBuilder("analyze table ")
+          .append(StatsUtils.getFullyQualifiedTableName(ci.dbname, ci.tableName));
       if(ci.partName != null) {
         try {
           sb.append(" partition(");

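The Worker change only affects how the table reference inside the generated analyze statement is rendered; the partition and column clauses are appended as before. A sketch of the resulting command text, using placeholder names and the column from the comment above:

  import org.apache.hadoop.hive.ql.stats.StatsUtils;

  public class AnalyzeCommandDemo {
    public static void main(String[] args) {
      String dbname = "default";      // placeholder, stands in for ci.dbname
      String tableName = "page_view"; // placeholder, stands in for ci.tableName
      StringBuilder sb = new StringBuilder("analyze table ")
          .append(StatsUtils.getFullyQualifiedTableName(dbname, tableName));
      sb.append(" compute statistics for columns viewtime"); // example column
      System.out.println(sb);
      // analyze table default.page_view compute statistics for columns viewtime
    }
  }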
http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
index 2454afb..023462d 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/TestMsckCreatePartitionsInBatches.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hive.common.util.RetryUtilities.RetryException;
@@ -84,7 +85,7 @@ public class TestMsckCreatePartitionsInBatches {
           TextInputFormat.class, HiveIgnoreKeyTextOutputFormat.class);
       return db.getTable(dbName, tableName);
     } catch (Exception exception) {
-      fail("Unable to drop and create table " + dbName + "." + tableName + " 
because "
+      fail("Unable to drop and create table " + 
StatsUtils.getFullyQualifiedTableName(dbName, tableName) + " because "
           + StringUtils.stringifyException(exception));
       throw exception;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/84cf8bea/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index 92b5c91..bbd285d 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.index.HiveIndex;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.stats.StatsUtils;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
 import org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer;
@@ -503,7 +504,7 @@ public class TestHive extends TestCase {
       return hm.getTable(dbName, tableName);
     }
     catch (Exception exception) {
-      fail("Unable to drop and create table " + dbName + "." + tableName
+      fail("Unable to drop and create table " + 
StatsUtils.getFullyQualifiedTableName(dbName, tableName)
           + " because " + StringUtils.stringifyException(exception));
       throw exception;
     }
