HIVE-17537 Move Warehouse class to standalone metastore.  This closes #252.  (Alan Gates, reviewed by Zoltan Haindrich)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/56083008
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/56083008
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/56083008

Branch: refs/heads/hive-14535
Commit: 56083008b596f410e030551e8884761b151d8bdd
Parents: d45d462
Author: Alan Gates <alanfga...@gmail.com>
Authored: Wed Sep 20 09:47:44 2017 +1000
Committer: Alan Gates <alanfga...@gmail.com>
Committed: Wed Sep 20 09:47:44 2017 +1000

----------------------------------------------------------------------
 .../hadoop/hive/cli/TestCliSessionState.java    |   4 +-
 .../apache/hive/hcatalog/cli/HCatDriver.java    |   3 +-
 .../apache/hive/hcatalog/common/HCatUtil.java   |   4 +-
 .../hive/hcatalog/mapreduce/HCatTableInfo.java  |   4 +-
 .../hive/hcatalog/mapreduce/InputJobInfo.java   |   4 +-
 .../hive/hcatalog/mapreduce/OutputJobInfo.java  |   4 +-
 .../apache/hive/hcatalog/cli/TestPermsGrp.java  |   4 +-
 .../hive/hcatalog/cli/TestSemanticAnalysis.java |  24 +-
 .../hcatalog/mapreduce/HCatMapReduceTest.java   |   8 +-
 .../mapreduce/TestHCatPartitionPublish.java     |   3 +-
 .../hcatalog/mapreduce/TestPassProperties.java  |   4 +-
 .../apache/hive/hcatalog/pig/PigHCatUtil.java   |   4 +-
 .../hcatalog/pig/TestHCatLoaderEncryption.java  |   7 +-
 .../hive/hcatalog/api/HCatClientHMSImpl.java    |   4 +-
 .../apache/hive/hcatalog/api/HCatPartition.java |   3 +-
 .../org/apache/hive/hcatalog/api/HCatTable.java |   6 +-
 .../mapreduce/TestSequenceFileReadWrite.java    |   6 +-
 .../hive/metastore/TestHiveMetaStore.java       |   2 +-
 .../hadoop/hive/ql/history/TestHiveHistory.java |   4 +-
 .../TestSemanticAnalyzerHookLoading.java        |   4 +-
 .../hadoop/hive/hbase/HBaseQTestUtil.java       |   4 +-
 .../org/apache/hadoop/hive/ql/QTestUtil.java    |   6 +-
 .../hadoop/hive/metastore/HiveMetaStore.java    |   4 +-
 .../hive/metastore/HiveMetaStoreClient.java     |   2 +-
 .../hadoop/hive/metastore/MetaStoreUtils.java   |  24 +-
 .../hadoop/hive/metastore/ObjectStore.java      |   4 +-
 .../hive/metastore/ReplChangeManager.java       | 393 ------------
 .../apache/hadoop/hive/metastore/Warehouse.java | 635 -------------------
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java |   8 +-
 .../apache/hadoop/hive/ql/exec/StatsTask.java   |   3 +-
 .../bootstrap/load/table/LoadPartitions.java    |   2 +-
 .../repl/bootstrap/load/table/LoadTable.java    |   2 +-
 .../hive/ql/hooks/EnforceReadOnlyTables.java    |   2 +-
 .../hadoop/hive/ql/lockmgr/DummyTxnManager.java |   5 +-
 .../hadoop/hive/ql/lockmgr/HiveLockObject.java  |   5 +-
 .../hive/ql/metadata/HiveMetaStoreChecker.java  |   3 +-
 .../RewriteQueryUsingAggregateIndexCtx.java     |   3 +-
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |   3 +-
 .../hive/ql/parse/MacroSemanticAnalyzer.java    |   4 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |   2 +-
 .../plugin/sqlstd/SQLAuthorizationUtils.java    |   4 +-
 .../hadoop/hive/ql/session/SessionState.java    |   2 +-
 .../hadoop/hive/ql/exec/TestExecDriver.java     |  22 +-
 .../hadoop/hive/ql/metadata/TestHive.java       |  37 +-
 .../hive/ql/session/TestSessionState.java       |   9 +-
 standalone-metastore/pom.xml                    |  11 +
 .../hive/metastore/ReplChangeManager.java       | 393 ++++++++++++
 .../apache/hadoop/hive/metastore/Warehouse.java | 610 ++++++++++++++++++
 .../hive/metastore/conf/MetastoreConf.java      |   8 +
 .../hadoop/hive/metastore/utils/FileUtils.java  | 304 ++++++++-
 .../hadoop/hive/metastore/utils/HdfsUtils.java  |  75 +++
 .../hive/metastore/utils/MetaStoreUtils.java    |  15 +
 52 files changed, 1528 insertions(+), 1182 deletions(-)
----------------------------------------------------------------------
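Most of this patch is a mechanical re-pointing: the default-database constants (DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT, DEFAULT_SERIALIZATION_FORMAT) move from MetaStoreUtils to Warehouse, which itself moves into the standalone-metastore module, and every caller is updated accordingly. A minimal sketch of the caller-side change, assuming only the constants visible in the hunks below (the DefaultDbExample class is illustrative, not part of the patch):

    import org.apache.hadoop.hive.metastore.Warehouse;

    public class DefaultDbExample {
      public static void main(String[] args) {
        // Before this patch: MetaStoreUtils.DEFAULT_DATABASE_NAME
        // After this patch:  Warehouse.DEFAULT_DATABASE_NAME
        // The value ("default", per the definition removed from
        // MetaStoreUtils below) is unchanged; only the owning class moved.
        System.out.println(Warehouse.DEFAULT_DATABASE_NAME);
      }
    }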


http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
----------------------------------------------------------------------
diff --git a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
index d9718c6..effef09 100644
--- a/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
+++ b/cli/src/test/org/apache/hadoop/hive/cli/TestCliSessionState.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.cli;
 import static org.junit.Assert.assertEquals;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.junit.Test;
 
@@ -35,7 +35,7 @@ public class TestCliSessionState {
   @Test
   public void testgetDbName() throws Exception {
     SessionState.start(new HiveConf());
-    assertEquals(MetaStoreUtils.DEFAULT_DATABASE_NAME,
+    assertEquals(Warehouse.DEFAULT_DATABASE_NAME,
         SessionState.get().getCurrentDatabase());
   }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
index 07abd42..4873595 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/cli/HCatDriver.java
@@ -22,7 +22,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
@@ -113,7 +112,7 @@ public class HCatDriver extends Driver {
       }
     } else {
       // looks like a db operation
-      if (dbName.isEmpty() || dbName.equals(MetaStoreUtils.DEFAULT_DATABASE_NAME)) {
+      if (dbName.isEmpty() || dbName.equals(Warehouse.DEFAULT_DATABASE_NAME)) {
         // We dont set perms or groups for default dir.
         return 0;
       } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index 8b927af..107faf7 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -41,7 +41,7 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -426,7 +426,7 @@ public class HCatUtil {
   public static Pair<String, String> getDbAndTableName(String tableName) throws IOException {
     String[] dbTableNametokens = tableName.split("\\.");
     if (dbTableNametokens.length == 1) {
-      return new Pair<String, String>(MetaStoreUtils.DEFAULT_DATABASE_NAME, tableName);
+      return new Pair<String, String>(Warehouse.DEFAULT_DATABASE_NAME, tableName);
     } else if (dbTableNametokens.length == 2) {
       return new Pair<String, String>(dbTableNametokens[0], dbTableNametokens[1]);
     } else {

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatTableInfo.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatTableInfo.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatTableInfo.java
index 14c93ab..b9a3425 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatTableInfo.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatTableInfo.java
@@ -24,7 +24,7 @@ import java.io.Serializable;
 import java.util.List;
 
 import com.google.common.collect.Lists;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
@@ -76,7 +76,7 @@ public class HCatTableInfo implements Serializable {
     HCatSchema partitionColumns,
     StorerInfo storerInfo,
     Table table) {
-    this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+    this.databaseName = (databaseName == null) ? Warehouse.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.dataColumns = dataColumns;
     this.table = table;

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
index 7ec6ae3..c593dca 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InputJobInfo.java
@@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.mapreduce;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 
 import java.io.IOException;
 import java.io.ObjectInputStream;
@@ -84,7 +84,7 @@ public class InputJobInfo implements Serializable {
              String filter,
              Properties properties) {
     this.databaseName = (databaseName == null) ?
-      MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+      Warehouse.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.filter = filter;
     this.properties = properties == null ? new Properties() : properties;

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
index 0737122..453cc0b 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/OutputJobInfo.java
@@ -28,7 +28,7 @@ import java.util.Properties;
 
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hive.hcatalog.data.schema.HCatSchema;
 
 /** The class used to serialize and store the output related information  */
@@ -95,7 +95,7 @@ public class OutputJobInfo implements Serializable {
   private OutputJobInfo(String databaseName,
               String tableName,
               Map<String, String> partitionValues) {
-    this.databaseName = (databaseName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : databaseName;
+    this.databaseName = (databaseName == null) ? Warehouse.DEFAULT_DATABASE_NAME : databaseName;
     this.tableName = tableName;
     this.partitionValues = partitionValues;
     this.properties = new Properties();

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
index e863372..374c1d2 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestPermsGrp.java
@@ -106,7 +106,7 @@ public class TestPermsGrp extends TestCase {
 
   public void testCustomPerms() throws Exception {
 
-    String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+    String dbName = Warehouse.DEFAULT_DATABASE_NAME;
     String tblName = "simptbl";
     String typeName = "Person";
 
@@ -151,7 +151,7 @@ public class TestPermsGrp extends TestCase {
 
       // And no metadata gets created.
       try {
-        msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
+        msc.getTable(Warehouse.DEFAULT_DATABASE_NAME, tblName);
         assert false;
       } catch (Exception e) {
         assertTrue(e instanceof NoSuchObjectException);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
index e41b1f1..f259800 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/cli/TestSemanticAnalysis.java
@@ -29,7 +29,7 @@ import java.util.List;
 
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -95,7 +95,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     CommandProcessorResponse resp = driver.run("create table junit_sem_analysis (a int) partitioned by (B string) stored as TEXTFILE");
     assertEquals(resp.getResponseCode(), 0);
     assertEquals(null, resp.getErrorMessage());
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals("Partition key name case problem", "b", 
tbl.getPartitionKeys().get(0).getName());
     driver.run("drop table junit_sem_analysis");
   }
@@ -108,13 +108,13 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     driver.run("alter table junit_sem_analysis add partition 
(b='2010-10-10')");
     hcatDriver.run("alter table junit_sem_analysis partition (b='2010-10-10') 
set fileformat RCFILE");
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(TextInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
     List<String> partVals = new ArrayList<String>(1);
     partVals.add("2010-10-10");
-    Partition part = client.getPartition(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
+    Partition part = client.getPartition(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME, partVals);
 
     assertEquals(RCFileInputFormat.class.getName(), part.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), part.getSd().getOutputFormat());
@@ -161,7 +161,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
 
     hcatDriver.run("drop table " + TBL_NAME);
     hcatDriver.run("create table " + TBL_NAME + " (a int) stored as RCFILE");
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     List<FieldSchema> cols = tbl.getSd().getCols();
     assertEquals(1, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -171,7 +171,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
     assertEquals(0, resp.getResponseCode());
     assertNull(resp.getErrorMessage());
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     cols = tbl.getSd().getCols();
     assertEquals(1, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
@@ -224,7 +224,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
 
     response = hcatDriver.run("describe extended junit_sem_analysis");
     assertEquals(0, response.getResponseCode());
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     List<FieldSchema> cols = tbl.getSd().getCols();
     assertEquals(2, cols.size());
     assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
@@ -247,11 +247,11 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     hcatDriver.run("drop table oldname");
     hcatDriver.run("drop table newname");
     hcatDriver.run("create table oldname (a int)");
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "oldname");
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, "oldname");
     assertTrue("The old table location is: " + tbl.getSd().getLocation(), 
tbl.getSd().getLocation().contains("oldname"));
 
     hcatDriver.run("alter table oldname rename to newNAME");
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "newname");
+    tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, "newname");
     // since the oldname table is not under its database (See HIVE-15059), the renamed oldname table will keep
     // its location after HIVE-14909. I changed to check the existence of the newname table and its name instead
     // of verifying its location
@@ -268,7 +268,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b 
string) stored as RCFILE");
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -276,7 +276,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 
'mydriver' outputdriver 'yourdriver'");
     hcatDriver.run("desc extended junit_sem_analysis");
 
-    tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 
@@ -332,7 +332,7 @@ public class TestSemanticAnalysis extends HCatBaseTest {
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 
'mydriver' outputdriver 'yourdriver' ";
     assertEquals(0, hcatDriver.run(query).getResponseCode());
 
-    Table tbl = client.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, TBL_NAME);
+    Table tbl = client.getTable(Warehouse.DEFAULT_DATABASE_NAME, TBL_NAME);
     assertEquals(RCFileInputFormat.class.getName(), tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(), tbl.getSd().getOutputFormat());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
index deee3a0..ae56ff7 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
@@ -36,8 +36,8 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -88,7 +88,7 @@ import static org.junit.Assert.assertTrue;
 public abstract class HCatMapReduceTest extends HCatBaseTest {
   private static final Logger LOG = LoggerFactory.getLogger(HCatMapReduceTest.class);
 
-  protected static String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  protected static String dbName = Warehouse.DEFAULT_DATABASE_NAME;
   protected static final String TABLE_NAME = "testHCatMapReduceTable";
 
   private static List<HCatRecord> writeRecords = new ArrayList<HCatRecord>();
@@ -155,7 +155,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest {
   @After
   public void deleteTable() throws Exception {
     try {
-      String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+      String databaseName = (dbName == null) ? Warehouse.DEFAULT_DATABASE_NAME : dbName;
 
       client.dropTable(databaseName, tableName);
       // in case of external table, drop the table contents as well
@@ -176,7 +176,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest {
     // SerDe is in the disabled serdes list.
     Assume.assumeTrue(!DISABLED_SERDES.contains(serdeClass));
 
-    String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+    String databaseName = (dbName == null) ? Warehouse.DEFAULT_DATABASE_NAME : dbName;
     try {
       client.dropTable(databaseName, tableName);
     } catch (Exception e) {

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
index 358dd50..61b2f41 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestHCatPartitionPublish.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -222,7 +223,7 @@ public class TestHCatPartitionPublish {
   }
 
   private void createTable(String dbName, String tableName) throws Exception {
-    String databaseName = (dbName == null) ? MetaStoreUtils.DEFAULT_DATABASE_NAME
+    String databaseName = (dbName == null) ? Warehouse.DEFAULT_DATABASE_NAME
         : dbName;
     try {
       msc.dropTable(databaseName, tableName);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
index 975f94b..31857bf 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
@@ -29,7 +29,7 @@ import java.util.ArrayList;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -105,7 +105,7 @@ public class TestPassProperties {
       TextInputFormat.setInputPaths(job, INPUT_FILE_NAME);
 
       HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-          MetaStoreUtils.DEFAULT_DATABASE_NAME, "bad_props_table", null));
+          Warehouse.DEFAULT_DATABASE_NAME, "bad_props_table", null));
       job.setOutputFormatClass(HCatOutputFormat.class);
       HCatOutputFormat.setSchema(job, getSchema());
       job.setNumReduceTasks(0);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
index 337f4fb..2e756b4 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.mapreduce.Job;
@@ -70,7 +70,7 @@ class PigHCatUtil {
   private static final Logger LOG = LoggerFactory.getLogger(PigHCatUtil.class);
 
   static final int PIG_EXCEPTION_CODE = 1115; // http://wiki.apache.org/pig/PigErrorHandlingFunctionalSpecification#Error_codes
-  private static final String DEFAULT_DB = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  private static final String DEFAULT_DB = Warehouse.DEFAULT_DATABASE_NAME;
 
   private final Map<Pair<String, String>, Table> hcatTableCache =
     new HashMap<Pair<String, String>, Table>();

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
index 903578b..496f3c8 100644
--- a/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
+++ b/hcatalog/hcatalog-pig-adapter/src/test/java/org/apache/hive/hcatalog/pig/TestHCatLoaderEncryption.java
@@ -26,10 +26,9 @@ import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
-import org.apache.hadoop.hive.ql.io.IOConstants;
 import org.apache.hadoop.hive.ql.io.StorageFormats;
 import org.apache.hadoop.hive.ql.processors.CommandProcessor;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
@@ -44,7 +43,6 @@ import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
-import org.apache.hadoop.util.Shell;
 import org.apache.hive.hcatalog.HcatTestUtils;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.data.HCatRecord;
@@ -67,7 +65,6 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -333,7 +330,7 @@ public class TestHCatLoaderEncryption {
     job.setInputFormatClass(HCatInputFormat.class);
     job.setOutputFormatClass(TextOutputFormat.class);
 
-    HCatInputFormat.setInput(job, MetaStoreUtils.DEFAULT_DATABASE_NAME, ENCRYPTED_TABLE, null);
+    HCatInputFormat.setInput(job, Warehouse.DEFAULT_DATABASE_NAME, ENCRYPTED_TABLE, null);
 
     job.setMapOutputKeyClass(BytesWritable.class);
     job.setMapOutputValueClass(Text.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
index 4ab497e..17b9d03 100644
--- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
+++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
@@ -34,8 +34,8 @@ import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.Database;
@@ -891,7 +891,7 @@ public class HCatClientHMSImpl extends HCatClient {
 
   private String checkDB(String name) {
     if (StringUtils.isEmpty(name)) {
-      return MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      return Warehouse.DEFAULT_DATABASE_NAME;
     } else {
       return name;
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
index 4a3170a..afc9953 100644
--- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
+++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatPartition.java
@@ -27,7 +27,6 @@ import java.util.Map;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -51,7 +50,7 @@ public class HCatPartition {
 
   private HCatTable hcatTable;
   private String tableName;
-  private String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  private String dbName = Warehouse.DEFAULT_DATABASE_NAME;
   private List<String> values;
   private int createTime;
   private int lastAccessTime;

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
index c604623..99af291 100644
--- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
+++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatTable.java
@@ -30,8 +30,8 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Order;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
@@ -108,7 +108,7 @@ public class HCatTable {
   public static final String DEFAULT_INPUT_FORMAT_CLASS = org.apache.hadoop.mapred.TextInputFormat.class.getName();
   public static final String DEFAULT_OUTPUT_FORMAT_CLASS = org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName();
 
-  private String dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+  private String dbName = Warehouse.DEFAULT_DATABASE_NAME;
   private String tableName;
   private HiveConf conf;
   private String tableType;
@@ -122,7 +122,7 @@ public class HCatTable {
   private String owner;
 
   public HCatTable(String dbName, String tableName) {
-    this.dbName = StringUtils.isBlank(dbName)? MetaStoreUtils.DEFAULT_DATABASE_NAME : dbName;
+    this.dbName = StringUtils.isBlank(dbName)? Warehouse.DEFAULT_DATABASE_NAME : dbName;
     this.tableName = tableName;
     this.sd = new StorageDescriptor();
     this.sd.setInputFormat(DEFAULT_INPUT_FORMAT_CLASS);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
----------------------------------------------------------------------
diff --git a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
index 48aed79..d2dbe8f 100644
--- a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
+++ b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/mapreduce/TestSequenceFileReadWrite.java
@@ -31,7 +31,7 @@ import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.io.LongWritable;
@@ -179,7 +179,7 @@ public class TestSequenceFileReadWrite {
     TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-        MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_2", null));
+        Warehouse.DEFAULT_DATABASE_NAME, "demo_table_2", null));
     job.setOutputFormatClass(HCatOutputFormat.class);
     HCatOutputFormat.setSchema(job, getSchema());
     job.setNumReduceTasks(0);
@@ -226,7 +226,7 @@ public class TestSequenceFileReadWrite {
     TextInputFormat.setInputPaths(job, inputFileName);
 
     HCatOutputFormat.setOutput(job, OutputJobInfo.create(
-        MetaStoreUtils.DEFAULT_DATABASE_NAME, "demo_table_3", null));
+        Warehouse.DEFAULT_DATABASE_NAME, "demo_table_3", null));
     job.setOutputFormatClass(HCatOutputFormat.class);
     HCatOutputFormat.setSchema(job, getSchema());
     assertTrue(job.waitForCompletion(true));

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
index 50e5274..98dad7a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
@@ -3212,7 +3212,7 @@ public abstract class TestHiveMetaStore extends TestCase {
 
   @Test
   public void testDBOwner() throws NoSuchObjectException, MetaException, TException {
-    Database db = client.getDatabase(MetaStoreUtils.DEFAULT_DATABASE_NAME);
+    Database db = client.getDatabase(Warehouse.DEFAULT_DATABASE_NAME);
     assertEquals(db.getOwnerName(), HiveMetaStore.PUBLIC);
     assertEquals(db.getOwnerType(), PrincipalType.ROLE);
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
index 8d5530f..bec715d 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/history/TestHiveHistory.java
@@ -34,7 +34,7 @@ import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.history.HiveHistory.Keys;
 import org.apache.hadoop.hive.ql.history.HiveHistory.QueryInfo;
@@ -101,7 +101,7 @@ public class TestHiveHistory extends TestCase {
       cols.add("key");
       cols.add("value");
       for (String src : srctables) {
-        db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, src, true, true);
+        db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, src, true, true);
         db.createTable(src, cols, null, TextInputFormat.class,
             IgnoreKeyTextOutputFormat.class);
         db.loadTable(hadoopDataFile[i], src, false, false, false, false, false);

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
index 3027ef4..2170ca3 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/metadata/TestSemanticAnalyzerHookLoading.java
@@ -24,7 +24,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
@@ -47,7 +47,7 @@ public class TestSemanticAnalyzerHookLoading extends TestCase {
     assertEquals(0, resp.getResponseCode());
     assertNull(resp.getErrorMessage());
 
-    Map<String,String> params = Hive.get(conf).getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "testDL").getParameters();
+    Map<String,String> params = Hive.get(conf).getTable(Warehouse.DEFAULT_DATABASE_NAME, "testDL").getParameters();
 
     assertEquals(DummyCreateTableHook.class.getName(),params.get("createdBy"));
     assertEquals("Open Source rocks!!", params.get("Message"));

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
index aeb7215..0cc9a89 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/hbase/HBaseQTestUtil.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.hbase;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.QTestUtil;
 
 import java.util.List;
@@ -109,7 +109,7 @@ public class HBaseQTestUtil extends QTestUtil {
     super.cleanUp(tname);
 
     // drop in case leftover from unsuccessful run
-    db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, HBASE_SRC_NAME);
+    db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, HBASE_SRC_NAME);
 
     HBaseAdmin admin = null;
     try {

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
----------------------------------------------------------------------
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index b367732..05f8a5f 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.ql;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
 
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
@@ -101,7 +101,7 @@ import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.llap.LlapItUtils;
 import org.apache.hadoop.hive.llap.daemon.MiniLlapCluster;
 import org.apache.hadoop.hive.llap.io.api.LlapProxy;
-import org.apache.hadoop.hive.metastore.MetaStoreUtils;
+import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -1423,7 +1423,7 @@ public class QTestUtil {
       .run("FROM dest4_sequencefile INSERT OVERWRITE TABLE dest4 SELECT 
dest4_sequencefile.*");
 
     // Drop dest4_sequencefile
-    db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, "dest4_sequencefile",
+    db.dropTable(Warehouse.DEFAULT_DATABASE_NAME, "dest4_sequencefile",
         true, true);
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index f2747f9..8bbc325 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hive.metastore;
 
 import static org.apache.commons.lang.StringUtils.join;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_COMMENT;
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_COMMENT;
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.validateName;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index f839ee7..6bc45b6 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.hive.metastore;
 
-import static org.apache.hadoop.hive.metastore.MetaStoreUtils.DEFAULT_DATABASE_NAME;
+import static org.apache.hadoop.hive.metastore.Warehouse.DEFAULT_DATABASE_NAME;
 import static org.apache.hadoop.hive.metastore.MetaStoreUtils.isIndexTable;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
index bbe13fd..6cf9a5c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java
@@ -120,12 +120,6 @@ public class MetaStoreUtils {
 
   protected static final Logger LOG = LoggerFactory.getLogger("hive.log");
 
-  public static final String DEFAULT_DATABASE_NAME = "default";
-  public static final String DEFAULT_DATABASE_COMMENT = "Default Hive database";
-  public static final String DEFAULT_SERIALIZATION_FORMAT = "1";
-
-  public static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
-
   // Right now we only support one special character '/'.
   // More special characters can be added accordingly in the future.
   // NOTE:
@@ -151,7 +145,7 @@ public class MetaStoreUtils {
     serdeInfo.setSerializationLib(LazySimpleSerDe.class.getName());
     serdeInfo.setParameters(new HashMap<String, String>());
     serdeInfo.getParameters().put(org.apache.hadoop.hive.serde.serdeConstants.SERIALIZATION_FORMAT,
-        DEFAULT_SERIALIZATION_FORMAT);
+        Warehouse.DEFAULT_SERIALIZATION_FORMAT);
 
     List<FieldSchema> fields = new ArrayList<FieldSchema>(columns.size());
     sd.setCols(fields);
@@ -1887,22 +1881,6 @@ public class MetaStoreUtils {
     return new URLClassLoader(curPath.toArray(new URL[0]), loader);
   }
 
-  public static String encodeTableName(String name) {
-    // The encoding method is simple, e.g., replace
-    // all the special characters with the corresponding number in ASCII.
-    // Note that unicode is not supported in table names. And we have explicit
-    // checks for it.
-    StringBuilder sb = new StringBuilder();
-    for (char ch : name.toCharArray()) {
-      if (Character.isLetterOrDigit(ch) || ch == '_') {
-        sb.append(ch);
-      } else {
-        sb.append('-').append((int) ch).append('-');
-      }
-    }
-    return sb.toString();
-  }
-
   // this function will merge csOld into csNew.
   public static void mergeColStats(ColumnStatistics csNew, ColumnStatistics csOld)
       throws InvalidObjectException {

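The encodeTableName helper removed from MetaStoreUtils above keeps letters, digits, and underscores as-is and replaces every other character with its ASCII code wrapped in hyphens. A self-contained sketch of the same scheme (the EncodeTableNameDemo class is hypothetical; the loop is copied from the removed method):

    public class EncodeTableNameDemo {
      // Same encoding as the encodeTableName method removed above:
      // keep [A-Za-z0-9_], replace anything else with -<ASCII code>-.
      static String encodeTableName(String name) {
        StringBuilder sb = new StringBuilder();
        for (char ch : name.toCharArray()) {
          if (Character.isLetterOrDigit(ch) || ch == '_') {
            sb.append(ch);
          } else {
            sb.append('-').append((int) ch).append('-');
          }
        }
        return sb.toString();
      }

      public static void main(String[] args) {
        // '/' is ASCII 47, so "sales/2017" encodes to "sales-47-2017".
        System.out.println(encodeTableName("sales/2017"));
      }
    }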
http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 7ab98ef..1344e9c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -7723,7 +7723,7 @@ public class ObjectStore implements RawStore, Configurable {
     boolean ret = false;
     Query query = null;
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = Warehouse.DEFAULT_DATABASE_NAME;
     }
     if (tableName == null) {
       throw new InvalidInputException("Table name is null.");
@@ -7801,7 +7801,7 @@ public class ObjectStore implements RawStore, Configurable {
     boolean ret = false;
     Query query = null;
     if (dbName == null) {
-      dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
+      dbName = Warehouse.DEFAULT_DATABASE_NAME;
     }
     if (tableName == null) {
       throw new InvalidInputException("Table name is null.");

http://git-wip-us.apache.org/repos/asf/hive/blob/56083008/metastore/src/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java b/metastore/src/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
deleted file mode 100644
index 6cb5fa8..0000000
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/ReplChangeManager.java
+++ /dev/null
@@ -1,393 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.metastore;
-
-import java.io.IOException;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.commons.lang3.concurrent.BasicThreadFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileChecksum;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.PathFilter;
-import org.apache.hadoop.fs.Trash;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hive.common.FileUtils;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ReplChangeManager {
-  private static final Logger LOG = LoggerFactory.getLogger(ReplChangeManager.class);
-  static private ReplChangeManager instance;
-
-  private static boolean inited = false;
-  private static boolean enabled = false;
-  private static Path cmroot;
-  private static HiveConf hiveConf;
-  private String msUser;
-  private String msGroup;
-  private FileSystem fs;
-
-  private static final String ORIG_LOC_TAG = "user.original-loc";
-  static final String REMAIN_IN_TRASH_TAG = "user.remain-in-trash";
-  private static final String URI_FRAGMENT_SEPARATOR = "#";
-
-  public enum RecycleType {
-    MOVE,
-    COPY
-  }
-
-  public static ReplChangeManager getInstance(HiveConf hiveConf) throws MetaException {
-    if (instance == null) {
-      instance = new ReplChangeManager(hiveConf);
-    }
-    return instance;
-  }
-
-  private ReplChangeManager(HiveConf hiveConf) throws MetaException {
-    try {
-      if (!inited) {
-        if (hiveConf.getBoolVar(HiveConf.ConfVars.REPLCMENABLED)) {
-          ReplChangeManager.enabled = true;
-          ReplChangeManager.cmroot = new Path(hiveConf.get(HiveConf.ConfVars.REPLCMDIR.varname));
-          ReplChangeManager.hiveConf = hiveConf;
-
-          fs = cmroot.getFileSystem(hiveConf);
-          // Create cmroot with permission 700 if not exist
-          if (!fs.exists(cmroot)) {
-            fs.mkdirs(cmroot);
-            fs.setPermission(cmroot, new FsPermission("700"));
-          }
-          UserGroupInformation usergroupInfo = UserGroupInformation.getCurrentUser();
-          msUser = usergroupInfo.getShortUserName();
-          msGroup = usergroupInfo.getPrimaryGroupName();
-        }
-        inited = true;
-      }
-    } catch (IOException e) {
-      throw new MetaException(StringUtils.stringifyException(e));
-    }
-  }
-
-  // Filter files starts with ".". Note Hadoop consider files starts with
-  // "." or "_" as hidden file. However, we need to replicate files starts
-  // with "_". We find at least 2 use cases:
-  // 1. For har files, _index and _masterindex is required files
-  // 2. _success file is required for Oozie to indicate availability of data source
-  private static final PathFilter hiddenFileFilter = new PathFilter(){
-    public boolean accept(Path p){
-      return !p.getName().startsWith(".");
-    }
-  };
-
-  /***
-   * Move a path into cmroot. If the path is a directory (of a partition, or table if nonpartitioned),
-   *   recursively move files inside directory to cmroot. Note the table must be managed table
-   * @param path a single file or directory
-   * @param type if the files to be copied or moved to cmpath.
-   *             Copy is costly but preserve the source file
-   * @param ifPurge if the file should skip Trash when move/delete source file.
-   *                This is referred only if type is MOVE.
-   * @return int
-   * @throws MetaException
-   */
-  int recycle(Path path, RecycleType type, boolean ifPurge) throws MetaException {
-    if (!enabled) {
-      return 0;
-    }
-
-    try {
-      int count = 0;
-
-      if (fs.isDirectory(path)) {
-        FileStatus[] files = fs.listStatus(path, hiddenFileFilter);
-        for (FileStatus file : files) {
-          count += recycle(file.getPath(), type, ifPurge);
-        }
-      } else {
-        String fileCheckSum = checksumFor(path, fs);
-        Path cmPath = getCMPath(hiveConf, path.getName(), fileCheckSum);
-
-        // set the timestamp before moving to cmroot, so we avoid a race
-        // condition where the CM cleaner removes the file before the
-        // timestamp is set
-        long now = System.currentTimeMillis();
-        fs.setTimes(path, now, -1);
-
-        boolean success = false;
-        if (fs.exists(cmPath) && fileCheckSum.equalsIgnoreCase(checksumFor(cmPath, fs))) {
-          // If a file with the same checksum already exists in cmPath, just ignore
-          // the copy/move. Also, mark the operation as unsuccessful to note that a
-          // file with the same name already exists, which ensures the timestamp of
-          // cmPath is updated to avoid clean-up by the CM cleaner.
-          success = false;
-        } else {
-          switch (type) {
-            case MOVE: {
-              if (LOG.isDebugEnabled()) {
-                LOG.debug("Moving {} to {}", path.toString(), 
cmPath.toString());
-              }
-              // Rename fails if a file with the same name already exists.
-              success = fs.rename(path, cmPath);
-              break;
-            }
-            case COPY: {
-              if (LOG.isDebugEnabled()) {
-                LOG.debug("Copying {} to {}", path.toString(), 
cmPath.toString());
-              }
-              // It is possible to have a file with the same checksum in cmPath but
-              // with content that is partially copied or corrupted. In this case,
-              // just overwrite the existing file with the new one.
-              success = FileUtils.copy(fs, path, fs, cmPath, false, true, hiveConf);
-              break;
-            }
-            default:
-              // Operation fails as invalid input
-              break;
-          }
-        }
-
-        // Ignore if a file with the same content already exists in cmroot
-        // We might want to setXAttr for the new location in the future
-        if (success) {
-          // set the file owner to hive (or the id metastore run as)
-          fs.setOwner(cmPath, msUser, msGroup);
-
-          // tag the original file name so we know where the file came from.
-          // Note we currently only track the last known trace, as xattrs have
-          // limited capacity. We shall revisit and store all original
-          // locations if orig-loc becomes important
-          try {
-            fs.setXAttr(cmPath, ORIG_LOC_TAG, path.toString().getBytes());
-          } catch (UnsupportedOperationException e) {
-            LOG.warn("Error setting xattr for {}", path.toString());
-          }
-
-          count++;
-        } else {
-          if (LOG.isDebugEnabled()) {
-            LOG.debug("A file with the same content of {} already exists, 
ignore", path.toString());
-          }
-          // Need to extend the retention period if we saw a newer file with the same content
-          fs.setTimes(cmPath, now, -1);
-        }
-
-        // Tag the file if we want it to remain in trash after deletion.
-        // If multiple files share the same content, then a remain-in-trash
-        // claim from any of them is granted
-        if ((type == RecycleType.MOVE) && !ifPurge) {
-          try {
-            fs.setXAttr(cmPath, REMAIN_IN_TRASH_TAG, new byte[]{0});
-          } catch (UnsupportedOperationException e) {
-            LOG.warn("Error setting xattr for {}", cmPath.toString());
-          }
-        }
-      }
-      return count;
-    } catch (IOException e) {
-      throw new MetaException(StringUtils.stringifyException(e));
-    }
-  }
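// A sketch of recycling a dropped partition directory; "cm" is the instance
// from getInstance above, the path is hypothetical, and the caller is assumed
// to live in org.apache.hadoop.hive.metastore since recycle() is
// package-private. Every non-hidden file underneath is moved into cmroot,
// keyed by checksum, and the return value counts the files recycled.
Path partDir = new Path("/warehouse/db.db/tbl/ds=2017-09-20");
int recycled = cm.recycle(partDir, ReplChangeManager.RecycleType.MOVE, false);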
-
-  // Get checksum of a file
-  static public String checksumFor(Path path, FileSystem fs) throws IOException {
-    // TODO: fs checksum only available on hdfs, need to
-    //       find a solution for other fs (eg, local fs, s3, etc)
-    String checksumString = null;
-    FileChecksum checksum = fs.getFileChecksum(path);
-    if (checksum != null) {
-      checksumString = StringUtils.byteToHexString(
-          checksum.getBytes(), 0, checksum.getLength());
-    }
-    return checksumString;
-  }
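// A sketch of the checksum lookup, with a hypothetical path. On HDFS the
// result is the composite block checksum rendered as hex; on filesystems that
// expose no checksum (e.g. the local fs), getFileChecksum returns null and so
// does checksumFor, so the caller must handle a null result.
FileSystem fs = FileSystem.get(conf);
String checksum = ReplChangeManager.checksumFor(new Path("/warehouse/tbl/000000_0"), fs);
if (checksum == null) {
  // content-addressed recovery is not possible on this filesystem
}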
-
-  static public void setCmRoot(Path cmRoot) {
-    ReplChangeManager.cmroot = cmRoot;
-  }
-
-  /***
-   * Convert the path of a file inside a partition or table (if non-partitioned)
-   *   to a deterministic location under cmroot, so the user can retrieve the
-   *   file with the original location plus the checksum.
-   * @param conf
-   * @param name original filename
-   * @param checkSum checksum of the file, can be retrieved by {@link #checksumFor(Path, FileSystem)}
-   * @return Path
-   */
-  static Path getCMPath(Configuration conf, String name, String checkSum) throws IOException, MetaException {
-    String newFileName = name + "_" + checkSum;
-    int maxLength = conf.getInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY,
-        DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_DEFAULT);
-
-    if (newFileName.length() > maxLength) {
-      newFileName = newFileName.substring(0, maxLength-1);
-    }
-
-    return new Path(cmroot, newFileName);
-  }
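// A worked example of the naming scheme, with a hypothetical checksum and a
// caller assumed to be in the same package (getCMPath is package-private):
// the cmroot entry is "<original-name>_<checksum>", truncated if the combined
// name exceeds the NameNode's maximum component length.
Path cmPath = ReplChangeManager.getCMPath(conf, "000000_0", "52a3f91c");
// cmPath is <cmroot>/000000_0_52a3f91c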
-
-  /***
-   * Get the original file specified by src and chksumString. If the file exists and the
-   * checksum matches, return the file; otherwise, use chksumString to retrieve it from cmroot
-   * @param src Original file location
-   * @param checksumString Checksum of the original file
-   * @param hiveConf
-   * @return Corresponding FileStatus object
-   */
-  static public FileStatus getFileStatus(Path src, String checksumString,
-      HiveConf hiveConf) throws MetaException {
-    try {
-      FileSystem srcFs = src.getFileSystem(hiveConf);
-      if (checksumString == null) {
-        return srcFs.getFileStatus(src);
-      }
-
-      if (!srcFs.exists(src)) {
-        return srcFs.getFileStatus(getCMPath(hiveConf, src.getName(), checksumString));
-      }
-
-      String currentChecksumString = checksumFor(src, srcFs);
-      if (currentChecksumString == null || 
checksumString.equals(currentChecksumString)) {
-        return srcFs.getFileStatus(src);
-      } else {
-        return srcFs.getFileStatus(getCMPath(hiveConf, src.getName(), checksumString));
-      }
-    } catch (IOException e) {
-      throw new MetaException(StringUtils.stringifyException(e));
-    }
-  }
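// A sketch of the fallback behavior, with hypothetical values: if the source
// file was deleted or rewritten since the checksum was taken, the copy kept
// under cmroot (addressed by checksum) is returned instead of the original.
FileStatus stat = ReplChangeManager.getFileStatus(
    new Path("hdfs://nn:8020/warehouse/tbl/000000_0"), "52a3f91c", conf);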
-
-  /***
-   * Concatenate filename and checksum with "#"
-   * @param fileUriStr Filename string
-   * @param fileChecksum Checksum string
-   * @return Concatenated Uri string
-   */
-  // TODO: this needs to be enhanced once a change-management-based filesystem is implemented
-  // Currently using fileuri#checksum as the format
-  static public String encodeFileUri(String fileUriStr, String fileChecksum) {
-    if (fileChecksum != null) {
-      return fileUriStr + URI_FRAGMENT_SEPARATOR + fileChecksum;
-    } else {
-      return fileUriStr;
-    }
-  }
-
-  /***
-   * Split uri with fragment into file uri and checksum
-   * @param fileURIStr uri with fragment
-   * @return array of file name and checksum
-   */
-  static public String[] getFileWithChksumFromURI(String fileURIStr) {
-    String[] uriAndFragment = fileURIStr.split(URI_FRAGMENT_SEPARATOR);
-    String[] result = new String[2];
-    result[0] = uriAndFragment[0];
-    if (uriAndFragment.length>1) {
-      result[1] = uriAndFragment[1];
-    }
-    return result;
-  }
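// A round-trip sketch of the fileuri#checksum encoding, with hypothetical values.
String encoded = ReplChangeManager.encodeFileUri("hdfs://nn:8020/warehouse/tbl/000000_0", "52a3f91c");
// encoded == "hdfs://nn:8020/warehouse/tbl/000000_0#52a3f91c"
String[] parts = ReplChangeManager.getFileWithChksumFromURI(encoded);
// parts[0] is the file uri; parts[1] is the checksum, or null when no fragment is present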
-
-  public static boolean isCMFileUri(Path fromPath, FileSystem srcFs) {
-    String[] result = getFileWithChksumFromURI(fromPath.toString());
-    return result[1] != null;
-  }
-
-  /**
-   * Thread to clear old files of cmroot recursively
-   */
-  static class CMClearer implements Runnable {
-    private Path cmroot;
-    private long secRetain;
-    private HiveConf hiveConf;
-
-    CMClearer(String cmrootString, long secRetain, HiveConf hiveConf) {
-      this.cmroot = new Path(cmrootString);
-      this.secRetain = secRetain;
-      this.hiveConf = hiveConf;
-    }
-
-    @Override
-    public void run() {
-      try {
-        LOG.info("CMClearer started");
-
-        long now = System.currentTimeMillis();
-        FileSystem fs = cmroot.getFileSystem(hiveConf);
-        FileStatus[] files = fs.listStatus(cmroot);
-
-        for (FileStatus file : files) {
-          long modifiedTime = file.getModificationTime();
-          if (now - modifiedTime > secRetain*1000) {
-            try {
-              if (fs.getXAttrs(file.getPath()).containsKey(REMAIN_IN_TRASH_TAG)) {
-                boolean succ = Trash.moveToAppropriateTrash(fs, file.getPath(), hiveConf);
-                if (succ) {
-                  if (LOG.isDebugEnabled()) {
-                    LOG.debug("Move " + file.toString() + " to trash");
-                  }
-                } else {
-                  LOG.warn("Fail to move " + file.toString() + " to trash");
-                }
-              } else {
-                boolean succ = fs.delete(file.getPath(), false);
-                if (succ) {
-                  if (LOG.isDebugEnabled()) {
-                    LOG.debug("Remove " + file.toString());
-                  }
-                } else {
-                  LOG.warn("Fail to remove " + file.toString());
-                }
-              }
-            } catch (UnsupportedOperationException e) {
-              LOG.warn("Error getting xattr for " + file.getPath().toString());
-            }
-          }
-        }
-      } catch (IOException e) {
-        LOG.error("Exception when clearing cmroot:" + 
StringUtils.stringifyException(e));
-      }
-    }
-  }
-
-  // Schedule CMClearer thread. Will be invoked by metastore
-  static void scheduleCMClearer(HiveConf hiveConf) {
-    if (HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.REPLCMENABLED)) {
-      ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(
-          new BasicThreadFactory.Builder()
-          .namingPattern("cmclearer-%d")
-          .daemon(true)
-          .build());
-      executor.scheduleAtFixedRate(new CMClearer(hiveConf.get(HiveConf.ConfVars.REPLCMDIR.varname),
-          hiveConf.getTimeVar(ConfVars.REPLCMRETIAN, TimeUnit.SECONDS), hiveConf),
-          0, hiveConf.getTimeVar(ConfVars.REPLCMINTERVAL, TimeUnit.SECONDS), TimeUnit.SECONDS);
-    }
-  }
-}
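// A sketch of the knobs the clearer cycle depends on; the values here are
// hypothetical, and the caller is assumed to be in the same package since
// scheduleCMClearer is package-private. Files under cmroot older than the
// retention window are deleted (or moved to trash when tagged) on every interval.
conf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
conf.setTimeVar(HiveConf.ConfVars.REPLCMRETIAN, 24, TimeUnit.HOURS);
conf.setTimeVar(HiveConf.ConfVars.REPLCMINTERVAL, 1, TimeUnit.HOURS);
ReplChangeManager.scheduleCMClearer(conf);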
