HIVE-9582: HCatalog should use IMetaStoreClient interface (Thiruvel Thirumoolan, reviewed by Sushanth Sowmyan, Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/45307c10
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/45307c10
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/45307c10

Branch: refs/heads/beeline-cli
Commit: 45307c10e472e7dd42b28310f9adf7afe27bf6d7
Parents: c011673
Author: Sushanth Sowmyan <khorg...@gmail.com>
Authored: Wed May 6 02:32:06 2015 -0700
Committer: Sushanth Sowmyan <khorg...@gmail.com>
Committed: Wed May 6 02:34:23 2015 -0700

----------------------------------------------------------------------
 .../apache/hive/hcatalog/common/HCatUtil.java   | 37 ++++++---
 .../hive/hcatalog/common/HiveClientCache.java   | 85 +++++++++++++-------
 .../DefaultOutputCommitterContainer.java        |  6 +-
 .../mapreduce/FileOutputCommitterContainer.java | 14 ++--
 .../mapreduce/FileOutputFormatContainer.java    |  8 +-
 .../hcatalog/mapreduce/HCatOutputFormat.java    |  6 +-
 .../hcatalog/mapreduce/InitializeInput.java     |  6 +-
 .../hive/hcatalog/mapreduce/Security.java       | 10 +--
 .../hcatalog/common/TestHiveClientCache.java    | 37 +++++----
 .../hcatalog/mapreduce/HCatMapReduceTest.java   |  2 +-
 .../hcatalog/mapreduce/TestPassProperties.java  |  2 +-
 .../apache/hive/hcatalog/pig/PigHCatUtil.java   | 10 +--
 .../streaming/AbstractRecordWriter.java         | 11 ++-
 .../hive/hcatalog/streaming/HiveEndPoint.java   |  9 ++-
 .../hive/hcatalog/api/HCatClientHMSImpl.java    | 17 ++--
 .../hcatalog/templeton/CompleteDelegator.java   |  6 +-
 .../hcatalog/templeton/SecureProxySupport.java  |  9 ++-
 .../templeton/tool/TempletonControllerJob.java  |  7 +-
 .../hadoop/hive/metastore/IMetaStoreClient.java |  3 +
 19 files changed, 173 insertions(+), 112 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
index 63909b8..3ee30ed 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HCatUtil.java
@@ -38,9 +38,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
@@ -175,7 +175,7 @@ public class HCatUtil {
     }
   }
 
-  public static Table getTable(HiveMetaStoreClient client, String dbName, String tableName)
+  public static Table getTable(IMetaStoreClient client, String dbName, String tableName)
     throws NoSuchObjectException, TException, MetaException {
     return new Table(client.getTable(dbName, tableName));
   }
@@ -538,17 +538,17 @@ public class HCatUtil {
    * @throws MetaException When HiveMetaStoreClient couldn't be created
    * @throws IOException
    */
-  public static HiveMetaStoreClient getHiveClient(HiveConf hiveConf)
-    throws MetaException, IOException {
+  public static IMetaStoreClient getHiveMetastoreClient(HiveConf hiveConf)
+      throws MetaException, IOException {
 
    if (hiveConf.getBoolean(HCatConstants.HCAT_HIVE_CLIENT_DISABLE_CACHE, false)){
       // If cache is disabled, don't use it.
-      return HiveClientCache.getNonCachedHiveClient(hiveConf);
+      return HiveClientCache.getNonCachedHiveMetastoreClient(hiveConf);
     }
 
     // Singleton behaviour: create the cache instance if required.
     if (hiveClientCache == null) {
-      synchronized (HiveMetaStoreClient.class) {
+      synchronized (IMetaStoreClient.class) {
         if (hiveClientCache == null) {
           hiveClientCache = new HiveClientCache(hiveConf);
         }
@@ -561,11 +561,30 @@ public class HCatUtil {
     }
   }
 
-  private static HiveMetaStoreClient getNonCachedHiveClient(HiveConf hiveConf) throws MetaException{
-    return new HiveMetaStoreClient(hiveConf);
+  /**
+   * Get or create a hive client depending on whether it exists in cache or not.
+   * @Deprecated : use {@link #getHiveMetastoreClient(HiveConf)} instead.
+   * This was deprecated in Hive 1.2, slated for removal in two versions (i.e. 1.2 & 1.3(projected) will have it, but it will be removed after that)
+   * @param hiveConf The hive configuration
+   * @return the client
+   * @throws MetaException When HiveMetaStoreClient couldn't be created
+   * @throws IOException
+   */
+  @Deprecated
+  public static HiveMetaStoreClient getHiveClient(HiveConf hiveConf) throws MetaException, IOException {
+    IMetaStoreClient imsc = getHiveMetastoreClient(hiveConf);
+    // Try piggybacking on the function that returns IMSC. Current implementation of the IMSC cache
+    // has CacheableMetaStoreClients, which are HMSC, so we can return them as-is. If not, it's okay
+    // for us to ignore the caching aspect and return a vanilla HMSC.
+    if (imsc instanceof HiveMetaStoreClient){
+      return (HiveMetaStoreClient)imsc;
+    } else {
+      return new HiveMetaStoreClient(hiveConf);
+    }
   }
 
-  public static void closeHiveClientQuietly(HiveMetaStoreClient client) {
+  public static void closeHiveClientQuietly(IMetaStoreClient client) {
     try {
       if (client != null)
         client.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
index a001252..578b6ea 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/common/HiveClientCache.java
@@ -34,6 +34,7 @@ import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.shims.Utils;
@@ -54,7 +55,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
 class HiveClientCache {
   public final static int DEFAULT_HIVE_CACHE_EXPIRY_TIME_SECONDS = 2 * 60;
 
-  final private Cache<HiveClientCacheKey, CacheableHiveMetaStoreClient> hiveCache;
+  final private Cache<HiveClientCacheKey, ICacheableMetaStoreClient> hiveCache;
   private static final Logger LOG = LoggerFactory.getLogger(HiveClientCache.class);
   private final int timeout;
  // This lock is used to make sure removalListener won't close a client that is being contemplated for returning by get()
@@ -79,7 +80,7 @@ class HiveClientCache {
     return threadId.get();
   }
 
-  public static HiveMetaStoreClient getNonCachedHiveClient(HiveConf hiveConf) throws MetaException {
+  public static IMetaStoreClient getNonCachedHiveMetastoreClient(HiveConf hiveConf) throws MetaException {
     return new HiveMetaStoreClient(hiveConf);
   }
 
@@ -92,11 +93,11 @@ class HiveClientCache {
    */
   public HiveClientCache(final int timeout) {
     this.timeout = timeout;
-    RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient> removalListener =
-      new RemovalListener<HiveClientCacheKey, CacheableHiveMetaStoreClient>() {
+    RemovalListener<HiveClientCacheKey, ICacheableMetaStoreClient> removalListener =
+      new RemovalListener<HiveClientCacheKey, ICacheableMetaStoreClient>() {
         @Override
-        public void onRemoval(RemovalNotification<HiveClientCacheKey, CacheableHiveMetaStoreClient> notification) {
-          CacheableHiveMetaStoreClient hiveMetaStoreClient = notification.getValue();
+        public void onRemoval(RemovalNotification<HiveClientCacheKey, ICacheableMetaStoreClient> notification) {
+          ICacheableMetaStoreClient hiveMetaStoreClient = notification.getValue();
           if (hiveMetaStoreClient != null) {
             synchronized (CACHE_TEARDOWN_LOCK) {
               hiveMetaStoreClient.setExpiredFromCache();
@@ -169,8 +170,8 @@ class HiveClientCache {
    */
   void closeAllClientsQuietly() {
     try {
-      ConcurrentMap<HiveClientCacheKey, CacheableHiveMetaStoreClient> elements = hiveCache.asMap();
-      for (CacheableHiveMetaStoreClient cacheableHiveMetaStoreClient : elements.values()) {
+      ConcurrentMap<HiveClientCacheKey, ICacheableMetaStoreClient> elements = hiveCache.asMap();
+      for (ICacheableMetaStoreClient cacheableHiveMetaStoreClient : elements.values()) {
         cacheableHiveMetaStoreClient.tearDown();
       }
     } catch (Exception e) {
@@ -191,24 +192,24 @@ class HiveClientCache {
    * @throws IOException
    * @throws LoginException
    */
-  public HiveMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOException, LoginException {
+  public ICacheableMetaStoreClient get(final HiveConf hiveConf) throws MetaException, IOException, LoginException {
     final HiveClientCacheKey cacheKey = HiveClientCacheKey.fromHiveConf(hiveConf, getThreadId());
-    CacheableHiveMetaStoreClient hiveMetaStoreClient = null;
+    ICacheableMetaStoreClient cacheableHiveMetaStoreClient = null;
     // the hmsc is not shared across threads. So the only way it could get closed while we are doing healthcheck
     // is if removalListener closes it. The synchronization takes care that removalListener won't do it
     synchronized (CACHE_TEARDOWN_LOCK) {
-      hiveMetaStoreClient = getOrCreate(cacheKey);
-      hiveMetaStoreClient.acquire();
+      cacheableHiveMetaStoreClient = getOrCreate(cacheKey);
+      cacheableHiveMetaStoreClient.acquire();
     }
-    if (!hiveMetaStoreClient.isOpen()) {
+    if (!cacheableHiveMetaStoreClient.isOpen()) {
       synchronized (CACHE_TEARDOWN_LOCK) {
         hiveCache.invalidate(cacheKey);
-        hiveMetaStoreClient.close();
-        hiveMetaStoreClient = getOrCreate(cacheKey);
-        hiveMetaStoreClient.acquire();
+        cacheableHiveMetaStoreClient.close();
+        cacheableHiveMetaStoreClient = getOrCreate(cacheKey);
+        cacheableHiveMetaStoreClient.acquire();
       }
     }
-    return hiveMetaStoreClient;
+    return cacheableHiveMetaStoreClient;
   }
 
   /**
@@ -219,11 +220,12 @@ class HiveClientCache {
    * @throws MetaException
    * @throws LoginException
    */
-  private CacheableHiveMetaStoreClient getOrCreate(final HiveClientCacheKey cacheKey) throws IOException, MetaException, LoginException {
+  private ICacheableMetaStoreClient getOrCreate(final HiveClientCacheKey cacheKey)
+      throws IOException, MetaException, LoginException {
     try {
-      return hiveCache.get(cacheKey, new Callable<CacheableHiveMetaStoreClient>() {
+      return hiveCache.get(cacheKey, new Callable<ICacheableMetaStoreClient>() {
         @Override
-        public CacheableHiveMetaStoreClient call() throws MetaException {
+        public ICacheableMetaStoreClient call() throws MetaException {
           return new CacheableHiveMetaStoreClient(cacheKey.getHiveConf(), timeout);
         }
       });
@@ -289,28 +291,48 @@ class HiveClientCache {
     }
   }
 
+  public interface ICacheableMetaStoreClient extends IMetaStoreClient {
+
+    void acquire();
+
+    void release();
+
+    void setExpiredFromCache();
+
+    AtomicInteger getUsers();
+
+    boolean isClosed();
+
+    boolean isOpen();
+
+    void tearDownIfUnused();
+
+    void tearDown();
+  }
+
   /**
    * Add # of current users on HiveMetaStoreClient, so that the client can be cleaned when no one is using it.
    */
-  public static class CacheableHiveMetaStoreClient extends HiveMetaStoreClient {
+  static class CacheableHiveMetaStoreClient extends HiveMetaStoreClient implements ICacheableMetaStoreClient {
+
     private final AtomicInteger users = new AtomicInteger(0);
     private volatile boolean expiredFromCache = false;
     private boolean isClosed = false;
     private final long expiryTime;
     private static final int EXPIRY_TIME_EXTENSION_IN_MILLIS = 60 * 1000;
 
-    public CacheableHiveMetaStoreClient(final HiveConf conf, final int timeout) throws MetaException {
+    CacheableHiveMetaStoreClient(final HiveConf conf, final Integer timeout) throws MetaException {
       super(conf);
       // Extend the expiry time with some extra time on top of guava expiry time to make sure
       // that items closed() are for sure expired and would never be returned by guava.
       this.expiryTime = System.currentTimeMillis() + timeout * 1000 + EXPIRY_TIME_EXTENSION_IN_MILLIS;
     }
 
-    private void acquire() {
+    public void acquire() {
       users.incrementAndGet();
     }
 
-    private void release() {
+    public void release() {
       users.decrementAndGet();
     }
 
@@ -322,15 +344,22 @@ class HiveClientCache {
       return isClosed;
     }
 
+    /*
+     * Used only for Debugging or testing purposes
+     */
+    public AtomicInteger getUsers() {
+      return users;
+    }
+
     /**
      * Make a call to hive meta store and see if the client is still usable. Some calls where the user provides
      * invalid data renders the client unusable for future use (example: create a table with very long table name)
      * @return
      */
-    protected boolean isOpen() {
+    public boolean isOpen() {
       try {
        // Look for an unlikely database name and see if either MetaException or TException is thrown
-        this.getDatabases("NonExistentDatabaseUsedForHealthCheck");
+        super.getDatabases("NonExistentDatabaseUsedForHealthCheck");
       } catch (TException e) {
         return false;
       }
@@ -354,7 +383,7 @@ class HiveClientCache {
      *  1. There are no active user
      *  2. It has expired from the cache
      */
-    private void tearDownIfUnused() {
+    public void tearDownIfUnused() {
       if (users.get() == 0 && expiredFromCache) {
         this.tearDown();
       }
@@ -363,7 +392,7 @@ class HiveClientCache {
     /**
      * Close if not closed already
      */
-    protected synchronized void tearDown() {
+    public synchronized void tearDown() {
       try {
         if (!isClosed) {
           super.close();

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputCommitterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
index cead40d..90c2d71 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultOutputCommitterContainer.java
@@ -22,7 +22,7 @@ package org.apache.hive.hcatalog.mapreduce;
 import java.io.IOException;
 
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -90,10 +90,10 @@ class DefaultOutputCommitterContainer extends OutputCommitterContainer {
     getBaseOutputCommitter().cleanupJob(HCatMapRedUtil.createJobContext(context));
 
     //Cancel HCat and JobTracker tokens
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     try {
       HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
       String tokenStrForm = client.getTokenStrForm();
      if (tokenStrForm != null && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
         client.cancelDelegationToken(tokenStrForm);

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
index 8146d85..367f4ea 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputCommitterContainer.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -43,7 +44,6 @@ import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.Warehouse;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -466,7 +466,7 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
    * @throws org.apache.hadoop.hive.metastore.api.MetaException the meta exception
    * @throws org.apache.thrift.TException the t exception
    */
-  private void updateTableSchema(HiveMetaStoreClient client, Table table,
+  private void updateTableSchema(IMetaStoreClient client, Table table,
                    HCatSchema partitionSchema) throws IOException, InvalidOperationException, MetaException, TException {
 
 
@@ -775,12 +775,12 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
       return;
     }
 
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     HCatTableInfo tableInfo = jobInfo.getTableInfo();
     List<Partition> partitionsAdded = new ArrayList<Partition>();
     try {
       HiveConf hiveConf = HCatUtil.getHiveConf(conf);
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
      StorerInfo storer = InternalUtil.extractStorerInfo(table.getTTable().getSd(),table.getParameters());
 
       FileStatus tblStat = fs.getFileStatus(tblPath);
@@ -952,7 +952,7 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
           // metastore
           for (Partition p : partitionsAdded) {
             client.dropPartition(tableInfo.getDatabaseName(),
-                tableInfo.getTableName(), p.getValues());
+                tableInfo.getTableName(), p.getValues(), true);
           }
         } catch (Exception te) {
           // Keep cause as the original exception
@@ -990,11 +990,11 @@ class FileOutputCommitterContainer extends OutputCommitterContainer {
 
   private void cancelDelegationTokens(JobContext context) throws IOException{
     LOG.info("Cancelling delegation token for the job.");
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     try {
       HiveConf hiveConf = HCatUtil
           .getHiveConf(context.getConfiguration());
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
       // cancel the deleg. tokens that were acquired for this job now that
       // we are done - we should cancel if the tokens were acquired by
       // HCatOutputFormat and not if they were supplied by Oozie.

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
index 1cd5306..001b59b 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -22,9 +22,9 @@ package org.apache.hive.hcatalog.mapreduce;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
@@ -113,10 +113,10 @@ class FileOutputFormatContainer extends OutputFormatContainer {
   @Override
   public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException {
     OutputJobInfo jobInfo = HCatOutputFormat.getJobInfo(context.getConfiguration());
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     try {
       HiveConf hiveConf = HCatUtil.getHiveConf(context.getConfiguration());
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
       handleDuplicatePublish(context,
         jobInfo,
         client,
@@ -163,7 +163,7 @@ class FileOutputFormatContainer extends OutputFormatContainer {
    * @throws org.apache.thrift.TException
    */
   private static void handleDuplicatePublish(JobContext context, OutputJobInfo outputInfo,
-      HiveMetaStoreClient client, Table table)
+      IMetaStoreClient client, Table table)
       throws IOException, MetaException, TException, NoSuchObjectException {
 
     /*

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
index 6947398..f9e71f0 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/HCatOutputFormat.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
@@ -82,12 +82,12 @@ public class HCatOutputFormat extends HCatBaseOutputFormat {
   @SuppressWarnings("unchecked")
   public static void setOutput(Configuration conf, Credentials credentials,
                  OutputJobInfo outputJobInfo) throws IOException {
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
 
     try {
 
       HiveConf hiveConf = HCatUtil.getHiveConf(conf);
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
       Table table = HCatUtil.getTable(client, outputJobInfo.getDatabaseName(),
         outputJobInfo.getTableName());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
index 1980ef5..2f07be1 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InitializeInput.java
@@ -27,8 +27,8 @@ import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -93,7 +93,7 @@ class InitializeInput {
    */
   private static InputJobInfo getInputJobInfo(
    Configuration conf, InputJobInfo inputJobInfo, String locationFilter) throws Exception {
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     HiveConf hiveConf = null;
     try {
       if (conf != null) {
@@ -101,7 +101,7 @@ class InitializeInput {
       } else {
         hiveConf = new HiveConf(HCatInputFormat.class);
       }
-      client = HCatUtil.getHiveClient(hiveConf);
+      client = HCatUtil.getHiveMetastoreClient(hiveConf);
       Table table = HCatUtil.getTable(client, inputJobInfo.getDatabaseName(),
         inputJobInfo.getTableName());
 

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
index 39ef86e..9b62195 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/Security.java
@@ -26,7 +26,7 @@ import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
@@ -103,7 +103,7 @@ final class Security {
   void handleSecurity(
     Credentials credentials,
     OutputJobInfo outputJobInfo,
-    HiveMetaStoreClient client,
+    IMetaStoreClient client,
     Configuration conf,
     boolean harRequested)
     throws IOException, MetaException, TException, Exception {
@@ -136,7 +136,7 @@ final class Security {
         // hcat normally in OutputCommitter.commitJob()
         // when the JobTracker in Hadoop MapReduce starts supporting renewal of
        // arbitrary tokens, the renewer should be the principal of the JobTracker
-        hiveToken = HCatUtil.extractThriftToken(client.getDelegationToken(ugi.getUserName()), tokenSignature);
+        hiveToken = HCatUtil.extractThriftToken(client.getDelegationToken(ugi.getUserName(), ugi.getUserName()), tokenSignature);
 
         if (harRequested) {
           TokenSelector<? extends TokenIdentifier> jtTokenSelector =
@@ -165,7 +165,7 @@ final class Security {
   void handleSecurity(
     Job job,
     OutputJobInfo outputJobInfo,
-    HiveMetaStoreClient client,
+    IMetaStoreClient client,
     Configuration conf,
     boolean harRequested)
     throws IOException, MetaException, TException, Exception {
@@ -175,7 +175,7 @@ final class Security {
   // we should cancel hcat token if it was acquired by hcat
   // and not if it was supplied (ie Oozie). In the latter
   // case the HCAT_KEY_TOKEN_SIGNATURE property in the conf will not be set
-  void cancelToken(HiveMetaStoreClient client, JobContext context) throws IOException, MetaException {
+  void cancelToken(IMetaStoreClient client, JobContext context) throws IOException, MetaException {
     String tokenStrForm = client.getTokenStrForm();
    if (tokenStrForm != null && context.getConfiguration().get(HCatConstants.HCAT_KEY_TOKEN_SIGNATURE) != null) {
       try {

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
index 63a5548..b2c9c7a 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/common/TestHiveClientCache.java
@@ -20,7 +20,7 @@ package org.apache.hive.hcatalog.common;
 
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.HiveMetaStore;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
@@ -76,29 +76,28 @@ public class TestHiveClientCache {
 
   @Test
  public void testCacheHit() throws IOException, MetaException, LoginException {
-
     HiveClientCache cache = new HiveClientCache(1000);
-    HiveMetaStoreClient client = cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client = cache.get(hiveConf);
     assertNotNull(client);
     client.close(); // close shouldn't matter
 
     // Setting a non important configuration should return the same client only
     hiveConf.setIntVar(HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS, 10);
-    HiveMetaStoreClient client2 = cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client2 = cache.get(hiveConf);
     assertNotNull(client2);
-    assertEquals(client, client2);
+    assertEquals(client.getUsers(), client2.getUsers());
     client2.close();
   }
 
   @Test
  public void testCacheMiss() throws IOException, MetaException, LoginException {
     HiveClientCache cache = new HiveClientCache(1000);
-    HiveMetaStoreClient client = cache.get(hiveConf);
+    IMetaStoreClient client = cache.get(hiveConf);
     assertNotNull(client);
 
     // Set different uri as it is one of the criteria deciding whether to return the same client or not
     hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " "); // URIs are checked for string equivalence, even spaces make them different
-    HiveMetaStoreClient client2 = cache.get(hiveConf);
+    IMetaStoreClient client2 = cache.get(hiveConf);
     assertNotNull(client2);
     assertNotSame(client, client2);
   }
@@ -110,11 +109,11 @@ public class TestHiveClientCache {
   @Test
  public void testCacheExpiry() throws IOException, MetaException, LoginException, InterruptedException {
     HiveClientCache cache = new HiveClientCache(1);
-    HiveClientCache.CacheableHiveMetaStoreClient client = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client = cache.get(hiveConf);
     assertNotNull(client);
 
     Thread.sleep(2500);
-    HiveMetaStoreClient client2 = cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client2 = cache.get(hiveConf);
     client.close();
    assertTrue(client.isClosed()); // close() after *expiry time* and *a cache access* should have tore down the client
 
@@ -132,21 +131,21 @@ public class TestHiveClientCache {
  public void testMultipleThreadAccess() throws ExecutionException, InterruptedException {
     final HiveClientCache cache = new HiveClientCache(1000);
 
-    class GetHiveClient implements Callable<HiveMetaStoreClient> {
+    class GetHiveClient implements Callable<IMetaStoreClient> {
       @Override
-      public HiveMetaStoreClient call() throws IOException, MetaException, LoginException {
+      public IMetaStoreClient call() throws IOException, MetaException, LoginException {
         return cache.get(hiveConf);
       }
     }
 
     ExecutorService executor = Executors.newFixedThreadPool(2);
 
-    Callable<HiveMetaStoreClient> worker1 = new GetHiveClient();
-    Callable<HiveMetaStoreClient> worker2 = new GetHiveClient();
-    Future<HiveMetaStoreClient> clientFuture1 = executor.submit(worker1);
-    Future<HiveMetaStoreClient> clientFuture2 = executor.submit(worker2);
-    HiveMetaStoreClient client1 = clientFuture1.get();
-    HiveMetaStoreClient client2 = clientFuture2.get();
+    Callable<IMetaStoreClient> worker1 = new GetHiveClient();
+    Callable<IMetaStoreClient> worker2 = new GetHiveClient();
+    Future<IMetaStoreClient> clientFuture1 = executor.submit(worker1);
+    Future<IMetaStoreClient> clientFuture2 = executor.submit(worker2);
+    IMetaStoreClient client1 = clientFuture1.get();
+    IMetaStoreClient client2 = clientFuture2.get();
     assertNotNull(client1);
     assertNotNull(client2);
     assertNotSame(client1, client2);
@@ -155,9 +154,9 @@ public class TestHiveClientCache {
   @Test
  public void testCloseAllClients() throws IOException, MetaException, LoginException {
    final HiveClientCache cache = new HiveClientCache(1000);
-    HiveClientCache.CacheableHiveMetaStoreClient client1 = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client1 = cache.get(hiveConf);
     hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, " "); // URIs are checked for string equivalence, even spaces make them different
-    HiveClientCache.CacheableHiveMetaStoreClient client2 = (HiveClientCache.CacheableHiveMetaStoreClient) cache.get(hiveConf);
+    HiveClientCache.ICacheableMetaStoreClient client2 = cache.get(hiveConf);
     cache.closeAllClientsQuietly();
     assertTrue(client1.isClosed());
     assertTrue(client2.isClosed());

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
index c98d947..f437079 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/HCatMapReduceTest.java
@@ -147,7 +147,7 @@ public abstract class HCatMapReduceTest extends HCatBaseTest {
     // Hack to initialize cache with 0 expiry time causing it to return a new hive client every time
     // Otherwise the cache doesn't play well with the second test method with the client gets closed() in the
     // tearDown() of the previous test
-    HCatUtil.getHiveClient(hiveConf);
+    HCatUtil.getHiveMetastoreClient(hiveConf);
 
     MapCreate.writeCount = 0;
     MapRead.readCount = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
----------------------------------------------------------------------
diff --git a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
index f8a0af1..735ab5f 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/mapreduce/TestPassProperties.java
@@ -109,7 +109,7 @@ public class TestPassProperties {
       new FileOutputCommitterContainer(job, null).cleanupJob(job);
     } catch (Exception e) {
       caughtException = true;
-      assertTrue(e.getMessage().contains(
+      assertTrue(e.getCause().getMessage().contains(
           "Could not connect to meta store using any of the URIs provided"));
     }
     assertTrue(caughtException);

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
----------------------------------------------------------------------
diff --git a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
index 48a40b1..337f4fb 100644
--- a/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
+++ b/hcatalog/hcatalog-pig-adapter/src/main/java/org/apache/hive/hcatalog/pig/PigHCatUtil.java
@@ -24,7 +24,6 @@ import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Calendar;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -36,7 +35,7 @@ import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -63,7 +62,6 @@ import org.apache.pig.impl.logicalLayer.schema.Schema;
 import org.apache.pig.impl.util.UDFContext;
 import org.apache.pig.impl.util.Utils;
 import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -141,7 +139,7 @@ class PigHCatUtil {
     return job.getConfiguration().get(HCatConstants.HCAT_METASTORE_PRINCIPAL);
   }
 
-  private static HiveMetaStoreClient getHiveMetaClient(String serverUri,
+  private static IMetaStoreClient getHiveMetaClient(String serverUri,
                              String serverKerberosPrincipal,
                              Class<?> clazz,
                              Job job) throws Exception {
@@ -163,7 +161,7 @@ class PigHCatUtil {
     }
 
     try {
-      return HCatUtil.getHiveClient(hiveConf);
+      return HCatUtil.getHiveMetastoreClient(hiveConf);
     } catch (Exception e) {
       throw new Exception("Could not instantiate a HiveMetaStoreClient 
connecting to server uri:[" + serverUri + "]", e);
     }
@@ -203,7 +201,7 @@ class PigHCatUtil {
     String dbName = dbTablePair.first;
     String tableName = dbTablePair.second;
     Table table = null;
-    HiveMetaStoreClient client = null;
+    IMetaStoreClient client = null;
     try {
      client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class, job);
       table = HCatUtil.getTable(client, dbName, tableName);

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index 1c85ab5..ed46bca 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -24,7 +24,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -33,6 +33,7 @@ import org.apache.hadoop.hive.ql.io.RecordUpdater;
 import org.apache.hadoop.hive.serde2.SerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.thrift.TException;
 
 import java.io.IOException;
@@ -46,7 +47,7 @@ abstract class AbstractRecordWriter implements RecordWriter {
   final HiveEndPoint endPoint;
   final Table tbl;
 
-  final HiveMetaStoreClient msClient;
+  final IMetaStoreClient msClient;
   RecordUpdater updater = null;
 
   private final int totalBuckets;
@@ -62,7 +63,7 @@ abstract class AbstractRecordWriter implements RecordWriter {
     this.conf = conf!=null ? conf
                : HiveEndPoint.createHiveConf(DelimitedInputWriter.class, endPoint.metaStoreUri);
     try {
-      msClient = new HiveMetaStoreClient(this.conf);
+      msClient = HCatUtil.getHiveMetastoreClient(this.conf);
       this.tbl = msClient.getTable(endPoint.database, endPoint.table);
       this.partitionPath = getPathForEndPoint(msClient, endPoint);
       this.totalBuckets = tbl.getSd().getNumBuckets();
@@ -80,6 +81,8 @@ abstract class AbstractRecordWriter implements RecordWriter {
       throw new StreamingException(e.getMessage(), e);
     } catch (ClassNotFoundException e) {
       throw new StreamingException(e.getMessage(), e);
+    } catch (IOException e) {
+      throw new StreamingException(e.getMessage(), e);
     }
   }
 
@@ -147,7 +150,7 @@ abstract class AbstractRecordWriter implements RecordWriter {
     }
   }
 
-  private Path getPathForEndPoint(HiveMetaStoreClient msClient, HiveEndPoint endPoint)
+  private Path getPathForEndPoint(IMetaStoreClient msClient, HiveEndPoint endPoint)
           throws StreamingException {
     try {
       String location;

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
----------------------------------------------------------------------
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
index a08f2f9..3c25486 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/HiveEndPoint.java
@@ -22,7 +22,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.cli.CliSessionState;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.LockComponentBuilder;
 import org.apache.hadoop.hive.metastore.LockRequestBuilder;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hive.hcatalog.common.HCatUtil;
 
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.thrift.TException;
@@ -445,10 +445,13 @@ public class HiveEndPoint {
         conf.setBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL,true);
       }
       try {
-        return new HiveMetaStoreClient(conf);
+        return HCatUtil.getHiveMetastoreClient(conf);
       } catch (MetaException e) {
         throw new ConnectionError("Error connecting to Hive Metastore URI: "
-                + endPoint.metaStoreUri, e);
+                + endPoint.metaStoreUri + ". " + e.getMessage(), e);
+      } catch (IOException e) {
+        throw new ConnectionError("Error connecting to Hive Metastore URI: "
+            + endPoint.metaStoreUri + ". " + e.getMessage(), e);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
index 3b2cd38..3a69581 100644
--- a/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
+++ b/hcatalog/webhcat/java-client/src/main/java/org/apache/hive/hcatalog/api/HCatClientHMSImpl.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hive.common.ObjectPair;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.common.classification.InterfaceStability;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.TableType;
@@ -85,7 +84,7 @@ import javax.annotation.Nullable;
 public class HCatClientHMSImpl extends HCatClient {
 
  private static final Logger LOG = LoggerFactory.getLogger(HCatClientHMSImpl.class);
-  private HiveMetaStoreClient hmsClient;
+  private IMetaStoreClient hmsClient;
   private Configuration config;
   private HiveConf hiveConfig;
 
@@ -96,7 +95,9 @@ public class HCatClientHMSImpl extends HCatClient {
     try {
       dbNames = hmsClient.getDatabases(pattern);
     } catch (MetaException exp) {
-      throw new HCatException("MetaException while listing db names", exp);
+      throw new HCatException("MetaException while listing db names. " + 
exp.getMessage(), exp);
+    } catch (TException e) {
+      throw new HCatException("Transport Exception while listing db names. " + 
e.getMessage(), e);
     }
     return dbNames;
   }
@@ -172,8 +173,12 @@ public class HCatClientHMSImpl extends HCatClient {
     try {
       tableNames = hmsClient.getTables(checkDB(dbName), tablePattern);
     } catch (MetaException e) {
-      throw new HCatException(
-        "MetaException while fetching table names.", e);
+      throw new HCatException("MetaException while fetching table names. " + 
e.getMessage(), e);
+    } catch (UnknownDBException e) {
+      throw new HCatException("UnknownDB " + dbName + " while fetching table 
names.", e);
+    } catch (TException e) {
+      throw new HCatException("Transport exception while fetching table names. 
"
+          + e.getMessage(), e);
     }
     return tableNames;
   }
@@ -815,7 +820,7 @@ public class HCatClientHMSImpl extends HCatClient {
     this.config = conf;
     try {
       hiveConfig = HCatUtil.getHiveConf(config);
-      hmsClient = HCatUtil.getHiveClient(hiveConfig);
+      hmsClient = HCatUtil.getHiveMetastoreClient(hiveConfig);
     } catch (MetaException exp) {
       throw new HCatException("MetaException while creating HMS client",
         exp);

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
index 1b9663d..e3be5b7 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/CompleteDelegator.java
@@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.templeton.tool.DelegationTokenCache;
 import org.apache.hive.hcatalog.templeton.tool.JobState;
@@ -94,13 +94,13 @@ public class CompleteDelegator extends TempletonDelegator {
       return new CompleteBean("Callback sent");
     } finally {
       state.close();
-      HiveMetaStoreClient client = null;
+      IMetaStoreClient client = null;
       try {
         if(cancelMetastoreToken) {
          String metastoreTokenStrForm =
                  DelegationTokenCache.getStringFormTokenCache().getDelegationToken(id);
           if(metastoreTokenStrForm != null) {
-            client = HCatUtil.getHiveClient(new HiveConf());
+            client = HCatUtil.getHiveMetastoreClient(new HiveConf());
             client.cancelDelegationToken(metastoreTokenStrForm);
             LOG.debug("Cancelled token for jobId=" + id + " status from JT=" + 
jobStatus);
             
DelegationTokenCache.getStringFormTokenCache().removeDelegationToken(id);

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
index 8ae61a1..b4687b5 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/SecureProxySupport.java
@@ -30,12 +30,13 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.thrift.TException;
 
 /**
@@ -175,8 +176,8 @@ public class SecureProxySupport {
 
   private String buildHcatDelegationToken(String user)
     throws IOException, InterruptedException, MetaException, TException {
-    HiveConf c = new HiveConf();
-    final HiveMetaStoreClient client = new HiveMetaStoreClient(c);
+    final HiveConf c = new HiveConf();
+    final IMetaStoreClient client = HCatUtil.getHiveMetastoreClient(c);
     LOG.info("user: " + user + " loginUser: " + 
UserGroupInformation.getLoginUser().getUserName());
     final TokenWrapper twrapper = new TokenWrapper();
     final UserGroupInformation ugi = UgiFactory.getUgi(user);
@@ -184,7 +185,7 @@ public class SecureProxySupport {
       public String run()
         throws IOException, MetaException, TException {
         String u = ugi.getUserName();
-        return client.getDelegationToken(u);
+        return client.getDelegationToken(c.getUser(), u);
       }
     });
     return s;

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
----------------------------------------------------------------------
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
index 1f8ebf3..349bd5c 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonControllerJob.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hive.common.classification.InterfaceAudience;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
@@ -40,6 +40,7 @@ import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIden
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Tool;
+import org.apache.hive.hcatalog.common.HCatUtil;
 import org.apache.hive.hcatalog.templeton.AppConfig;
 import org.apache.hive.hcatalog.templeton.SecureProxySupport;
 import org.apache.hive.hcatalog.templeton.UgiFactory;
@@ -176,12 +177,12 @@ public class TempletonControllerJob extends Configured implements Tool, JobSubmi
     return real.doAs(new PrivilegedExceptionAction<String>() {
       @Override
      public String run() throws IOException, TException, InterruptedException {
-        final HiveMetaStoreClient client = new HiveMetaStoreClient(c);
+        final IMetaStoreClient client = HCatUtil.getHiveMetastoreClient(c);
         return ugi.doAs(new PrivilegedExceptionAction<String>() {
           @Override
          public String run() throws IOException, TException, InterruptedException {
             String u = ugi.getUserName();
-            return client.getDelegationToken(u);
+            return client.getDelegationToken(c.getUser(),u);
           }
         });
       }

http://git-wip-us.apache.org/repos/asf/hive/blob/45307c10/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 129a98d..341b0ca 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.metastore.api.TxnOpenException;
 import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
 import org.apache.thrift.TException;
 
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
@@ -1115,6 +1116,8 @@ public interface IMetaStoreClient {
    */
   void cancelDelegationToken(String tokenStrForm) throws MetaException, TException;
 
+  public String getTokenStrForm() throws IOException;
+
   void createFunction(Function func)
       throws InvalidObjectException, MetaException, TException;
 

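getTokenStrForm() is promoted onto the interface so the token-cancellation paths earlier in this patch can stay interface-typed. A hedged sketch of the cancel-if-acquired pattern those committers use (the helper class is illustrative):

    import org.apache.hadoop.hive.metastore.IMetaStoreClient;

    class TokenCleanupSketch {
      static void cancelIfPresent(IMetaStoreClient client) throws Exception {
        // Cancel only when a token string was actually recorded for this client.
        String tokenStrForm = client.getTokenStrForm();
        if (tokenStrForm != null) {
          client.cancelDelegationToken(tokenStrForm);
        }
      }
    }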