http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
index 5c70b77..8d14b4c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java
@@ -32,9 +32,9 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.backoff.ClientBackoffPolicy;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MasterService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MasterService;
 
 /** Internal methods on Connection that should not be used by user code. */
 @InterfaceAudience.Private

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
index 9efb33d..f3c0241 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterStatusListener.java
@@ -49,8 +49,8 @@ import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
 import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.ExceptionUtil;
 import org.apache.hadoop.hbase.util.Threads;

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
index 38178b4..391c541 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionImplementation.java
@@ -63,21 +63,22 @@ import org.apache.hadoop.hbase.exceptions.RegionMovedException;
 import org.apache.hadoop.hbase.ipc.RpcClient;
 import org.apache.hadoop.hbase.ipc.RpcClientFactory;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.NormalizeResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService.BlockingInterface;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsBalancerEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsNormalizerEnabledResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.NormalizeResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningResponse;
 import org.apache.hadoop.hbase.regionserver.RegionServerStoppedException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -92,9 +93,10 @@ import org.apache.hadoop.ipc.RemoteException;
 import org.apache.zookeeper.KeeperException;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.BlockingRpcChannel;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.ServiceException;
+
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.BlockingRpcChannel;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
 
 /**
 * Main implementation of {@link Connection} and {@link ClusterConnection} interfaces.
@@ -914,6 +916,7 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
    */
   static class MasterServiceState {
     Connection connection;
+
     MasterProtos.MasterService.BlockingInterface stub;
     int userCount;
 
@@ -1190,21 +1193,19 @@ class ConnectionImplementation implements ClusterConnection, Closeable {
   }
 
   @Override
-  public ClientProtos.ClientService.BlockingInterface getClient(final ServerName sn)
+  public BlockingInterface getClient(final ServerName sn)
   throws IOException {
     if (isDeadServer(sn)) {
       throw new RegionServerStoppedException(sn + " is dead.");
     }
-    String key = getStubKey(
-      ClientProtos.ClientService.BlockingInterface.class.getName(), sn.getHostname(),
+    String key = getStubKey(ClientProtos.ClientService.BlockingInterface.class.getName(), sn.getHostname(),
       sn.getPort(), this.hostnamesCanChange);
     this.connectionLock.putIfAbsent(key, key);
     ClientProtos.ClientService.BlockingInterface stub = null;
     synchronized (this.connectionLock.get(key)) {
       stub = (ClientProtos.ClientService.BlockingInterface)this.stubs.get(key);
       if (stub == null) {
-        BlockingRpcChannel channel =
-            this.rpcClient.createBlockingRpcChannel(sn, user, rpcTimeout);
+        BlockingRpcChannel channel = this.rpcClient.createBlockingRpcChannel(sn, user, rpcTimeout);
         stub = ClientProtos.ClientService.newBlockingStub(channel);
         // In old days, after getting stub/proxy, we'd make a call.  We are not doing that here.
         // Just fail on first actual call rather than in here on setup.

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
index df89622..83655f0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ConnectionUtils.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
index c9d2324..3f44836 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FastFailInterceptorContext.java
@@ -22,8 +22,7 @@ import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 
 @InterfaceAudience.Private
-class FastFailInterceptorContext extends
-    RetryingCallerInterceptorContext {
+class FastFailInterceptorContext extends RetryingCallerInterceptorContext {
 
  // The variable that indicates whether we were able to connect with the server
   // in the last run
@@ -118,16 +117,16 @@ class FastFailInterceptorContext extends
     tries = 0;
   }
 
-  public FastFailInterceptorContext prepare(RetryingCallableBase callable) {
+  public FastFailInterceptorContext prepare(RetryingCallable<?> callable) {
     return prepare(callable, 0);
   }
 
-  public FastFailInterceptorContext prepare(RetryingCallableBase callable, int tries) {
+  public FastFailInterceptorContext prepare(RetryingCallable<?> callable, int tries) {
     if (callable instanceof RegionServerCallable) {
-      RegionServerCallable<?> retryingCallable = (RegionServerCallable<?>) callable;
+      RegionServerCallable<?, ?> retryingCallable = (RegionServerCallable<?, ?>) callable;
       server = retryingCallable.getLocation().getServerName();
     }
     this.tries = tries;
     return this;
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
index c612e0f..c878e8d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/FlushRegionCallable.java
@@ -27,9 +27,9 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionResponse;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
index 3b41755..b442e1b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java
@@ -55,7 +55,6 @@ import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NamespaceNotFoundException;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.ProcedureInfo;
-import org.apache.hadoop.hbase.ProcedureUtil;
 import org.apache.hadoop.hbase.RegionLocations;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableExistsException;
@@ -69,100 +68,104 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.security.SecurityCapability;
 import org.apache.hadoop.hbase.exceptions.TimeoutIOException;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.ipc.MasterCoprocessorRpcChannel;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-import org.apache.hadoop.hbase.ipc.RegionServerCoprocessorRpcChannel;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CompactRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.RollWALWriterResponse;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.StopServerRequest;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ProcedureDescription;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AbortProcedureResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AddColumnResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.AssignRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateNamespaceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteColumnResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DeleteTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DisableTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.EnableTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ExecProcedureResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetClusterStatusRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetProcedureResultResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.GetTableNamesRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListProceduresRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyColumnResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyNamespaceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ModifyTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.MoveRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.RestoreSnapshotResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.ShutdownRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.SnapshotResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.StopMasterRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableRequest;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.TruncateTableResponse;
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.UnassignRegionRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ProcedureProtos;
 import org.apache.hadoop.hbase.quotas.QuotaFilter;
 import org.apache.hadoop.hbase.quotas.QuotaRetriever;
 import org.apache.hadoop.hbase.quotas.QuotaSettings;
 import org.apache.hadoop.hbase.regionserver.wal.FailedLogCloseException;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CloseRegionResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.FlushRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.GetRegionInfoResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.RollWALWriterResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.StopServerRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.UpdateConfigurationRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ProcedureDescription;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AbortProcedureResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AddColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.AssignRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.CreateTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteSnapshotRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DeleteTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DisableTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.DispatchMergingRegionsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.EnableTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ExecProcedureResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetClusterStatusRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetCompletedSnapshotsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetNamespaceDescriptorRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetProcedureResultResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetSchemaAlterStatusResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableNamesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsInMaintenanceModeResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsProcedureDoneResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.IsSnapshotDoneResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListNamespaceDescriptorsRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListProceduresRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableDescriptorsByNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ListTableNamesByNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampForRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MajorCompactionTimestampRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyColumnResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyNamespaceResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ModifyTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.MoveRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.RestoreSnapshotResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SecurityCapabilitiesRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetBalancerRunningRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetNormalizerRunningRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.ShutdownRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SnapshotResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.StopMasterRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.TruncateTableResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.UnassignRegionRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ProcedureProtos.ProcedureState;
 import org.apache.hadoop.hbase.snapshot.ClientSnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.snapshot.HBaseSnapshotException;
 import org.apache.hadoop.hbase.snapshot.RestoreSnapshotException;
@@ -172,6 +175,7 @@ import org.apache.hadoop.hbase.util.Addressing;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.ForeignExceptionUtil;
+import org.apache.hadoop.hbase.util.NonceKey;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.zookeeper.MasterAddressTracker;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
@@ -181,7 +185,9 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.zookeeper.KeeperException;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.ServiceException;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcController;
 
 /**
 * HBaseAdmin is no longer a client API. It is marked InterfaceAudience.Private indicating that
@@ -1035,7 +1041,7 @@ public class HBaseAdmin implements Admin {
     AdminService.BlockingInterface admin = this.connection.getAdmin(sn);
     // Close the region without updating zk state.
     CloseRegionRequest request =
-      RequestConverter.buildCloseRegionRequest(sn, encodedRegionName);
+      ProtobufUtil.buildCloseRegionRequest(sn, encodedRegionName);
     // TODO: There is no timeout on this controller. Set one!
     HBaseRpcController controller = this.rpcControllerFactory.newController();
     try {
@@ -2024,13 +2030,28 @@ public class HBaseAdmin implements Admin {
            getRpcController(), ListProceduresRequest.newBuilder().build()).getProcedureList();
         ProcedureInfo[] procInfoList = new ProcedureInfo[procList.size()];
         for (int i = 0; i < procList.size(); i++) {
-          procInfoList[i] = ProcedureUtil.convert(procList.get(i));
+          procInfoList[i] = convert(procList.get(i));
         }
         return procInfoList;
       }
     });
   }
 
+  private static ProcedureInfo convert(final ProcedureProtos.Procedure procProto) {
+    NonceKey nonceKey = null;
+    if (procProto.getNonce() != HConstants.NO_NONCE) {
+      nonceKey = new NonceKey(procProto.getNonceGroup(), procProto.getNonce());
+    }
+    org.apache.hadoop.hbase.ProcedureState procedureState =
+        org.apache.hadoop.hbase.ProcedureState.valueOf(procProto.getState().name());
+    return new ProcedureInfo(procProto.getProcId(), procProto.getClassName(), procProto.getOwner(),
+        procedureState, procProto.hasParentId() ? procProto.getParentId() : -1, nonceKey,
+            procProto.hasException()?
+                ForeignExceptionUtil.toIOException(procProto.getException()): null,
+            procProto.getLastUpdate(), procProto.getStartTime(),
+            procProto.hasResult()? procProto.getResult().toByteArray() : null);
+  }
+
   @Override
   public HTableDescriptor[] listTableDescriptorsByNamespace(final String name) throws IOException {
     return executeCallable(new MasterCallable<HTableDescriptor[]>(getConnection(),
@@ -2895,8 +2916,36 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
+  // Coprocessor Endpoint against the Master.
   public CoprocessorRpcChannel coprocessorService() {
-    return new MasterCoprocessorRpcChannel(connection);
+    return new SyncCoprocessorRpcChannel() {
+      @Override
+      protected Message callExecService(final RpcController controller,
+          final Descriptors.MethodDescriptor method, final Message request,
+          final Message responsePrototype)
+      throws IOException {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("Call: " + method.getName() + ", " + request.toString());
+        }
+        // Try-with-resources so close gets called when we are done.
+        try (MasterCallable<CoprocessorServiceResponse> callable =
+            new MasterCallable<CoprocessorServiceResponse>(connection,
+                connection.getRpcControllerFactory()) {
+          @Override
+          protected CoprocessorServiceResponse rpcCall() throws Exception {
+            CoprocessorServiceRequest csr =
+                CoprocessorRpcUtils.getCoprocessorServiceRequest(method, request);
+            return this.master.execMasterService(getRpcController(), csr);
+          }
+        };) {
+          // TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
+          callable.prepare(false);
+          int operationTimeout = connection.getConnectionConfiguration().getOperationTimeout();
+          CoprocessorServiceResponse result = callable.call(operationTimeout);
+          return CoprocessorRpcUtils.getResponse(result, responsePrototype);
+        }
+      }
+    };
   }
 
   /**
@@ -2915,8 +2964,31 @@ public class HBaseAdmin implements Admin {
   }
 
   @Override
-  public CoprocessorRpcChannel coprocessorService(ServerName sn) {
-    return new RegionServerCoprocessorRpcChannel(connection, sn);
+  public CoprocessorRpcChannel coprocessorService(final ServerName serverName) {
+    return new SyncCoprocessorRpcChannel() {
+      @Override
+      protected Message callExecService(RpcController controller,
+          Descriptors.MethodDescriptor method, Message request, Message responsePrototype)
+      throws IOException {
+        if (LOG.isTraceEnabled()) {
+          LOG.trace("Call: " + method.getName() + ", " + request.toString());
+        }
+        CoprocessorServiceRequest csr =
+            CoprocessorRpcUtils.getCoprocessorServiceRequest(method, request);
+        // TODO: Are we retrying here? Does not seem so. We should use RetryingRpcCaller
+        // TODO: Make this same as RegionCoprocessorRpcChannel and MasterCoprocessorRpcChannel. They
+        // are all different though should do same thing; e.g. RpcChannel setup.
+        ClientProtos.ClientService.BlockingInterface stub = connection.getClient(serverName);
+        CoprocessorServiceResponse result;
+        try {
+          result = stub.
+              execRegionServerService(connection.getRpcControllerFactory().newController(), csr);
+          return CoprocessorRpcUtils.getResponse(result, responsePrototype);
+        } catch (ServiceException e) {
+          throw ProtobufUtil.handleRemoteException(e);
+        }
+      }
+    };
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 1d1db3a..84f8024 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -49,17 +49,17 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel;
+import org.apache.hadoop.hbase.client.RegionCoprocessorRpcChannel;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.ResponseConverter;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateResponse;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.CompareType;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
@@ -421,17 +421,16 @@ public class HTable implements Table {
     }
 
     if (get.getConsistency() == Consistency.STRONG) {
-      // Good old call.
       final Get configuredGet = get;
-      RegionServerCallable<Result> callable = new RegionServerCallable<Result>(this.connection,
-          this.rpcControllerFactory, getName(), get.getRow()) {
+      ClientServiceCallable<Result> callable = new ClientServiceCallable<Result>(this.connection, getName(),
+          get.getRow(), this.rpcControllerFactory.newController()) {
         @Override
         protected Result rpcCall() throws Exception {
           ClientProtos.GetRequest request = RequestConverter.buildGetRequest(
               getLocation().getRegionInfo().getRegionName(), configuredGet);
-          ClientProtos.GetResponse response = getStub().get(getRpcController(), request);
-          if (response == null) return null;
-          return ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
+          ClientProtos.GetResponse response = doGet(request);
+          return response == null? null:
+            ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
         }
       };
      return rpcCallerFactory.<Result>newCaller(readRpcTimeout).callWithRetries(callable,
@@ -524,12 +523,12 @@ public class HTable implements Table {
   throws IOException {
     CancellableRegionServerCallable<SingleResponse> callable =
         new CancellableRegionServerCallable<SingleResponse>(
-            connection, getName(), delete.getRow(), this.rpcControllerFactory) {
+            connection, getName(), delete.getRow(), this.rpcControllerFactory.newController()) {
       @Override
       protected SingleResponse rpcCall() throws Exception {
         MutateRequest request = RequestConverter.buildMutateRequest(
           getLocation().getRegionInfo().getRegionName(), delete);
-        MutateResponse response = getStub().mutate(getRpcController(), request);
+        MutateResponse response = doMutate(request);
         return ResponseConverter.getResult(request, response, getRpcControllerCellScanner());
       }
     };
@@ -594,7 +593,7 @@ public class HTable implements Table {
   public void mutateRow(final RowMutations rm) throws IOException {
     CancellableRegionServerCallable<MultiResponse> callable =
      new CancellableRegionServerCallable<MultiResponse>(this.connection, getName(), rm.getRow(),
-          rpcControllerFactory) {
+          rpcControllerFactory.newController()) {
       @Override
       protected MultiResponse rpcCall() throws Exception {
        RegionAction.Builder regionMutationBuilder = RequestConverter.buildRegionAction(
@@ -602,7 +601,7 @@ public class HTable implements Table {
         regionMutationBuilder.setAtomic(true);
         MultiRequest request =
             MultiRequest.newBuilder().addRegionAction(regionMutationBuilder.build()).build();
-        ClientProtos.MultiResponse response = getStub().multi(getRpcController(), request);
+        ClientProtos.MultiResponse response = doMulti(request);
         ClientProtos.RegionActionResult res = response.getRegionActionResultList().get(0);
         if (res.hasException()) {
           Throwable ex = ProtobufUtil.toException(res.getException());
@@ -635,17 +634,17 @@ public class HTable implements Table {
   public Result append(final Append append) throws IOException {
     checkHasFamilies(append);
     NoncedRegionServerCallable<Result> callable =
-        new NoncedRegionServerCallable<Result>(this.connection, this.rpcControllerFactory,
-            getName(), append.getRow()) {
-          @Override
-          protected Result rpcCall() throws Exception {
-            MutateRequest request = RequestConverter.buildMutateRequest(
-              getLocation().getRegionInfo().getRegionName(), append, getNonceGroup(), getNonce());
-            MutateResponse response = getStub().mutate(getRpcController(), request);
-            if (!response.hasResult()) return null;
-            return ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
-          }
-        };
+        new NoncedRegionServerCallable<Result>(this.connection, getName(), append.getRow(),
+            this.rpcControllerFactory.newController()) {
+      @Override
+      protected Result rpcCall() throws Exception {
+        MutateRequest request = RequestConverter.buildMutateRequest(
+          getLocation().getRegionInfo().getRegionName(), append, getNonceGroup(), getNonce());
+        MutateResponse response = doMutate(request);
+        if (!response.hasResult()) return null;
+        return ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
+      }
+    };
     return rpcCallerFactory.<Result> newCaller(this.writeRpcTimeout).
         callWithRetries(callable, this.operationTimeout);
   }
@@ -657,16 +656,16 @@ public class HTable implements Table {
   public Result increment(final Increment increment) throws IOException {
     checkHasFamilies(increment);
     NoncedRegionServerCallable<Result> callable =
-      new NoncedRegionServerCallable<Result>(this.connection,
-      this.rpcControllerFactory, getName(), increment.getRow()) {
-        @Override
-        protected Result rpcCall() throws Exception {
-          MutateRequest request = RequestConverter.buildMutateRequest(
-            getLocation().getRegionInfo().getRegionName(), increment, getNonceGroup(), getNonce());
-          MutateResponse response = getStub().mutate(getRpcController(), request);
-          // Should this check for null like append does?
-          return ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
-        }
+        new NoncedRegionServerCallable<Result>(this.connection, getName(), increment.getRow(),
+            this.rpcControllerFactory.newController()) {
+      @Override
+      protected Result rpcCall() throws Exception {
+        MutateRequest request = RequestConverter.buildMutateRequest(
+          getLocation().getRegionInfo().getRegionName(), increment, getNonceGroup(), getNonce());
+        MutateResponse response = doMutate(request);
+        // Should this check for null like append does?
+        return ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
+      }
     };
     return rpcCallerFactory.<Result> newCaller(writeRpcTimeout).callWithRetries(callable,
         this.operationTimeout);
@@ -703,14 +702,14 @@ public class HTable implements Table {
     }
 
     NoncedRegionServerCallable<Long> callable =
-        new NoncedRegionServerCallable<Long>(this.connection, this.rpcControllerFactory, getName(),
-            row) {
+        new NoncedRegionServerCallable<Long>(this.connection, getName(), row,
+            this.rpcControllerFactory.newController()) {
       @Override
       protected Long rpcCall() throws Exception {
         MutateRequest request = RequestConverter.buildIncrementRequest(
-            getLocation().getRegionInfo().getRegionName(), row, family,
-            qualifier, amount, durability, getNonceGroup(), getNonce());
-        MutateResponse response = getStub().mutate(getRpcController(), request);
+          getLocation().getRegionInfo().getRegionName(), row, family,
+          qualifier, amount, durability, getNonceGroup(), getNonce());
+        MutateResponse response = doMutate(request);
         Result result = ProtobufUtil.toResult(response.getResult(), getRpcControllerCellScanner());
         return Long.valueOf(Bytes.toLong(result.getValue(family, qualifier)));
       }
@@ -727,7 +726,19 @@ public class HTable implements Table {
       final byte [] family, final byte [] qualifier, final byte [] value,
       final Put put)
   throws IOException {
-    return checkAndPut(row, family, qualifier, CompareOp.EQUAL, value, put);
+    ClientServiceCallable<Boolean> callable = new ClientServiceCallable<Boolean>(this.connection, getName(), row,
+        this.rpcControllerFactory.newController()) {
+      @Override
+      protected Boolean rpcCall() throws Exception {
+        MutateRequest request = RequestConverter.buildMutateRequest(
+          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
+          new BinaryComparator(value), CompareType.EQUAL, put);
+        MutateResponse response = doMutate(request);
+        return Boolean.valueOf(response.getProcessed());
+      }
+    };
+    return rpcCallerFactory.<Boolean> newCaller(this.writeRpcTimeout).
+        callWithRetries(callable, this.operationTimeout);
   }
 
   /**
@@ -738,16 +749,16 @@ public class HTable implements Table {
       final byte [] qualifier, final CompareOp compareOp, final byte [] value,
       final Put put)
   throws IOException {
-    RegionServerCallable<Boolean> callable =
-        new RegionServerCallable<Boolean>(this.connection, this.rpcControllerFactory,
-            getName(), row) {
+    ClientServiceCallable<Boolean> callable =
+        new ClientServiceCallable<Boolean>(this.connection, getName(), row,
+            this.rpcControllerFactory.newController()) {
       @Override
       protected Boolean rpcCall() throws Exception {
         CompareType compareType = CompareType.valueOf(compareOp.name());
         MutateRequest request = RequestConverter.buildMutateRequest(
          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
          new BinaryComparator(value), compareType, put);
-        MutateResponse response = getStub().mutate(getRpcController(), request);
+        MutateResponse response = doMutate(request);
         return Boolean.valueOf(response.getProcessed());
       }
     };
@@ -762,7 +773,19 @@ public class HTable implements Table {
   public boolean checkAndDelete(final byte [] row, final byte [] family, final byte [] qualifier,
       final byte [] value, final Delete delete)
   throws IOException {
-    return checkAndDelete(row, family, qualifier, CompareOp.EQUAL, value, delete);
+    ClientServiceCallable<Boolean> callable = new ClientServiceCallable<Boolean>(this.connection, getName(), row,
+        this.rpcControllerFactory.newController()) {
+      @Override
+      protected Boolean rpcCall() throws Exception {
+        MutateRequest request = RequestConverter.buildMutateRequest(
+          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
+          new BinaryComparator(value), CompareType.EQUAL, delete);
+        MutateResponse response = doMutate(request);
+        return Boolean.valueOf(response.getProcessed());
+      }
+    };
+    return rpcCallerFactory.<Boolean> newCaller(this.writeRpcTimeout).
+        callWithRetries(callable, this.operationTimeout);
   }
 
   /**
@@ -775,14 +798,14 @@ public class HTable implements Table {
   throws IOException {
     CancellableRegionServerCallable<SingleResponse> callable =
         new CancellableRegionServerCallable<SingleResponse>(
-            this.connection, getName(), row, this.rpcControllerFactory) {
+            this.connection, getName(), row, this.rpcControllerFactory.newController()) {
       @Override
       protected SingleResponse rpcCall() throws Exception {
         CompareType compareType = CompareType.valueOf(compareOp.name());
         MutateRequest request = RequestConverter.buildMutateRequest(
          getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
          new BinaryComparator(value), compareType, delete);
-        MutateResponse response = getStub().mutate(getRpcController(), request);
+        MutateResponse response = doMutate(request);
         return ResponseConverter.getResult(request, response, getRpcControllerCellScanner());
       }
     };
@@ -808,14 +831,14 @@ public class HTable implements Table {
     throws IOException {
     CancellableRegionServerCallable<MultiResponse> callable =
      new CancellableRegionServerCallable<MultiResponse>(connection, getName(), rm.getRow(),
-        rpcControllerFactory) {
+        rpcControllerFactory.newController()) {
         @Override
         protected MultiResponse rpcCall() throws Exception {
           CompareType compareType = CompareType.valueOf(compareOp.name());
           MultiRequest request = RequestConverter.buildMutateRequest(
            getLocation().getRegionInfo().getRegionName(), row, family, qualifier,
            new BinaryComparator(value), compareType, rm);
-          ClientProtos.MultiResponse response = getStub().multi(getRpcController(), request);
+          ClientProtos.MultiResponse response = doMulti(request);
           ClientProtos.RegionActionResult res = response.getRegionActionResultList().get(0);
           if (res.hasException()) {
             Throwable ex = ProtobufUtil.toException(res.getException());
@@ -1044,10 +1067,8 @@ public class HTable implements Table {
   public <T extends Service, R> void coprocessorService(final Class<T> service,
       byte[] startKey, byte[] endKey, final Batch.Call<T,R> callable,
       final Batch.Callback<R> callback) throws ServiceException, Throwable {
-
     // get regions covered by the row range
     List<byte[]> keys = getStartKeysInRange(startKey, endKey);
-
     Map<byte[],Future<R>> futures =
         new TreeMap<byte[],Future<R>>(Bytes.BYTES_COMPARATOR);
     for (final byte[] r : keys) {
@@ -1056,7 +1077,8 @@ public class HTable implements Table {
       Future<R> future = pool.submit(new Callable<R>() {
         @Override
         public R call() throws Exception {
-          T instance = ProtobufUtil.newServiceStub(service, channel);
+          T instance =
+              org.apache.hadoop.hbase.protobuf.ProtobufUtil.newServiceStub(service, channel);
           R result = callable.call(instance);
           byte[] region = channel.getLastRegion();
           if (callback != null) {
@@ -1184,7 +1206,8 @@ public class HTable implements Table {
       return;
     }
 
-    List<RegionCoprocessorServiceExec> execs = new ArrayList<RegionCoprocessorServiceExec>();
+    List<RegionCoprocessorServiceExec> execs =
+        new ArrayList<RegionCoprocessorServiceExec>(keys.size());
     final Map<byte[], RegionCoprocessorServiceExec> execsByRow =
        new TreeMap<byte[], RegionCoprocessorServiceExec>(Bytes.BYTES_COMPARATOR);
     for (int i = 0; i < keys.size(); i++) {
@@ -1221,7 +1244,8 @@ public class HTable implements Table {
             }
             try {
               Message.Builder builder = responsePrototype.newBuilderForType();
-              ProtobufUtil.mergeFrom(builder, serviceResult.getValue().getValue());
+              org.apache.hadoop.hbase.protobuf.ProtobufUtil.mergeFrom(builder,
+                  serviceResult.getValue().getValue().toByteArray());
               callback.update(region, row, (R) builder.build());
             } catch (IOException e) {
               LOG.error("Unexpected response type from endpoint " + 
methodDescriptor.getFullName(),
@@ -1259,4 +1283,4 @@ public class HTable implements Table {
     }
     return mutator;
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
index 5db0546..0b24bcd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCallable.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
index 47693f4..1dc3a47 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterKeepAliveConnection.java
@@ -20,7 +20,7 @@
 
 package org.apache.hadoop.hbase.client;
 
-import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos;
 
 /**
  * A KeepAlive connection is not physically closed immediately after the close,

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
index 4fa20e6..22a5561 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MetricsConnection.java
@@ -18,8 +18,8 @@
 package org.apache.hadoop.hbase.client;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.protobuf.Descriptors.MethodDescriptor;
-import com.google.protobuf.Message;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.MethodDescriptor;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.Message;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Histogram;
 import com.codahale.metrics.MetricRegistry;
@@ -28,9 +28,9 @@ import com.codahale.metrics.JmxReporter;
 import com.codahale.metrics.RatioGauge;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ClientService;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutateRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto.MutationType;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.ClientService;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutateRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto.MutationType;
 import org.apache.hadoop.hbase.util.Bytes;
 
 import java.util.concurrent.ConcurrentHashMap;
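
For context on the imports above: both the generated protos and the protobuf runtime itself are relocated under the org.apache.hadoop.hbase.shaded prefix, so shaded and unshaded copies can coexist on one classpath while remaining unrelated Java types. An illustrative sketch (not from the commit):

    class ShadedVsUnshaded {
      void example(com.google.protobuf.Message unshaded,
          org.apache.hadoop.hbase.shaded.com.google.protobuf.Message shaded) {
        // The two Message types share no common supertype, so assigning one
        // to the other is a compile error; bytes are the only bridge.
        byte[] wire = shaded.toByteArray();
      }
    }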

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
index 18376f4..937e1b5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
@@ -24,7 +24,7 @@ import java.util.Map;
 import java.util.TreeMap;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
index 3ef97e78..6067ef0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiServerCallable.java
@@ -31,15 +31,14 @@ import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.ipc.HBaseRpcController;
-import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.RequestConverter;
-import org.apache.hadoop.hbase.protobuf.ResponseConverter;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MutationProto;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.RegionAction;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MultiRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.MutationProto;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.RegionAction;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.shaded.com.google.protobuf.RpcController;
 
 import com.google.common.annotations.VisibleForTesting;
 
@@ -55,8 +54,8 @@ class MultiServerCallable<R> extends CancellableRegionServerCallable<MultiRespon
   private final boolean cellBlock;
 
   MultiServerCallable(final ClusterConnection connection, final TableName tableName,
-      final ServerName location, RpcControllerFactory rpcFactory, final MultiAction<R> multi) {
-    super(connection, tableName, null, rpcFactory);
+      final ServerName location, final MultiAction<R> multi, RpcController rpcController) {
+    super(connection, tableName, null, rpcController);
     this.multiAction = multi;
     // RegionServerCallable has HRegionLocation field, but this is a multi-region request.
     // Using region info from parent HRegionLocation would be a mistake for this class; so
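
Note the reshaped constructor: callers now pass a ready-made RpcController rather than an RpcControllerFactory. A hypothetical call site under the new signature (conn, tableName, server and multiAction are stand-in variables; this assumes the factory's newController() result is usable where the shaded RpcController is expected):

    RpcController controller = conn.getRpcControllerFactory().newController();
    MultiServerCallable<Row> callable =
        new MultiServerCallable<Row>(conn, tableName, server, multiAction, controller);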

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 06e0224..42c159e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hbase.client;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -41,7 +42,9 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
 import org.apache.hadoop.hbase.security.access.AccessControlConstants;
+import org.apache.hadoop.hbase.security.access.AccessControlUtil;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.CellVisibility;
 import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
@@ -332,8 +335,8 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
    * @param expression
    */
   public Mutation setCellVisibility(CellVisibility expression) {
-    this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY, ProtobufUtil
-        .toCellVisibility(expression).toByteArray());
+    this.setAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY,
+        toCellVisibility(expression).toByteArray());
     return this;
   }
 
@@ -344,7 +347,50 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
   public CellVisibility getCellVisibility() throws DeserializationException {
     byte[] cellVisibilityBytes = this.getAttribute(VisibilityConstants.VISIBILITY_LABELS_ATTR_KEY);
     if (cellVisibilityBytes == null) return null;
-    return ProtobufUtil.toCellVisibility(cellVisibilityBytes);
+    return toCellVisibility(cellVisibilityBytes);
+  }
+
+  /**
+   * Create a protocol buffer CellVisibility based on a client CellVisibility.
+   *
+   * @param cellVisibility
+   * @return a protocol buffer CellVisibility
+   */
+  static ClientProtos.CellVisibility toCellVisibility(CellVisibility cellVisibility) {
+    ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
+    builder.setExpression(cellVisibility.getExpression());
+    return builder.build();
+  }
+
+  /**
+   * Convert a protocol buffer CellVisibility to a client CellVisibility
+   *
+   * @param proto
+   * @return the converted client CellVisibility
+   */
+  private static CellVisibility toCellVisibility(ClientProtos.CellVisibility proto) {
+    if (proto == null) return null;
+    return new CellVisibility(proto.getExpression());
+  }
+
+  /**
+   * Convert a protocol buffer CellVisibility bytes to a client CellVisibility
+   *
+   * @param protoBytes
+   * @return the converted client CellVisibility
+   * @throws DeserializationException
+   */
+  private static CellVisibility toCellVisibility(byte[] protoBytes) throws DeserializationException {
+    if (protoBytes == null) return null;
+    ClientProtos.CellVisibility.Builder builder = ClientProtos.CellVisibility.newBuilder();
+    ClientProtos.CellVisibility proto = null;
+    try {
+      ProtobufUtil.mergeFrom(builder, protoBytes);
+      proto = builder.build();
+    } catch (IOException e) {
+      throw new DeserializationException(e);
+    }
+    return toCellVisibility(proto);
   }
 
   /**
@@ -413,7 +459,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
    */
   public Mutation setACL(String user, Permission perms) {
     setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
-      ProtobufUtil.toUsersAndPermissions(user, perms).toByteArray());
+      AccessControlUtil.toUsersAndPermissions(user, perms).toByteArray());
     return this;
   }
 
@@ -426,7 +472,7 @@ public abstract class Mutation extends OperationWithAttributes implements Row, C
       permMap.put(entry.getKey(), entry.getValue());
     }
     setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
-      ProtobufUtil.toUsersAndPermissions(permMap).toByteArray());
+      AccessControlUtil.toUsersAndPermissions(permMap).toByteArray());
     return this;
   }
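
From a client's point of view the visibility API is unchanged by the new private helpers; the expression still round-trips through the protobuf-encoded attribute. A short usage sketch (the label expression is illustrative):

    void tagVisibility(Put put) throws DeserializationException {
      put.setCellVisibility(new CellVisibility("(secret|topsecret)"));
      // Parsed back from the attribute bytes via the new toCellVisibility(byte[]):
      CellVisibility cv = put.getCellVisibility();
    }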
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
index f3f9168..3832e32 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryableCallerInterceptor.java
@@ -25,9 +25,9 @@ import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
 
 /**
  * Class that acts as a NoOpInterceptor. This class is used in case the
- * {@link RetryingCallerInterceptor} was not configured correctly or an
- * {@link RetryingCallerInterceptor} was never configured in the first place.
- * 
+ * RetryingCallerInterceptor was not configured correctly or an
+ * RetryingCallerInterceptor was never configured in the first place.
+ *
  */
 @InterfaceAudience.Private
 class NoOpRetryableCallerInterceptor extends RetryingCallerInterceptor {
@@ -65,4 +65,4 @@ class NoOpRetryableCallerInterceptor extends RetryingCallerInterceptor {
   public String toString() {
     return "NoOpRetryableCallerInterceptor";
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryingInterceptorContext.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryingInterceptorContext.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryingInterceptorContext.java
index f8542bd..59ca6b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryingInterceptorContext.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoOpRetryingInterceptorContext.java
@@ -28,17 +28,14 @@ class NoOpRetryingInterceptorContext extends RetryingCallerInterceptorContext {
   }
 
   @Override
-  public RetryingCallerInterceptorContext prepare(
-      RetryingCallableBase callable) {
+  public RetryingCallerInterceptorContext prepare(RetryingCallable<?> callable) {
     // Do Nothing
     return this;
   }
 
   @Override
-  public RetryingCallerInterceptorContext prepare(
-      RetryingCallableBase callable, int tries) {
+  public RetryingCallerInterceptorContext prepare(RetryingCallable<?> callable, int tries) {
     // Do Nothing
     return this;
   }
-
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
index 7c01e21..aff0205 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/NoncedRegionServerCallable.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.client;
 
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
+import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 
 /**
  * Implementations make an rpc call against a RegionService via a protobuf Service.
@@ -46,10 +46,9 @@ public abstract class NoncedRegionServerCallable<T> extends CancellableRegionSer
    * @param tableName Table name to which <code>row</code> belongs.
    * @param row The row we want in <code>tableName</code>.
    */
-  public NoncedRegionServerCallable(Connection connection,
-                                    RpcControllerFactory rpcControllerFactory,
-                                    TableName tableName, byte [] row) {
-    super(connection, tableName, row, rpcControllerFactory);
+  public NoncedRegionServerCallable(Connection connection, TableName tableName, byte [] row,
+      HBaseRpcController rpcController) {
+    super(connection, tableName, row, rpcController);
     this.nonce = getConnection().getNonceGenerator().newNonce();
   }
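
The same constructor reshaping as in MultiServerCallable applies here, and each instance draws a fresh nonce from the connection's nonce generator. A hypothetical anonymous subclass under the new signature, assuming rpcCall() is the abstract hook as in the other callables in this commit and rpcControllerFactory is in scope:

    NoncedRegionServerCallable<Long> callable =
        new NoncedRegionServerCallable<Long>(conn, tableName, row,
            rpcControllerFactory.newController()) {
          @Override
          protected Long rpcCall() throws Exception {
            return 0L; // a real implementation issues the nonced RPC here
          }
        };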
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
index 53dd2c1..d6befb7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
@@ -25,11 +25,12 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.filter.Filter;
 import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.security.access.AccessControlConstants;
+import org.apache.hadoop.hbase.security.access.AccessControlUtil;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.security.visibility.VisibilityConstants;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
@@ -96,7 +97,7 @@ public abstract class Query extends OperationWithAttributes {
    */
   public Query setACL(String user, Permission perms) {
     setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
-      ProtobufUtil.toUsersAndPermissions(user, perms).toByteArray());
+      AccessControlUtil.toUsersAndPermissions(user, perms).toByteArray());
     return this;
   }
 
@@ -109,7 +110,7 @@ public abstract class Query extends OperationWithAttributes {
       permMap.put(entry.getKey(), entry.getValue());
     }
     setAttribute(AccessControlConstants.OP_ATTRIBUTE_ACL,
-      ProtobufUtil.toUsersAndPermissions(permMap).toByteArray());
+        AccessControlUtil.toUsersAndPermissions(permMap).toByteArray());
     return this;
   }
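
As in Mutation, ACL attributes are now encoded through AccessControlUtil instead of ProtobufUtil, with no change to the public setACL contract. A brief usage sketch (user name and permission are illustrative):

    Scan scan = new Scan();
    scan.setACL("alice", new Permission(Permission.Action.READ));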
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
index 9b3f6ef..6846562 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionAdminServiceCallable.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.ipc.HBaseRpcController;
 import org.apache.hadoop.hbase.ipc.RpcControllerFactory;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.AdminService;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.AdminService;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
new file mode 100644
index 0000000..2dc73e0
--- /dev/null
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorRpcChannel.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.client;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
+
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.RpcController;
+
+/**
+ * Provides clients with an RPC connection to call Coprocessor Endpoint
+ * {@link com.google.protobuf.Service}s
+ * against a given table region.  An instance of this class may be obtained
+ * by calling {@link org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])},
+ * but should normally only be used in creating a new {@link com.google.protobuf.Service} stub to
+ * call the endpoint methods.
+ * @see org.apache.hadoop.hbase.client.Table#coprocessorService(byte[])
+ */
+@InterfaceAudience.Private
+class RegionCoprocessorRpcChannel extends SyncCoprocessorRpcChannel {
+  private static final Log LOG = LogFactory.getLog(RegionCoprocessorRpcChannel.class);
+  private final TableName table;
+  private final byte [] row;
+  private final ClusterConnection conn;
+  private byte[] lastRegion;
+  private final int operationTimeout;
+  private final RpcRetryingCallerFactory rpcCallerFactory;
+
+  /**
+   * Constructor
+   * @param conn connection to use
+   * @param table to connect to
+   * @param row to locate region with
+   */
+  RegionCoprocessorRpcChannel(ClusterConnection conn, TableName table, byte[] row) {
+    this.table = table;
+    this.row = row;
+    this.conn = conn;
+    this.operationTimeout = conn.getConnectionConfiguration().getOperationTimeout();
+    this.rpcCallerFactory = conn.getRpcRetryingCallerFactory();
+  }
+
+  @Override
+  protected Message callExecService(final RpcController controller,
+      final Descriptors.MethodDescriptor method, final Message request,
+      final Message responsePrototype)
+  throws IOException {
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("Call: " + method.getName() + ", " + request.toString());
+    }
+    if (row == null) {
+      throw new NullPointerException("Can't be null!");
+    }
+    ClientServiceCallable<CoprocessorServiceResponse> callable =
+      new ClientServiceCallable<CoprocessorServiceResponse>(this.conn,
+              this.table, this.row, this.conn.getRpcControllerFactory().newController()) {
+      @Override
+      protected CoprocessorServiceResponse rpcCall() throws Exception {
+        byte [] regionName = getLocation().getRegionInfo().getRegionName();
+        CoprocessorServiceRequest csr =
+            CoprocessorRpcUtils.getCoprocessorServiceRequest(method, request, row, regionName);
+        return getStub().execService(getRpcController(), csr);
+      }
+    };
+    CoprocessorServiceResponse result =
+        this.rpcCallerFactory.<CoprocessorServiceResponse> newCaller().callWithRetries(callable,
+            operationTimeout);
+    this.lastRegion = result.getRegion().getValue().toByteArray();
+    return CoprocessorRpcUtils.getResponse(result, responsePrototype);
+  }
+
+  /**
+   * Get last region this RpcChannel communicated with
+   * @return region name as byte array
+   */
+  public byte[] getLastRegion() {
+    return lastRegion;
+  }
+}
\ No newline at end of file
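
Per the class javadoc, client code never constructs this channel directly; it arrives via Table#coprocessorService(byte[]) and is wrapped in a generated service stub. A sketch, where MyService stands in for any protobuf-generated coprocessor endpoint service (hypothetical name):

    CoprocessorRpcChannel channel = table.coprocessorService(Bytes.toBytes("row1"));
    MyService.BlockingInterface stub = MyService.newBlockingStub(channel);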

http://git-wip-us.apache.org/repos/asf/hbase/blob/17d4b70d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
index ad1d2a1..d53e12d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
@@ -30,9 +30,9 @@ import com.google.protobuf.Message;
 /**
  * Represents a coprocessor service method execution against a single region.  While coprocessor
  * service calls are performed against a region, this class implements {@link Row} in order to
- * make use of the {@link AsyncProcess} framework for batching multi-region calls per region server.
+ * make use of the AsyncProcess framework for batching multi-region calls per region server.
  *
- * <p><b>Note:</b> This class should not be instantiated directly.  Use 
+ * <p><b>Note:</b> This class should not be instantiated directly.  Use
  * HTable#batchCoprocessorService instead.</p>
  */
 @InterfaceAudience.Private
