http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
index fbcbb54..3bec203 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestCoprocessorTableEndpoint.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -47,7 +47,6 @@ import org.junit.rules.TestName;
 
 @Category({CoprocessorTests.class, MediumTests.class})
 public class TestCoprocessorTableEndpoint {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestCoprocessorTableEndpoint.class);
@@ -81,7 +80,7 @@ public class TestCoprocessorTableEndpoint {
 
     HTableDescriptor desc = new HTableDescriptor(tableName);
     desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
-    desc.addCoprocessor(org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName());
+    desc.addCoprocessor(ColumnAggregationEndpoint.class.getName());
 
     createTable(desc);
     verifyTable(tableName);
@@ -96,7 +95,7 @@ public class TestCoprocessorTableEndpoint {
 
     createTable(desc);
 
-    desc.addCoprocessor(org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName());
+    desc.addCoprocessor(ColumnAggregationEndpoint.class.getName());
     updateTable(desc);
 
     verifyTable(tableName);
@@ -113,24 +112,24 @@ public class TestCoprocessorTableEndpoint {
   private static Map<byte [], Long> sum(final Table table, final byte [] family,
     final byte [] qualifier, final byte [] start, final byte [] end)
       throws ServiceException, Throwable {
-  return table.coprocessorService(ColumnAggregationProtos.ColumnAggregationService.class,
+    return table.coprocessorService(ColumnAggregationProtos.ColumnAggregationService.class,
       start, end,
-    new Batch.Call<ColumnAggregationProtos.ColumnAggregationService, Long>() {
-      @Override
-      public Long call(ColumnAggregationProtos.ColumnAggregationService instance)
-      throws IOException {
-        CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback =
-            new CoprocessorRpcUtils.BlockingRpcCallback<>();
-        ColumnAggregationProtos.SumRequest.Builder builder =
-          ColumnAggregationProtos.SumRequest.newBuilder();
-        builder.setFamily(ByteString.copyFrom(family));
-        if (qualifier != null && qualifier.length > 0) {
-          builder.setQualifier(ByteString.copyFrom(qualifier));
+      new Batch.Call<ColumnAggregationProtos.ColumnAggregationService, Long>() {
+        @Override
+        public Long call(ColumnAggregationProtos.ColumnAggregationService instance)
+          throws IOException {
+          CoprocessorRpcUtils.BlockingRpcCallback<ColumnAggregationProtos.SumResponse> rpcCallback =
+              new CoprocessorRpcUtils.BlockingRpcCallback<>();
+          ColumnAggregationProtos.SumRequest.Builder builder =
+            ColumnAggregationProtos.SumRequest.newBuilder();
+          builder.setFamily(ByteString.copyFrom(family));
+          if (qualifier != null && qualifier.length > 0) {
+            builder.setQualifier(ByteString.copyFrom(qualifier));
+          }
+          instance.sum(null, builder.build(), rpcCallback);
+          return rpcCallback.get().getSum();
         }
-        instance.sum(null, builder.build(), rpcCallback);
-        return rpcCallback.get().getSum();
-      }
-    });
+      });
   }
 
   private static final void createTable(HTableDescriptor desc) throws Exception {

http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 01e5b59..e788e5d 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -84,7 +84,6 @@ import org.slf4j.LoggerFactory;
  */
 @Category({CoprocessorTests.class, MediumTests.class})
 public class TestRowProcessorEndpoint {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestRowProcessorEndpoint.class);
@@ -219,8 +218,7 @@ public class TestRowProcessorEndpoint {
     return result;
   }
 
-  private void concurrentExec(
-      final Runnable task, final int numThreads) throws Throwable {
+  private void concurrentExec(final Runnable task, final int numThreads) throws Throwable {
     startSignal = new CountDownLatch(numThreads);
     doneSignal = new CountDownLatch(numThreads);
     for (int i = 0; i < numThreads; ++i) {
@@ -313,10 +311,10 @@ public class TestRowProcessorEndpoint {
    * So they can be loaded with the endpoint on the coprocessor.
    */
   public static class RowProcessorEndpoint<S extends Message,T extends Message>
-  extends BaseRowProcessorEndpoint<S,T> {
+          extends BaseRowProcessorEndpoint<S,T> {
     public static class IncrementCounterProcessor extends
-        BaseRowProcessor<IncrementCounterProcessorTestProtos.IncCounterProcessorRequest,
-        IncrementCounterProcessorTestProtos.IncCounterProcessorResponse> {
+            BaseRowProcessor<IncrementCounterProcessorTestProtos.IncCounterProcessorRequest,
+                    IncrementCounterProcessorTestProtos.IncCounterProcessorResponse> {
       int counter = 0;
       byte[] row = new byte[0];
 
@@ -397,7 +395,7 @@ public class TestRowProcessorEndpoint {
     }
 
     public static class FriendsOfFriendsProcessor extends
-        BaseRowProcessor<FriendsOfFriendsProcessorRequest, FriendsOfFriendsProcessorResponse> {
+            BaseRowProcessor<FriendsOfFriendsProcessorRequest, FriendsOfFriendsProcessorResponse> {
       byte[] row = null;
       byte[] person = null;
       final Set<String> result = new HashSet<>();
@@ -482,7 +480,7 @@ public class TestRowProcessorEndpoint {
     }
 
     public static class RowSwapProcessor extends
-        BaseRowProcessor<RowSwapProcessorRequest, RowSwapProcessorResponse> {
+            BaseRowProcessor<RowSwapProcessorRequest, RowSwapProcessorResponse> {
       byte[] row1 = new byte[0];
       byte[] row2 = new byte[0];
 
@@ -586,8 +584,7 @@ public class TestRowProcessorEndpoint {
     }
 
     public static class TimeoutProcessor extends
-        BaseRowProcessor<TimeoutProcessorRequest, TimeoutProcessorResponse> {
-
+            BaseRowProcessor<TimeoutProcessorRequest, TimeoutProcessorResponse> {
       byte[] row = new byte[0];
 
       /**
@@ -643,8 +640,7 @@ public class TestRowProcessorEndpoint {
       }
     }
 
-    public static void doScan(
-        HRegion region, Scan scan, List<Cell> result) throws IOException {
+    public static void doScan(HRegion region, Scan scan, List<Cell> result) throws IOException {
       InternalScanner scanner = null;
       try {
         scan.setIsolationLevel(IsolationLevel.READ_UNCOMMITTED);
@@ -652,7 +648,9 @@ public class TestRowProcessorEndpoint {
         result.clear();
         scanner.next(result);
       } finally {
-        if (scanner != null) scanner.close();
+        if (scanner != null) {
+          scanner.close();
+        }
       }
     }
   }
@@ -676,5 +674,4 @@ public class TestRowProcessorEndpoint {
     out.append("]");
     return out.toString();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 38c3081..21f17f7 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -87,7 +87,6 @@ import org.slf4j.LoggerFactory;
 
 @Category({MediumTests.class})
 public class TestSecureExport {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestSecureExport.class);
@@ -146,12 +145,16 @@ public class TestSecureExport {
       USER_XO + "/" + LOCALHOST,
       USER_NONE + "/" + LOCALHOST);
   }
+
   private static User getUserByLogin(final String user) throws IOException {
-    return User.create(UserGroupInformation.loginUserFromKeytabAndReturnUGI(getPrinciple(user), KEYTAB_FILE.getAbsolutePath()));
+    return User.create(UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        getPrinciple(user), KEYTAB_FILE.getAbsolutePath()));
   }
+
   private static String getPrinciple(final String user) {
     return user + "/" + LOCALHOST + "@" + KDC.getRealm();
   }
+
   private static void setUpClusterKdc() throws Exception {
     HBaseKerberosUtils.setKeytabFileForTesting(KEYTAB_FILE.getAbsolutePath());
     HBaseKerberosUtils.setPrincipalForTesting(SERVER_PRINCIPAL + "@" + KDC.getRealm());
@@ -160,30 +163,42 @@ public class TestSecureExport {
     // the following key should be changed.
     // 1) DFS_NAMENODE_USER_NAME_KEY -> DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY
     // 2) DFS_DATANODE_USER_NAME_KEY -> DFS_DATANODE_KERBEROS_PRINCIPAL_KEY
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY, SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, KEYTAB_FILE.getAbsolutePath());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
+        SERVER_PRINCIPAL + "@" + KDC.getRealm());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY,
+        SERVER_PRINCIPAL + "@" + KDC.getRealm());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+        KEYTAB_FILE.getAbsolutePath());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY,
+        KEYTAB_FILE.getAbsolutePath());
     // set yarn principal
-    UTIL.getConfiguration().set(YarnConfiguration.RM_PRINCIPAL, SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(YarnConfiguration.NM_PRINCIPAL, SERVER_PRINCIPAL + "@" + KDC.getRealm());
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, HTTP_PRINCIPAL + "@" + KDC.getRealm());
+    UTIL.getConfiguration().set(YarnConfiguration.RM_PRINCIPAL,
+        SERVER_PRINCIPAL + "@" + KDC.getRealm());
+    UTIL.getConfiguration().set(YarnConfiguration.NM_PRINCIPAL,
+        SERVER_PRINCIPAL + "@" + KDC.getRealm());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+        HTTP_PRINCIPAL + "@" + KDC.getRealm());
     UTIL.getConfiguration().setBoolean(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
-    UTIL.getConfiguration().set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
+    UTIL.getConfiguration().set(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
+        HttpConfig.Policy.HTTPS_ONLY.name());
     UTIL.getConfiguration().set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
     UTIL.getConfiguration().set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, LOCALHOST + ":0");
 
     File keystoresDir = new File(UTIL.getDataTestDir("keystore").toUri().getPath());
     keystoresDir.mkdirs();
     String sslConfDir = KeyStoreTestUtil.getClasspathDir(TestSecureExport.class);
-    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir, UTIL.getConfiguration(), false);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir.getAbsolutePath(), sslConfDir,
+        UTIL.getConfiguration(), false);
 
     UTIL.getConfiguration().setBoolean("ignore.secure.ports.for.testing", true);
     UserGroupInformation.setConfiguration(UTIL.getConfiguration());
-    UTIL.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, UTIL.getConfiguration().get(
-      CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "," + Export.class.getName());
+    UTIL.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY,
+        UTIL.getConfiguration().get(
+            CoprocessorHost.REGION_COPROCESSOR_CONF_KEY) + "," + Export.class.getName());
   }
-  private static void addLabels(final Configuration conf, final List<String> users, final List<String> labels) throws Exception {
+
+  private static void addLabels(final Configuration conf, final List<String> users,
+      final List<String> labels) throws Exception {
     PrivilegedExceptionAction<VisibilityLabelsProtos.VisibilityLabelsResponse> action
       = () -> {
         try (Connection conn = ConnectionFactory.createConnection(conf)) {
@@ -207,19 +222,21 @@ public class TestSecureExport {
   @After
   public void cleanup() throws IOException {
   }
+
   private static void clearOutput(Path path) throws IOException {
     FileSystem fs = path.getFileSystem(UTIL.getConfiguration());
     if (fs.exists(path)) {
       assertEquals(true, fs.delete(path, true));
     }
   }
+
   /**
    * Sets the security firstly for getting the correct default realm.
-   * @throws Exception
    */
   @BeforeClass
   public static void beforeClass() throws Exception {
-    UserProvider.setUserProviderForTesting(UTIL.getConfiguration(), HadoopSecurityEnabledUserProviderForTesting.class);
+    UserProvider.setUserProviderForTesting(UTIL.getConfiguration(),
+        HadoopSecurityEnabledUserProviderForTesting.class);
     setUpKdcServer();
     SecureTestUtil.enableSecurity(UTIL.getConfiguration());
     UTIL.getConfiguration().setBoolean(AccessControlConstants.EXEC_PERMISSION_CHECKS_KEY, true);
@@ -252,11 +269,9 @@ public class TestSecureExport {
   /**
   * Test the ExportEndpoint's access levels. The {@link Export} test is ignored
    * since the access exceptions cannot be collected from the mappers.
-   *
-   * @throws java.io.IOException
    */
   @Test
-  public void testAccessCase() throws IOException, Throwable {
+  public void testAccessCase() throws Throwable {
     final String exportTable = name.getMethodName();
     TableDescriptor exportHtd = TableDescriptorBuilder
             .newBuilder(TableName.valueOf(name.getMethodName()))
@@ -339,11 +354,13 @@ public class TestSecureExport {
     SecureTestUtil.verifyAllowed(deleteAction, getUserByLogin(USER_OWNER));
     fs.delete(openDir, true);
   }
+
   @Test
   public void testVisibilityLabels() throws IOException, Throwable {
     final String exportTable = name.getMethodName() + "_export";
     final String importTable = name.getMethodName() + "_import";
-    final TableDescriptor exportHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(exportTable))
+    final TableDescriptor exportHtd = TableDescriptorBuilder
+            .newBuilder(TableName.valueOf(exportTable))
             .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYA))
             .setOwnerString(USER_OWNER)
             .build();
@@ -400,7 +417,8 @@ public class TestSecureExport {
         }
       };
       SecureTestUtil.verifyAllowed(exportAction, getUserByLogin(USER_OWNER));
-      final TableDescriptor importHtd = TableDescriptorBuilder.newBuilder(TableName.valueOf(importTable))
+      final TableDescriptor importHtd = TableDescriptorBuilder
+              .newBuilder(TableName.valueOf(importTable))
               .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILYB))
               .setOwnerString(USER_OWNER)
               .build();
@@ -411,7 +429,8 @@ public class TestSecureExport {
           importTable,
           output.toString()
         };
-        assertEquals(0, ToolRunner.run(new Configuration(UTIL.getConfiguration()), new Import(), args));
+        assertEquals(0, ToolRunner.run(
+            new Configuration(UTIL.getConfiguration()), new Import(), args));
         return null;
       };
       SecureTestUtil.verifyAllowed(importAction, getUserByLogin(USER_OWNER));

http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
index d32e6ea..0d15f93 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/SecureBulkLoadEndpointClient.java
@@ -15,17 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
@@ -41,12 +38,14 @@ import org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos;
 import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Pair;
 import org.apache.hadoop.security.token.Token;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Client proxy for SecureBulkLoadProtocol used in conjunction with SecureBulkLoadEndpoint
  * @deprecated Use for backward compatibility testing only. Will be removed when
  *             SecureBulkLoadEndpoint is not supported.
  */
+@Deprecated
 @InterfaceAudience.Private
 public class SecureBulkLoadEndpointClient {
   private Table table;
@@ -111,9 +110,8 @@ public class SecureBulkLoadEndpointClient {
   }
 
   public boolean bulkLoadHFiles(final List<Pair<byte[], String>> familyPaths,
-                         final Token<?> userToken,
-                         final String bulkToken,
-                         final byte[] startRow) throws IOException {
+          final Token<?> userToken, final String bulkToken, final byte[] startRow)
+          throws IOException {
     // we never want to send a batch of HFiles to all regions, thus cannot call
     // HTable#coprocessorService methods that take start and end rowkeys; see HBASE-9639
     try {
@@ -162,5 +160,4 @@ public class SecureBulkLoadEndpointClient {
       throw new IOException(throwable);
     }
   }
-
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
index 7196851..49697b8 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionServerBulkLoadWithOldSecureEndpoint.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -62,7 +62,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AdminProtos.CompactRegi
 @Category({RegionServerTests.class, LargeTests.class})
 @Ignore // BROKEN. FIX OR REMOVE.
 public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionServerBulkLoad {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestHRegionServerBulkLoadWithOldSecureEndpoint.class);
@@ -86,8 +85,8 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
     final AtomicLong numCompactions = new AtomicLong();
     private TableName tableName;
 
-    public AtomicHFileLoader(TableName tableName, TestContext ctx,
-        byte targetFamilies[][]) throws IOException {
+    public AtomicHFileLoader(TableName tableName, TestContext ctx, byte[][] targetFamilies)
+            throws IOException {
       super(ctx);
       this.tableName = tableName;
     }
@@ -114,19 +113,19 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
       final String bulkToken = new SecureBulkLoadEndpointClient(table).prepareBulkLoad(tableName);
       RpcControllerFactory rpcControllerFactory = new RpcControllerFactory(UTIL.getConfiguration());
       ClientServiceCallable<Void> callable =
-          new ClientServiceCallable<Void>(conn, tableName, Bytes.toBytes("aaa"),
-              rpcControllerFactory.newController(), HConstants.PRIORITY_UNSET) {
-            @Override
-            protected Void rpcCall() throws Exception {
-              LOG.debug("Going to connect to server " + getLocation() + " for row " +
-                  Bytes.toStringBinary(getRow()));
-              try (Table table = conn.getTable(getTableName())) {
-                boolean loaded = new SecureBulkLoadEndpointClient(table).bulkLoadHFiles(famPaths,
-                    null, bulkToken, getLocation().getRegionInfo().getStartKey());
-              }
-              return null;
+        new ClientServiceCallable<Void>(conn, tableName, Bytes.toBytes("aaa"),
+            rpcControllerFactory.newController(), HConstants.PRIORITY_UNSET) {
+          @Override
+          protected Void rpcCall() throws Exception {
+            LOG.debug("Going to connect to server " + getLocation() + " for row " +
+                Bytes.toStringBinary(getRow()));
+            try (Table table = conn.getTable(getTableName())) {
+              boolean loaded = new SecureBulkLoadEndpointClient(table).bulkLoadHFiles(famPaths,
+                  null, bulkToken, getLocation().getRegionInfo().getStartKey());
             }
-          };
+            return null;
+          }
+        };
       RpcRetryingCallerFactory factory = new RpcRetryingCallerFactory(conf);
       RpcRetryingCaller<Void> caller = factory.<Void> newCaller();
       caller.callWithRetries(callable, Integer.MAX_VALUE);
@@ -156,7 +155,7 @@ public class TestHRegionServerBulkLoadWithOldSecureEndpoint extends TestHRegionS
   }
 
   void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners)
-      throws Exception {
+          throws Exception {
     setupTable(tableName, 10);
 
     TestContext ctx = new TestContext(UTIL.getConfiguration());

http://git-wip-us.apache.org/repos/asf/hbase/blob/7357b0ce/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
index b306b76..bc368e3 100644
--- a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
+++ b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/regionserver/TestServerCustomProtocol.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -68,7 +68,6 @@ import org.slf4j.LoggerFactory;
 
 @Category({RegionServerTests.class, MediumTests.class})
 public class TestServerCustomProtocol {
-
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
       HBaseClassTestRule.forClass(TestServerCustomProtocol.class);
@@ -84,7 +83,9 @@ public class TestServerCustomProtocol {
 
     @Override
     public void start(CoprocessorEnvironment env) throws IOException {
-      if (env instanceof RegionCoprocessorEnvironment) return;
+      if (env instanceof RegionCoprocessorEnvironment) {
+        return;
+      }
       throw new CoprocessorException("Must be loaded on a table region!");
     }
 
@@ -116,9 +117,13 @@ public class TestServerCustomProtocol {
     @Override
     public void hello(RpcController controller, HelloRequest request,
         RpcCallback<HelloResponse> done) {
-      if (!request.hasName()) done.run(HelloResponse.newBuilder().setResponse(WHOAREYOU).build());
-      else if (request.getName().equals(NOBODY)) done.run(HelloResponse.newBuilder().build());
-      else done.run(HelloResponse.newBuilder().setResponse(HELLO + request.getName()).build());
+      if (!request.hasName()) {
+        done.run(HelloResponse.newBuilder().setResponse(WHOAREYOU).build());
+      } else if (request.getName().equals(NOBODY)) {
+        done.run(HelloResponse.newBuilder().build());
+      } else {
+        done.run(HelloResponse.newBuilder().setResponse(HELLO + request.getName()).build());
+      }
     }
 
     @Override
@@ -153,19 +158,19 @@ public class TestServerCustomProtocol {
   }
 
   @Before
-  public void before()  throws Exception {
+  public void before() throws Exception {
     final byte[][] SPLIT_KEYS = new byte[][] { ROW_B, ROW_C };
     Table table = util.createTable(TEST_TABLE, TEST_FAMILY, SPLIT_KEYS);
 
-    Put puta = new Put( ROW_A );
+    Put puta = new Put(ROW_A);
     puta.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
     table.put(puta);
 
-    Put putb = new Put( ROW_B );
+    Put putb = new Put(ROW_B);
     putb.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
     table.put(putb);
 
-    Put putc = new Put( ROW_C );
+    Put putc = new Put(ROW_C);
     putc.addColumn(TEST_FAMILY, Bytes.toBytes("col1"), Bytes.toBytes(1));
     table.put(putc);
   }
@@ -234,7 +239,7 @@ public class TestServerCustomProtocol {
   }
 
   private Map<byte [], String> hello(final Table table, final String send, final String response)
-  throws ServiceException, Throwable {
+          throws ServiceException, Throwable {
     Map<byte [], String> results = hello(table, send);
     for (Map.Entry<byte [], String> e: results.entrySet()) {
       assertEquals("Invalid custom protocol response", response, e.getValue());
@@ -243,13 +248,12 @@ public class TestServerCustomProtocol {
   }
 
   private Map<byte [], String> hello(final Table table, final String send)
-  throws ServiceException, Throwable {
+          throws ServiceException, Throwable {
     return hello(table, send, null, null);
   }
 
   private Map<byte [], String> hello(final Table table, final String send, final byte [] start,
-      final byte [] end)
-  throws ServiceException, Throwable {
+          final byte [] end) throws ServiceException, Throwable {
     return table.coprocessorService(PingProtos.PingService.class,
         start, end,
         new Batch.Call<PingProtos.PingService, String>() {
@@ -258,7 +262,9 @@ public class TestServerCustomProtocol {
             CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.HelloResponse> rpcCallback =
               new CoprocessorRpcUtils.BlockingRpcCallback<>();
             PingProtos.HelloRequest.Builder builder = PingProtos.HelloRequest.newBuilder();
-            if (send != null) builder.setName(send);
+            if (send != null) {
+              builder.setName(send);
+            }
             instance.hello(null, builder.build(), rpcCallback);
             PingProtos.HelloResponse r = rpcCallback.get();
             return r != null && r.hasResponse()? r.getResponse(): null;
@@ -267,8 +273,7 @@ public class TestServerCustomProtocol {
   }
 
   private Map<byte [], String> compoundOfHelloAndPing(final Table table, final byte [] start,
-      final byte [] end)
-  throws ServiceException, Throwable {
+          final byte [] end) throws ServiceException, Throwable {
     return table.coprocessorService(PingProtos.PingService.class,
         start, end,
         new Batch.Call<PingProtos.PingService, String>() {
@@ -286,9 +291,8 @@ public class TestServerCustomProtocol {
         });
   }
 
-  private Map<byte [], String> noop(final Table table, final byte [] start,
-      final byte [] end)
-  throws ServiceException, Throwable {
+  private Map<byte [], String> noop(final Table table, final byte [] start, final byte [] end)
+          throws ServiceException, Throwable {
     return table.coprocessorService(PingProtos.PingService.class, start, end,
         new Batch.Call<PingProtos.PingService, String>() {
           @Override
@@ -397,7 +401,7 @@ public class TestServerCustomProtocol {
   }
 
   private Map<byte [], String> ping(final Table table, final byte [] start, final byte [] end)
-  throws ServiceException, Throwable {
+          throws ServiceException, Throwable {
     return table.coprocessorService(PingProtos.PingService.class, start, end,
       new Batch.Call<PingProtos.PingService, String>() {
         @Override
@@ -410,8 +414,8 @@ public class TestServerCustomProtocol {
   private static String doPing(PingProtos.PingService instance) throws IOException {
     CoprocessorRpcUtils.BlockingRpcCallback<PingProtos.PingResponse> 
rpcCallback =
         new CoprocessorRpcUtils.BlockingRpcCallback<>();
-      instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
-      return rpcCallback.get().getPong();
+    instance.ping(null, PingProtos.PingRequest.newBuilder().build(), rpcCallback);
+    return rpcCallback.get().getPong();
   }
 
   @Test
@@ -459,18 +463,17 @@ public class TestServerCustomProtocol {
     }
   }
 
-  private void verifyRegionResults(RegionLocator table,
-      Map<byte[],String> results, byte[] row) throws Exception {
+  private void verifyRegionResults(RegionLocator table, Map<byte[],String> results, byte[] row)
+          throws Exception {
     verifyRegionResults(table, results, "pong", row);
   }
 
-  private void verifyRegionResults(RegionLocator regionLocator,
-      Map<byte[], String> results, String expected, byte[] row)
-  throws Exception {
+  private void verifyRegionResults(RegionLocator regionLocator, Map<byte[], String> results,
+          String expected, byte[] row) throws Exception {
     for (Map.Entry<byte [], String> e: results.entrySet()) {
       LOG.info("row=" + Bytes.toString(row) + ", expected=" + expected +
-       ", result key=" + Bytes.toString(e.getKey()) +
-       ", value=" + e.getValue());
+        ", result key=" + Bytes.toString(e.getKey()) +
+        ", value=" + e.getValue());
     }
     HRegionLocation loc = regionLocator.getRegionLocation(row, true);
     byte[] region = loc.getRegionInfo().getRegionName();
