http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
index 0e3355a..1d09dfa 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
@@ -112,7 +112,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
     TEST_UTIL.startMiniCluster(1);
 
     tableExecutorService = new ThreadPoolExecutor(1, POOL_SIZE, 60, TimeUnit.SECONDS,
-        new SynchronousQueue<Runnable>(), Threads.newDaemonThreadFactory("testhbck"));
+        new SynchronousQueue<>(), Threads.newDaemonThreadFactory("testhbck"));
 
     hbfsckExecutorService = new ScheduledThreadPoolExecutor(POOL_SIZE);
 
@@ -1402,7 +1402,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
       setupTable(tableName);
 
       // Mess it up by removing the RegionInfo for one region.
-      final List<Delete> deletes = new LinkedList<Delete>();
+      final List<Delete> deletes = new LinkedList<>();
       Table meta = connection.getTable(TableName.META_TABLE_NAME, hbfsckExecutorService);
       MetaTableAccessor.fullScanRegions(connection, new MetaTableAccessor.Visitor() {
 
@@ -1630,7 +1630,7 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
         am.regionOffline(state.getRegion());
       }
 
-      Map<HRegionInfo, ServerName> regionsMap = new HashMap<HRegionInfo, ServerName>();
+      Map<HRegionInfo, ServerName> regionsMap = new HashMap<>();
       regionsMap.put(regions.get(0).getRegionInfo(), regionServer.getServerName());
       am.assign(regionsMap);
       am.waitForAssignment(regions.get(0).getRegionInfo());
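
Every hunk in this change is the same mechanical cleanup: explicit generic type
arguments on the right-hand side of a new expression are replaced with the Java 7
diamond operator, letting the compiler infer them from the declared type. A minimal
standalone sketch of the pattern (class and variable names here are illustrative,
not taken from the patch):

  import java.util.ArrayList;
  import java.util.HashMap;
  import java.util.List;
  import java.util.Map;

  public class DiamondSketch {
    public static void main(String[] args) {
      // Before (pre-Java 7 style): type arguments repeated on both sides.
      List<String> namesOld = new ArrayList<String>();
      Map<String, Integer> countsOld = new HashMap<String, Integer>();

      // After: the diamond <> infers the arguments from the declared type.
      List<String> names = new ArrayList<>();
      Map<String, Integer> counts = new HashMap<>();

      names.add("hbase");
      counts.put("hbase", names.size());
      System.out.println(names + " " + counts);
    }
  }

After type erasure the compiled bytecode is identical either way; only the source
gets shorter.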

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplicas.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplicas.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplicas.java
index 9b92a69..7956d40 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplicas.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckReplicas.java
@@ -74,7 +74,7 @@ public class TestHBaseFsckReplicas extends BaseTestHBaseFsck {
     TEST_UTIL.startMiniCluster(3);
 
     tableExecutorService = new ThreadPoolExecutor(1, POOL_SIZE, 60, TimeUnit.SECONDS,
-        new SynchronousQueue<Runnable>(), Threads.newDaemonThreadFactory("testhbck"));
+        new SynchronousQueue<>(), Threads.newDaemonThreadFactory("testhbck"));
 
     hbfsckExecutorService = new ScheduledThreadPoolExecutor(POOL_SIZE);
 
@@ -255,7 +255,7 @@ public class TestHBaseFsckReplicas extends BaseTestHBaseFsck {
       }
       // get all the online regions in the regionservers
       Collection<ServerName> servers = admin.getClusterStatus().getServers();
-      Set<HRegionInfo> onlineRegions = new HashSet<HRegionInfo>();
+      Set<HRegionInfo> onlineRegions = new HashSet<>();
       for (ServerName s : servers) {
         List<HRegionInfo> list = admin.getOnlineRegions(s);
         onlineRegions.addAll(list);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
index 99a41f5..91a71c7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
@@ -86,7 +86,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
     TEST_UTIL.startMiniCluster(2);
 
     tableExecutorService = new ThreadPoolExecutor(1, POOL_SIZE, 60, TimeUnit.SECONDS,
-        new SynchronousQueue<Runnable>(), Threads.newDaemonThreadFactory("testhbck"));
+        new SynchronousQueue<>(), Threads.newDaemonThreadFactory("testhbck"));
 
     hbfsckExecutorService = new ScheduledThreadPoolExecutor(POOL_SIZE);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
index fbfbb47..c3f934d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdLock.java
@@ -50,7 +50,7 @@ public class TestIdLock {
 
   private IdLock idLock = new IdLock();
 
-  private Map<Long, String> idOwner = new ConcurrentHashMap<Long, String>();
+  private Map<Long, String> idOwner = new ConcurrentHashMap<>();
 
   private class IdLockTestThread implements Callable<Boolean> {
 
@@ -95,8 +95,7 @@ public class TestIdLock {
   public void testMultipleClients() throws Exception {
     ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS);
     try {
-      ExecutorCompletionService<Boolean> ecs =
-          new ExecutorCompletionService<Boolean>(exec);
+      ExecutorCompletionService<Boolean> ecs = new ExecutorCompletionService<>(exec);
       for (int i = 0; i < NUM_THREADS; ++i)
         ecs.submit(new IdLockTestThread("client_" + i));
       for (int i = 0; i < NUM_THREADS; ++i) {
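
The ExecutorCompletionService touched in the hunks above follows the standard JDK
submit-then-take pattern: tasks go in through submit(), and take() hands back
futures in completion order rather than submission order. A minimal sketch of that
pattern (the lambda task is a placeholder standing in for the test's
IdLockTestThread, and the thread count is illustrative):

  import java.util.concurrent.ExecutorCompletionService;
  import java.util.concurrent.ExecutorService;
  import java.util.concurrent.Executors;

  public class CompletionSketch {
    public static void main(String[] args) throws Exception {
      ExecutorService exec = Executors.newFixedThreadPool(4);
      try {
        // The diamond operator applies here too: Boolean is inferred.
        ExecutorCompletionService<Boolean> ecs = new ExecutorCompletionService<>(exec);
        for (int i = 0; i < 4; ++i) {
          final long delay = 10L * i;
          ecs.submit(() -> {      // placeholder task, not IdLockTestThread
            Thread.sleep(delay);
            return Boolean.TRUE;
          });
        }
        for (int i = 0; i < 4; ++i) {
          // take() blocks until some task finishes, whichever one that is.
          System.out.println("done: " + ecs.take().get());
        }
      } finally {
        exec.shutdown();
      }
    }
  }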

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
index 66f6d4b..2ccfad8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestIdReadWriteLock.java
@@ -53,7 +53,7 @@ public class TestIdReadWriteLock {
 
   private IdReadWriteLock idLock = new IdReadWriteLock();
 
-  private Map<Long, String> idOwner = new ConcurrentHashMap<Long, String>();
+  private Map<Long, String> idOwner = new ConcurrentHashMap<>();
 
   private class IdLockTestThread implements Callable<Boolean> {
 
@@ -104,8 +104,7 @@ public class TestIdReadWriteLock {
   public void testMultipleClients() throws Exception {
     ExecutorService exec = Executors.newFixedThreadPool(NUM_THREADS);
     try {
-      ExecutorCompletionService<Boolean> ecs =
-          new ExecutorCompletionService<Boolean>(exec);
+      ExecutorCompletionService<Boolean> ecs = new ExecutorCompletionService<>(exec);
       for (int i = 0; i < NUM_THREADS; ++i)
         ecs.submit(new IdLockTestThread("client_" + i));
       for (int i = 0; i < NUM_THREADS; ++i) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
index 0cf4609..865cd11 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadEncoded.java
@@ -40,7 +40,7 @@ public class TestMiniClusterLoadEncoded extends TestMiniClusterLoadParallel {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> parameters = new ArrayList<Object[]>();
+    List<Object[]> parameters = new ArrayList<>();
     for (DataBlockEncoding dataBlockEncoding : DataBlockEncoding.values() ) {
       parameters.add(new Object[]{dataBlockEncoding});
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
index 726a450..f765221 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMiniClusterLoadSequential.java
@@ -88,7 +88,7 @@ public class TestMiniClusterLoadSequential {
 
   @Parameters
   public static Collection<Object[]> parameters() {
-    List<Object[]> parameters = new ArrayList<Object[]>();
+    List<Object[]> parameters = new ArrayList<>();
     for (boolean multiPut : new boolean[]{false, true}) {
       for (DataBlockEncoding dataBlockEncoding : new DataBlockEncoding[] {
           DataBlockEncoding.NONE, DataBlockEncoding.PREFIX }) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java
index b229e91..7fc09d2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestPoolMap.java
@@ -46,7 +46,7 @@ public class TestPoolMap {
 
     @Override
     protected void setUp() throws Exception {
-      this.poolMap = new PoolMap<String, String>(getPoolType(), POOL_SIZE);
+      this.poolMap = new PoolMap<>(getPoolType(), POOL_SIZE);
     }
 
     protected abstract PoolType getPoolType();
@@ -117,7 +117,7 @@ public class TestPoolMap {
 
     public void testPoolCap() throws InterruptedException, ExecutionException {
       String randomKey = String.valueOf(random.nextInt());
-      List<String> randomValues = new ArrayList<String>();
+      List<String> randomValues = new ArrayList<>();
       for (int i = 0; i < POOL_SIZE * 2; i++) {
         String randomValue = String.valueOf(random.nextInt());
         randomValues.add(randomValue);
@@ -219,7 +219,7 @@ public class TestPoolMap {
     public void testPoolCap() throws InterruptedException, ExecutionException {
       // As long as we poll values we put, the pool size should remain zero
       String randomKey = String.valueOf(random.nextInt());
-      List<String> randomValues = new ArrayList<String>();
+      List<String> randomValues = new ArrayList<>();
       for (int i = 0; i < POOL_SIZE * 2; i++) {
         String randomValue = String.valueOf(random.nextInt());
         randomValues.add(randomValue);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
index e7a6500..51dc238 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSizeCalculator.java
@@ -135,7 +135,7 @@ public class TestRegionSizeCalculator {
   */
   private Admin mockAdmin(RegionLoad... regionLoadArray) throws Exception {
     Admin mockAdmin = Mockito.mock(Admin.class);
-    Map<byte[], RegionLoad> regionLoads = new TreeMap<byte[], RegionLoad>(Bytes.BYTES_COMPARATOR);
+    Map<byte[], RegionLoad> regionLoads = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for (RegionLoad regionLoad : regionLoadArray) {
       regionLoads.put(regionLoad.getName(), regionLoad);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
index ea2bc7a..931830f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitCalculator.java
@@ -126,8 +126,7 @@ public class TestRegionSplitCalculator {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
     SimpleRange c = new SimpleRange(Bytes.toBytes("C"), Bytes.toBytes("D"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
     sc.add(c);
@@ -142,8 +141,7 @@ public class TestRegionSplitCalculator {
 
   @Test
   public void testSplitCalculatorNoEdge() {
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
 
     Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
     LOG.info("Empty");
@@ -155,8 +153,7 @@ public class TestRegionSplitCalculator {
   @Test
   public void testSplitCalculatorSingleEdge() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
 
     Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
@@ -169,8 +166,7 @@ public class TestRegionSplitCalculator {
   @Test
   public void testSplitCalculatorDegenerateEdge() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("A"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
 
     Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
@@ -185,8 +181,7 @@ public class TestRegionSplitCalculator {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
     SimpleRange c = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
     sc.add(c);
@@ -204,8 +199,7 @@ public class TestRegionSplitCalculator {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
     SimpleRange c = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("D"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
     sc.add(c);
@@ -223,8 +217,7 @@ public class TestRegionSplitCalculator {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
     SimpleRange c = new SimpleRange(Bytes.toBytes("E"), Bytes.toBytes("F"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
     sc.add(c);
@@ -241,8 +234,7 @@ public class TestRegionSplitCalculator {
   public void testSplitCalculatorOverreach() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("D"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
 
@@ -258,8 +250,7 @@ public class TestRegionSplitCalculator {
   public void testSplitCalculatorFloor() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
 
@@ -274,8 +265,7 @@ public class TestRegionSplitCalculator {
   public void testSplitCalculatorCeil() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
     SimpleRange b = new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("C"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
 
@@ -292,8 +282,7 @@ public class TestRegionSplitCalculator {
     SimpleRange b = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
 
     LOG.info(a.tiebreaker + " - " + b.tiebreaker);
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
     sc.add(b);
 
@@ -307,8 +296,7 @@ public class TestRegionSplitCalculator {
   @Test
   public void testSplitCalculatorBackwards() {
     SimpleRange a = new SimpleRange(Bytes.toBytes("C"), Bytes.toBytes("A"));
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(a);
 
     Multimap<byte[], SimpleRange> regions = sc.calcCoverage();
@@ -320,8 +308,7 @@ public class TestRegionSplitCalculator {
 
   @Test
   public void testComplex() {
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("Am")));
     sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C")));
     sc.add(new SimpleRange(Bytes.toBytes("Am"), Bytes.toBytes("C")));
@@ -344,8 +331,7 @@ public class TestRegionSplitCalculator {
 
   @Test
   public void testBeginEndMarker() {
-    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<SimpleRange>(
-        cmp);
+    RegionSplitCalculator<SimpleRange> sc = new RegionSplitCalculator<>(cmp);
     sc.add(new SimpleRange(Bytes.toBytes(""), Bytes.toBytes("A")));
     sc.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B")));
     sc.add(new SimpleRange(Bytes.toBytes("B"), Bytes.toBytes("")));
@@ -364,7 +350,7 @@ public class TestRegionSplitCalculator {
     SimpleRange ae = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("E"));
     SimpleRange ac = new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C"));
 
-    Collection<SimpleRange> bigOverlap = new ArrayList<SimpleRange>(8);
+    Collection<SimpleRange> bigOverlap = new ArrayList<>(8);
     bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("E")));
     bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("C")));
     bigOverlap.add(new SimpleRange(Bytes.toBytes("A"), Bytes.toBytes("B")));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
index c195762..0c5b980 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java
@@ -78,7 +78,7 @@ public class TestRegionSplitter {
      */
     @Test
     public void testCreatePresplitTableHex() throws Exception {
-      final List<byte[]> expectedBounds = new ArrayList<byte[]>(17);
+      final List<byte[]> expectedBounds = new ArrayList<>(17);
       expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
       expectedBounds.add("10000000".getBytes());
       expectedBounds.add("20000000".getBytes());
@@ -108,7 +108,7 @@ public class TestRegionSplitter {
      */
     @Test
     public void testCreatePresplitTableUniform() throws Exception {
-      List<byte[]> expectedBounds = new ArrayList<byte[]>(17);
+      List<byte[]> expectedBounds = new ArrayList<>(17);
       expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
       expectedBounds.add(new byte[] {      0x10, 0, 0, 0, 0, 0, 0, 0});
       expectedBounds.add(new byte[] {      0x20, 0, 0, 0, 0, 0, 0, 0});
@@ -293,7 +293,7 @@ public class TestRegionSplitter {
 
   @Test
   public void noopRollingSplit() throws Exception {
-    final List<byte[]> expectedBounds = new ArrayList<byte[]>(1);
+    final List<byte[]> expectedBounds = new ArrayList<>(1);
     expectedBounds.add(ArrayUtils.EMPTY_BYTE_ARRAY);
     rollingSplitAndVerify(TableName.valueOf(TestRegionSplitter.class.getSimpleName()),
         "UniformSplit", expectedBounds);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
index 839d1cc..0efa6da 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedCopyOnWriteSet.java
@@ -34,7 +34,7 @@ public class TestSortedCopyOnWriteSet {
 
   @Test
   public void testSorting() throws Exception {
-    SortedCopyOnWriteSet<String> set = new SortedCopyOnWriteSet<String>();
+    SortedCopyOnWriteSet<String> set = new SortedCopyOnWriteSet<>();
     set.add("c");
     set.add("d");
     set.add("a");
@@ -52,8 +52,7 @@ public class TestSortedCopyOnWriteSet {
 
   @Test
   public void testIteratorIsolation() throws Exception {
-    SortedCopyOnWriteSet<String> set = new SortedCopyOnWriteSet<String>(
-        Lists.newArrayList("a", "b", "c", "d", "e"));
+    SortedCopyOnWriteSet<String> set = new SortedCopyOnWriteSet<>(Lists.newArrayList("a", "b", "c", "d", "e"));
 
     // isolation of remove()
     Iterator<String> iter = set.iterator();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java
index 454435b..bdae0e5 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestSortedList.java
@@ -45,7 +45,7 @@ public class TestSortedList {
 
   @Test
   public void testSorting() throws Exception {
-    SortedList<String> list = new SortedList<String>(new StringComparator());
+    SortedList<String> list = new SortedList<>(new StringComparator());
     list.add("c");
     list.add("d");
     list.add("a");
@@ -72,8 +72,7 @@ public class TestSortedList {
 
   @Test
   public void testReadOnlyIterators() throws Exception {
-    SortedList<String> list = new SortedList<String>(
-        Lists.newArrayList("a", "b", "c", "d", "e"), new StringComparator());
+    SortedList<String> list = new SortedList<>(Lists.newArrayList("a", "b", "c", "d", "e"), new StringComparator());
 
     Iterator<String> i = list.iterator();
     i.next();
@@ -108,8 +107,7 @@ public class TestSortedList {
 
   @Test
   public void testIteratorIsolation() throws Exception {
-    SortedList<String> list = new SortedList<String>(
-        Lists.newArrayList("a", "b", "c", "d", "e"), new StringComparator());
+    SortedList<String> list = new SortedList<>(Lists.newArrayList("a", "b", "c", "d", "e"), new StringComparator());
 
     // isolation of remove()
     Iterator<String> iter = list.iterator();
@@ -161,8 +159,7 @@ public class TestSortedList {
 
   @Test
   public void testRandomAccessIsolation() throws Exception {
-    SortedList<String> list = new SortedList<String>(
-        Lists.newArrayList("a", "b", "c"), new StringComparator());
+    SortedList<String> list = new SortedList<>(Lists.newArrayList("a", "b", "c"), new StringComparator());
     List<String> innerList = list.get();
     assertEquals("a", innerList.get(0));
     assertEquals("b", innerList.get(1));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index 3701094..a7c4ad1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -228,7 +228,7 @@ public class OfflineMetaRebuildTestCore {
     Scan s = new Scan();
     Table meta = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME);
     ResultScanner scanner = meta.getScanner(s);
-    List<Delete> dels = new ArrayList<Delete>();
+    List<Delete> dels = new ArrayList<>();
     for (Result r : scanner) {
       HRegionInfo info =
           MetaTableAccessor.getHRegionInfo(r);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
index 2d32b5e..e2ea838 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/IOTestProvider.java
@@ -108,7 +108,7 @@ public class IOTestProvider implements WALProvider {
 
   @Override
   public List<WAL> getWALs() throws IOException {
-    List<WAL> wals = new ArrayList<WAL>(1);
+    List<WAL> wals = new ArrayList<>(1);
     wals.add(log);
     return wals;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
index 8523e69..73725bb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestBoundedRegionGroupingStrategy.java
@@ -178,7 +178,7 @@ public class TestBoundedRegionGroupingStrategy {
       FSUtils.setRootDir(CONF, TEST_UTIL.getDataTestDirOnTestFS());
 
       wals = new WALFactory(CONF, null, "setMembershipDedups");
-      final Set<WAL> seen = new HashSet<WAL>(temp * 4);
+      final Set<WAL> seen = new HashSet<>(temp * 4);
       final Random random = new Random();
       int count = 0;
       // we know that this should see one of the wals more than once

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
index d82c3b6..f752735 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestFSHLogProvider.java
@@ -193,12 +193,12 @@ public class TestFSHLogProvider {
     final HTableDescriptor htd2 =
         new HTableDescriptor(TableName.valueOf(currentTest.getMethodName() + "2"))
             .addFamily(new HColumnDescriptor("row"));
-    NavigableMap<byte[], Integer> scopes1 = new TreeMap<byte[], Integer>(
+    NavigableMap<byte[], Integer> scopes1 = new TreeMap<>(
         Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd.getFamiliesKeys()) {
       scopes1.put(fam, 0);
     }
-    NavigableMap<byte[], Integer> scopes2 = new TreeMap<byte[], Integer>(
+    NavigableMap<byte[], Integer> scopes2 = new TreeMap<>(
         Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd2.getFamiliesKeys()) {
       scopes2.put(fam, 0);
@@ -275,12 +275,12 @@ public class TestFSHLogProvider {
         new HTableDescriptor(TableName.valueOf(currentTest.getMethodName() + "1")).addFamily(new HColumnDescriptor("row"));
     HTableDescriptor table2 =
         new HTableDescriptor(TableName.valueOf(currentTest.getMethodName() + "2")).addFamily(new HColumnDescriptor("row"));
-    NavigableMap<byte[], Integer> scopes1 = new TreeMap<byte[], Integer>(
+    NavigableMap<byte[], Integer> scopes1 = new TreeMap<>(
         Bytes.BYTES_COMPARATOR);
     for(byte[] fam : table1.getFamiliesKeys()) {
       scopes1.put(fam, 0);
     }
-    NavigableMap<byte[], Integer> scopes2 = new TreeMap<byte[], Integer>(
+    NavigableMap<byte[], Integer> scopes2 = new TreeMap<>(
         Bytes.BYTES_COMPARATOR);
     for(byte[] fam : table2.getFamiliesKeys()) {
       scopes2.put(fam, 0);
@@ -370,7 +370,7 @@ public class TestFSHLogProvider {
     localConf.set(WALFactory.WAL_PROVIDER, FSHLogProvider.class.getName());
     final WALFactory wals = new WALFactory(localConf, null, currentTest.getMethodName());
     try {
-      final Set<WAL> seen = new HashSet<WAL>(1);
+      final Set<WAL> seen = new HashSet<>(1);
       final Random random = new Random();
       assertTrue("first attempt to add WAL from default provider should work.",
           seen.add(wals.getWAL(Bytes.toBytes(random.nextInt()), null)));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
index 913ea48..7497d67 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestSecureWAL.java
@@ -116,8 +116,7 @@ public class TestSecureWAL {
     TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(tableName.getName()));
-    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-        Bytes.BYTES_COMPARATOR);
+    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd.getFamiliesKeys()) {
       scopes.put(fam, 0);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
index 3318f61..f02e244 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFactory.java
@@ -180,8 +180,7 @@ public class TestWALFactory {
     }
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor("column"));
-    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-        Bytes.BYTES_COMPARATOR);
+    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd.getFamiliesKeys()) {
       scopes.put(fam, 0);
     }
@@ -259,8 +258,7 @@ public class TestWALFactory {
                   null,null, false);
       HTableDescriptor htd = new HTableDescriptor(tableName);
       htd.addFamily(new HColumnDescriptor(tableName.getName()));
-      NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-          Bytes.BYTES_COMPARATOR);
+      NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       for(byte[] fam : htd.getFamiliesKeys()) {
         scopes.put(fam, 0);
       }
@@ -385,8 +383,7 @@ public class TestWALFactory {
 
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor(tableName.getName()));
-    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-        Bytes.BYTES_COMPARATOR);
+    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd.getFamiliesKeys()) {
       scopes.put(fam, 0);
     }
@@ -637,8 +634,7 @@ public class TestWALFactory {
     long timestamp = System.currentTimeMillis();
     HTableDescriptor htd = new HTableDescriptor(tableName);
     htd.addFamily(new HColumnDescriptor("column"));
-    NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-        Bytes.BYTES_COMPARATOR);
+    NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for(byte[] fam : htd.getFamiliesKeys()) {
       scopes.put(fam, 0);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
index c69150f..65401de 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
@@ -107,8 +107,7 @@ public class TestWALFiltering {
   public void testFlushedSequenceIdsSentToHMaster()
   throws IOException, InterruptedException,
  org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException, ServiceException {
-    SortedMap<byte[], Long> allFlushedSequenceIds =
-        new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
-    SortedMap<byte[], Long> allFlushedSequenceIds =
-        new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
+    SortedMap<byte[], Long> allFlushedSequenceIds = new TreeMap<>(Bytes.BYTES_COMPARATOR);
     for (int i = 0; i < NUM_RS; ++i) {
       flushAllRegions(i);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
index 6f4a797..ecde00d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALReaderOnSecureWAL.java
@@ -104,8 +104,7 @@ public class TestWALReaderOnSecureWAL {
       TableName tableName = TableName.valueOf(tblName);
       HTableDescriptor htd = new HTableDescriptor(tableName);
       htd.addFamily(new HColumnDescriptor(tableName.getName()));
-      NavigableMap<byte[], Integer> scopes = new TreeMap<byte[], Integer>(
-          Bytes.BYTES_COMPARATOR);
+      NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       for(byte[] fam : htd.getFamiliesKeys()) {
         scopes.put(fam, 0);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
index 3b15cef..611f8c3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALSplit.java
@@ -135,7 +135,7 @@ public class TestWALSplit {
   private static final byte[] QUALIFIER = "q1".getBytes();
   private static final byte[] VALUE = "v1".getBytes();
   private static final String WAL_FILE_PREFIX = "wal.dat.";
-  private static List<String> REGIONS = new ArrayList<String>();
+  private static List<String> REGIONS = new ArrayList<>();
   private static final String HBASE_SKIP_ERRORS = "hbase.hlog.split.skip.errors";
   private static String ROBBER;
   private static String ZOMBIE;
@@ -158,7 +158,7 @@ public class TestWALSplit {
     // This is how you turn off shortcircuit read currently.  TODO: Fix.  Should read config.
     System.setProperty("hbase.tests.use.shortcircuit.reads", "false");
     // Create fake maping user to group and set it to the conf.
-    Map<String, String []> u2g_map = new HashMap<String, String []>(2);
+    Map<String, String []> u2g_map = new HashMap<>(2);
     ROBBER = User.getCurrent().getName() + "-robber";
     ZOMBIE = User.getCurrent().getName() + "-zombie";
     u2g_map.put(ROBBER, GROUP);
@@ -585,7 +585,7 @@ public class TestWALSplit {
         .filter(x -> x != FaultyProtobufLogReader.FailureType.NONE).collect(Collectors.toList());
     for (FaultyProtobufLogReader.FailureType failureType : failureTypes) {
       final Set<String> walDirContents = splitCorruptWALs(failureType);
-      final Set<String> archivedLogs = new HashSet<String>();
+      final Set<String> archivedLogs = new HashSet<>();
       final StringBuilder archived = new StringBuilder("Archived logs in CORRUPTDIR:");
       for (FileStatus log : fs.listStatus(CORRUPTDIR)) {
         archived.append("\n\t").append(log.toString());
@@ -630,7 +630,7 @@ public class TestWALSplit {
       wals = new WALFactory(conf, null, name.getMethodName());
       generateWALs(-1);
       // Our reader will render all of these files corrupt.
-      final Set<String> walDirContents = new HashSet<String>();
+      final Set<String> walDirContents = new HashSet<>();
       for (FileStatus status : fs.listStatus(WALDIR)) {
         walDirContents.add(status.getPath().getName());
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 9bb3d7d..53cc49b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -141,8 +141,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
       this.numFamilies = htd.getColumnFamilyCount();
       this.region = region;
       this.htd = htd;
-      scopes = new TreeMap<byte[], Integer>(
-          Bytes.BYTES_COMPARATOR);
+      scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
       for(byte[] fam : htd.getFamiliesKeys()) {
         scopes.put(fam, 0);
       }
@@ -420,7 +419,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
       throws IOException {
     WAL.Reader reader = wals.createReader(wal.getFileSystem(getConf()), wal);
     long count = 0;
-    Map<String, Long> sequenceIds = new HashMap<String, Long>();
+    Map<String, Long> sequenceIds = new HashMap<>();
     try {
       while (true) {
         WAL.Entry e = reader.next();
@@ -490,7 +489,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
     System.exit(1);
   }
 
-  private final Set<WAL> walsListenedTo = new HashSet<WAL>();
+  private final Set<WAL> walsListenedTo = new HashSet<>();
 
   private HRegion openRegion(final FileSystem fs, final Path dir, final HTableDescriptor htd,
       final WALFactory wals, final long whenToRoll, final LogRoller roller) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
index 6350af8..b4ac59c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZKMulti.java
@@ -85,24 +85,24 @@ public class TestZKMulti {
     ZKUtil.multiOrSequential(zkw, null, false);
 
     // empty multi
-    ZKUtil.multiOrSequential(zkw, new LinkedList<ZKUtilOp>(), false);
+    ZKUtil.multiOrSequential(zkw, new LinkedList<>(), false);
 
     // single create
     String path = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testSimpleMulti");
-    LinkedList<ZKUtilOp> singleCreate = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> singleCreate = new LinkedList<>();
     singleCreate.add(ZKUtilOp.createAndFailSilent(path, new byte[0]));
     ZKUtil.multiOrSequential(zkw, singleCreate, false);
     assertTrue(ZKUtil.checkExists(zkw, path) != -1);
 
     // single setdata
-    LinkedList<ZKUtilOp> singleSetData = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> singleSetData = new LinkedList<>();
     byte [] data = Bytes.toBytes("foobar");
     singleSetData.add(ZKUtilOp.setData(path, data));
     ZKUtil.multiOrSequential(zkw, singleSetData, false);
     assertTrue(Bytes.equals(ZKUtil.getData(zkw, path), data));
 
     // single delete
-    LinkedList<ZKUtilOp> singleDelete = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> singleDelete = new LinkedList<>();
     singleDelete.add(ZKUtilOp.deleteNodeFailSilent(path));
     ZKUtil.multiOrSequential(zkw, singleDelete, false);
     assertTrue(ZKUtil.checkExists(zkw, path) == -1);
@@ -117,7 +117,7 @@ public class TestZKMulti {
     String path5 = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testComplexMulti5");
     String path6 = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testComplexMulti6");
     // create 4 nodes that we'll setData on or delete later
-    LinkedList<ZKUtilOp> create4Nodes = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> create4Nodes = new LinkedList<>();
     create4Nodes.add(ZKUtilOp.createAndFailSilent(path1, Bytes.toBytes(path1)));
     create4Nodes.add(ZKUtilOp.createAndFailSilent(path2, Bytes.toBytes(path2)));
     create4Nodes.add(ZKUtilOp.createAndFailSilent(path3, Bytes.toBytes(path3)));
@@ -129,7 +129,7 @@ public class TestZKMulti {
     assertTrue(Bytes.equals(ZKUtil.getData(zkw, path4), Bytes.toBytes(path4)));
 
     // do multiple of each operation (setData, delete, create)
-    LinkedList<ZKUtilOp> ops = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> ops = new LinkedList<>();
     // setData
     ops.add(ZKUtilOp.setData(path1, Bytes.add(Bytes.toBytes(path1), Bytes.toBytes(path1))));
     ops.add(ZKUtilOp.setData(path2, Bytes.add(Bytes.toBytes(path2), Bytes.toBytes(path2))));
@@ -155,7 +155,7 @@ public class TestZKMulti {
     // try to delete a node that doesn't exist
     boolean caughtNoNode = false;
     String path = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testSingleFailureZ");
-    LinkedList<ZKUtilOp> ops = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> ops = new LinkedList<>();
     ops.add(ZKUtilOp.deleteNodeFailSilent(path));
     try {
       ZKUtil.multiOrSequential(zkw, ops, false);
@@ -166,7 +166,7 @@ public class TestZKMulti {
 
     // try to setData on a node that doesn't exist
     caughtNoNode = false;
-    ops = new LinkedList<ZKUtilOp>();
+    ops = new LinkedList<>();
     ops.add(ZKUtilOp.setData(path, Bytes.toBytes(path)));
     try {
       ZKUtil.multiOrSequential(zkw, ops, false);
@@ -177,7 +177,7 @@ public class TestZKMulti {
 
     // try to create on a node that already exists
     boolean caughtNodeExists = false;
-    ops = new LinkedList<ZKUtilOp>();
+    ops = new LinkedList<>();
     ops.add(ZKUtilOp.createAndFailSilent(path, Bytes.toBytes(path)));
     ZKUtil.multiOrSequential(zkw, ops, false);
     try {
@@ -194,7 +194,7 @@ public class TestZKMulti {
     String pathA = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testSingleFailureInMultiA");
     String pathB = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testSingleFailureInMultiB");
     String pathC = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testSingleFailureInMultiC");
-    LinkedList<ZKUtilOp> ops = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> ops = new LinkedList<>();
     ops.add(ZKUtilOp.createAndFailSilent(pathA, Bytes.toBytes(pathA)));
     ops.add(ZKUtilOp.createAndFailSilent(pathB, Bytes.toBytes(pathB)));
     ops.add(ZKUtilOp.deleteNodeFailSilent(pathC));
@@ -217,14 +217,14 @@ public class TestZKMulti {
     String pathY = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureY");
     String pathZ = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureZ");
     // create X that we will use to fail create later
-    LinkedList<ZKUtilOp> ops = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> ops = new LinkedList<>();
     ops.add(ZKUtilOp.createAndFailSilent(pathX, Bytes.toBytes(pathX)));
     ZKUtil.multiOrSequential(zkw, ops, false);
 
     // fail one of each create ,setData, delete
     String pathV = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureV");
     String pathW = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "testMultiFailureW");
-    ops = new LinkedList<ZKUtilOp>();
+    ops = new LinkedList<>();
     ops.add(ZKUtilOp.createAndFailSilent(pathX, Bytes.toBytes(pathX))); // fail  -- already exists
     ops.add(ZKUtilOp.setData(pathY, Bytes.toBytes(pathY))); // fail -- doesn't exist
     ops.add(ZKUtilOp.deleteNodeFailSilent(pathZ)); // fail -- doesn't exist
@@ -246,7 +246,7 @@ public class TestZKMulti {
     assertTrue(ZKUtil.checkExists(zkw, pathV) == -1);
 
     // test that with multiple failures, throws an exception corresponding to first failure in list
-    ops = new LinkedList<ZKUtilOp>();
+    ops = new LinkedList<>();
     ops.add(ZKUtilOp.setData(pathY, Bytes.toBytes(pathY))); // fail -- doesn't exist
     ops.add(ZKUtilOp.createAndFailSilent(pathX, Bytes.toBytes(pathX))); // fail -- exists
     boolean caughtNoNode = false;
@@ -273,14 +273,14 @@ public class TestZKMulti {
     String path4 = ZKUtil.joinZNode(zkw.znodePaths.baseZNode, "runSequential4");
 
     // create some nodes that we will use later
-    LinkedList<ZKUtilOp> ops = new LinkedList<ZKUtilOp>();
+    LinkedList<ZKUtilOp> ops = new LinkedList<>();
     ops.add(ZKUtilOp.createAndFailSilent(path1, Bytes.toBytes(path1)));
     ops.add(ZKUtilOp.createAndFailSilent(path2, Bytes.toBytes(path2)));
     ZKUtil.multiOrSequential(zkw, ops, false);
 
     // test that, even with operations that fail, the ones that would pass will pass
     // with runSequentialOnMultiFailure
-    ops = new LinkedList<ZKUtilOp>();
+    ops = new LinkedList<>();
     ops.add(ZKUtilOp.setData(path1, Bytes.add(Bytes.toBytes(path1), Bytes.toBytes(path1)))); // pass
     ops.add(ZKUtilOp.deleteNodeFailSilent(path2)); // pass
     ops.add(ZKUtilOp.deleteNodeFailSilent(path3)); // fail -- node doesn't exist
@@ -368,7 +368,7 @@ public class TestZKMulti {
 
   private void createZNodeTree(String rootZNode) throws KeeperException,
       InterruptedException {
-    List<Op> opList = new ArrayList<Op>();
+    List<Op> opList = new ArrayList<>();
     opList.add(Op.create(rootZNode, new byte[0], Ids.OPEN_ACL_UNSAFE,
         CreateMode.PERSISTENT));
     int level = 0;

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
index 26329f6..89164f4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java
@@ -325,7 +325,7 @@ public class TestZooKeeperACL {
     if (!secureZKAvailable) {
       return;
     }
-    List<ServerName> drainingServers = new ArrayList<ServerName>(1);
+    List<ServerName> drainingServers = new ArrayList<>(1);
     drainingServers.add(ServerName.parseServerName("ZZZ,123,123"));
 
     // If unable to connect to secure ZK cluster then this operation would fail.

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
index 59e5856..315d6b0 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/CallQueue.java
@@ -133,7 +133,7 @@ public class CallQueue implements BlockingQueue<Runnable> {
       throw new IllegalArgumentException(
           "A BlockingQueue cannot drain to itself.");
     }
-    List<Call> drained = new ArrayList<Call>();
+    List<Call> drained = new ArrayList<>();
     underlyingQueue.drainTo(drained, maxElements);
     for (Call r : drained) {
       updateMetrics(r);

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index 2f4336b..221786a 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -154,7 +154,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   private final AtomicLong successfulCoalescings = new AtomicLong();
   private final AtomicLong totalIncrements = new AtomicLong();
   private final ConcurrentMap<FullyQualifiedRow, Long> countersMap =
-      new ConcurrentHashMap<FullyQualifiedRow, Long>(100000, 0.75f, 1500);
+      new ConcurrentHashMap<>(100000, 0.75f, 1500);
   private final ThreadPoolExecutor pool;
   private final HBaseHandler handler;
 
@@ -166,7 +166,7 @@ public class IncrementCoalescer implements IncrementCoalescerMBean {
   @SuppressWarnings("deprecation")
   public IncrementCoalescer(HBaseHandler hand) {
     this.handler = hand;
-    LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>();
+    LinkedBlockingQueue<Runnable> queue = new LinkedBlockingQueue<>();
     pool =
         new ThreadPoolExecutor(CORE_POOL_SIZE, CORE_POOL_SIZE, 50, TimeUnit.MILLISECONDS, queue,
             Threads.newDaemonThreadFactory("IncrementCoalescer"));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
index 2a1a398..b01bacf 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/TBoundedThreadPoolServer.java
@@ -146,10 +146,10 @@ public class TBoundedThreadPoolServer extends TServer {
     int maxWorkerThreads = options.maxWorkerThreads;
     if (options.maxQueuedRequests > 0) {
       this.callQueue = new CallQueue(
-          new LinkedBlockingQueue<Call>(options.maxQueuedRequests), metrics);
+          new LinkedBlockingQueue<>(options.maxQueuedRequests), metrics);
       minWorkerThreads = maxWorkerThreads;
     } else {
-      this.callQueue = new CallQueue(new SynchronousQueue<Call>(), metrics);
+      this.callQueue = new CallQueue(new SynchronousQueue<>(), metrics);
     }
 
     ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();
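
The constructor hunk above also shows why the two queue types are paired with
different thread-pool settings: a ThreadPoolExecutor only grows past its core size
once the work queue rejects an offer, so with a bounded LinkedBlockingQueue the
server pins minWorkerThreads to maxWorkerThreads, while a SynchronousQueue has no
capacity at all and hands every call straight to a worker thread. A minimal sketch
of the distinction (pool and queue sizes here are illustrative, not the server's
defaults):

  import java.util.concurrent.LinkedBlockingQueue;
  import java.util.concurrent.SynchronousQueue;
  import java.util.concurrent.ThreadPoolExecutor;
  import java.util.concurrent.TimeUnit;

  public class QueueChoiceSketch {
    public static void main(String[] args) {
      // Bounded queue: calls wait in line; core == max so every worker
      // already exists before the queue starts filling up.
      ThreadPoolExecutor bounded = new ThreadPoolExecutor(8, 8,
          60, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000));

      // Zero-capacity queue: each submission must be picked up immediately,
      // spawning a new thread up to the maximum if none is idle.
      ThreadPoolExecutor direct = new ThreadPoolExecutor(1, 8,
          60, TimeUnit.SECONDS, new SynchronousQueue<>());

      bounded.shutdown();
      direct.shutdown();
    }
  }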

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
index 3eacfb9..0829188 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
@@ -299,7 +299,7 @@ public class ThriftServerRunner implements Runnable {
     }
 
     public static List<String> serversThatCannotSpecifyBindIP() {
-      List<String> l = new ArrayList<String>();
+      List<String> l = new ArrayList<>();
       for (ImplType t : values()) {
         if (!t.canSpecifyBindIP) {
           l.add(t.simpleClassName());
@@ -396,7 +396,7 @@ public class ThriftServerRunner implements Runnable {
 
   private void setupHTTPServer() throws IOException {
     TProtocolFactory protocolFactory = new TBinaryProtocol.Factory();
-    TProcessor processor = new Hbase.Processor<Hbase.Iface>(handler);
+    TProcessor processor = new Hbase.Processor<>(handler);
     TServlet thriftHttpServlet = new ThriftHttpServlet(processor, protocolFactory, realUser,
         conf, hbaseHandler, securityEnabled, doAsEnabled);
 
@@ -496,7 +496,7 @@ public class ThriftServerRunner implements Runnable {
       protocolFactory = new TBinaryProtocol.Factory();
     }
 
-    final TProcessor p = new Hbase.Processor<Hbase.Iface>(handler);
+    final TProcessor p = new Hbase.Processor<>(handler);
     ImplType implType = ImplType.getServerImpl(conf);
     TProcessor processor = p;
 
@@ -516,7 +516,7 @@ public class ThriftServerRunner implements Runnable {
       // Extract the name from the principal
       String name = SecurityUtil.getUserFromPrincipal(
         conf.get("hbase.thrift.kerberos.principal"));
-      Map<String, String> saslProperties = new HashMap<String, String>();
+      Map<String, String> saslProperties = new HashMap<>();
       saslProperties.put(Sasl.QOP, qop);
       TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
       saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties,
@@ -591,8 +591,7 @@ public class ThriftServerRunner implements Runnable {
         tserver = new TNonblockingServer(serverArgs);
       } else if (implType == ImplType.HS_HA) {
         THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
-        CallQueue callQueue =
-            new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
+        CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
         ExecutorService executorService = createExecutor(
             callQueue, serverArgs.getMaxWorkerThreads(), serverArgs.getMaxWorkerThreads());
         serverArgs.executorService(executorService)
@@ -603,8 +602,7 @@ public class ThriftServerRunner implements Runnable {
       } else { // THREADED_SELECTOR
         TThreadedSelectorServer.Args serverArgs =
             new HThreadedSelectorServerArgs(serverTransport, conf);
-        CallQueue callQueue =
-            new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
+        CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
         ExecutorService executorService = createExecutor(
             callQueue, serverArgs.getWorkerThreads(), serverArgs.getWorkerThreads());
         serverArgs.executorService(executorService)
@@ -781,7 +779,7 @@ public class ThriftServerRunner implements Runnable {
     protected HBaseHandler(final Configuration c,
         final UserProvider userProvider) throws IOException {
       this.conf = c;
-      scannerMap = new HashMap<Integer, ResultScannerWrapper>();
+      scannerMap = new HashMap<>();
       this.coalescer = new IncrementCoalescer(this);
 
       int cleanInterval = conf.getInt(CLEANUP_INTERVAL, 10 * 1000);
@@ -869,7 +867,7 @@ public class ThriftServerRunner implements Runnable {
     public List<ByteBuffer> getTableNames() throws IOError {
       try {
         TableName[] tableNames = this.getAdmin().listTableNames();
-        ArrayList<ByteBuffer> list = new ArrayList<ByteBuffer>(tableNames.length);
+        ArrayList<ByteBuffer> list = new ArrayList<>(tableNames.length);
         for (int i = 0; i < tableNames.length; i++) {
           list.add(ByteBuffer.wrap(tableNames[i].getName()));
         }
@@ -888,7 +886,7 @@ public class ThriftServerRunner implements Runnable {
     throws IOError {
       try (RegionLocator locator = connectionCache.getRegionLocator(getBytes(tableName))) {
         List<HRegionLocation> regionLocations = locator.getAllRegionLocations();
-        List<TRegionInfo> results = new ArrayList<TRegionInfo>(regionLocations.size());
+        List<TRegionInfo> results = new ArrayList<>(regionLocations.size());
         for (HRegionLocation regionLocation : regionLocations) {
           HRegionInfo info = regionLocation.getRegionInfo();
           ServerName serverName = regionLocation.getServerName();
@@ -1151,7 +1149,7 @@ public class ThriftServerRunner implements Runnable {
       
       Table table= null;
       try {
-        List<Get> gets = new ArrayList<Get>(rows.size());
+        List<Get> gets = new ArrayList<>(rows.size());
         table = getTable(tableName);
         if (metrics != null) {
           metrics.incNumRowKeysInBatchGet(rows.size());
@@ -1363,8 +1361,8 @@ public class ThriftServerRunner implements Runnable {
         ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp,
         Map<ByteBuffer, ByteBuffer> attributes)
         throws IOError, IllegalArgument, TException {
-      List<Put> puts = new ArrayList<Put>();
-      List<Delete> deletes = new ArrayList<Delete>();
+      List<Put> puts = new ArrayList<>();
+      List<Delete> deletes = new ArrayList<>();
 
       for (BatchMutation batch : rowBatches) {
         byte[] row = getBytes(batch.row);
@@ -1479,7 +1477,7 @@ public class ThriftServerRunner implements Runnable {
       try {
         results = resultScannerWrapper.getScanner().next(nbRows);
         if (null == results) {
-          return new ArrayList<TRowResult>();
+          return new ArrayList<>();
         }
       } catch (IOException e) {
         LOG.warn(e.getMessage(), e);
@@ -1709,8 +1707,7 @@ public class ThriftServerRunner implements Runnable {
       
       Table table = null;
       try {
-        TreeMap<ByteBuffer, ColumnDescriptor> columns =
-          new TreeMap<ByteBuffer, ColumnDescriptor>();
+        TreeMap<ByteBuffer, ColumnDescriptor> columns = new TreeMap<>();
 
         table = getTable(tableName);
         HTableDescriptor desc = table.getTableDescriptor();
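
One subtlety in the setupHTTPServer/setupServer hunks above: in TProcessor processor = new Hbase.Processor<>(handler); the declared type (TProcessor) carries no type argument, so the diamond is resolved from the constructor argument rather than the assignment target. A sketch of that inference shape, with hypothetical Handler and Processor types standing in for the thrift-generated classes:

    public class DiamondInferenceSketch {
      interface Handler { String name(); }

      // Hypothetical generic wrapper, shaped like thrift's Hbase.Processor<I>.
      static class Processor<H extends Handler> implements Runnable {
        private final H handler;
        Processor(H handler) { this.handler = handler; }
        public void run() { System.out.println("dispatching to " + handler.name()); }
      }

      public static void main(String[] args) {
        Handler handler = () -> "myHandler";
        // Runnable gives the compiler no type constraint, so Processor<Handler>
        // is inferred from the argument, matching the shape of
        // TProcessor p = new Hbase.Processor<>(handler);
        Runnable p = new Processor<>(handler);
        p.run();
      }
    }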

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index d2a95ce..7ec49fb 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -107,7 +107,7 @@ public class ThriftUtilities {
    * @return Thrift TCell array
    */
   static public List<TCell> cellFromHBase(Cell in) {
-    List<TCell> list = new ArrayList<TCell>(1);
+    List<TCell> list = new ArrayList<>(1);
     if (in != null) {
      list.add(new TCell(ByteBuffer.wrap(CellUtil.cloneValue(in)), in.getTimestamp()));
     }
@@ -123,12 +123,12 @@ public class ThriftUtilities {
   static public List<TCell> cellFromHBase(Cell[] in) {
     List<TCell> list = null;
     if (in != null) {
-      list = new ArrayList<TCell>(in.length);
+      list = new ArrayList<>(in.length);
       for (int i = 0; i < in.length; i++) {
        list.add(new TCell(ByteBuffer.wrap(CellUtil.cloneValue(in[i])), in[i].getTimestamp()));
       }
     } else {
-      list = new ArrayList<TCell>(0);
+      list = new ArrayList<>(0);
     }
     return list;
   }
@@ -149,7 +149,7 @@ public class ThriftUtilities {
    * @return Thrift TRowResult array
    */
   static public List<TRowResult> rowResultFromHBase(Result[] in, boolean sortColumns) {
-    List<TRowResult> results = new ArrayList<TRowResult>(in.length);
+    List<TRowResult> results = new ArrayList<>(in.length);
     for ( Result result_ : in) {
         if(result_ == null || result_.isEmpty()) {
             continue;
@@ -157,7 +157,7 @@ public class ThriftUtilities {
         TRowResult result = new TRowResult();
         result.row = ByteBuffer.wrap(result_.getRow());
         if (sortColumns) {
-          result.sortedColumns = new ArrayList<TColumn>();
+          result.sortedColumns = new ArrayList<>();
           for (Cell kv : result_.rawCells()) {
             result.sortedColumns.add(new TColumn(
                 ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
@@ -165,7 +165,7 @@ public class ThriftUtilities {
                new TCell(ByteBuffer.wrap(CellUtil.cloneValue(kv)), kv.getTimestamp())));
           }
         } else {
-          result.columns = new TreeMap<ByteBuffer, TCell>();
+          result.columns = new TreeMap<>();
           for (Cell kv : result_.rawCells()) {
             result.columns.put(
                 ByteBuffer.wrap(KeyValue.makeColumn(CellUtil.cloneFamily(kv),
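
The ThriftUtilities hunks above keep the existing pre-sizing: the argument to new ArrayList<>(n) is an initial capacity, not contents, so converting a fixed-length array never resizes the backing array. A sketch of that conversion pattern under the same assumption, with byte[][] standing in for Cell[]:

    import java.nio.ByteBuffer;
    import java.util.ArrayList;
    import java.util.List;

    public class PresizedListSketch {
      static List<ByteBuffer> wrapAll(byte[][] in) {
        if (in == null) {
          return new ArrayList<>(0); // empty, as in the null branch above
        }
        // Capacity equals the input length, so add() never reallocates.
        List<ByteBuffer> list = new ArrayList<>(in.length);
        for (byte[] b : in) {
          list.add(ByteBuffer.wrap(b));
        }
        return list;
      }

      public static void main(String[] args) {
        byte[][] cells = { "a".getBytes(), "b".getBytes() };
        System.out.println(wrapAll(cells).size()); // prints 2
      }
    }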

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
index 5a68147..acad62c 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftHBaseServiceHandler.java
@@ -87,8 +87,7 @@ public class ThriftHBaseServiceHandler implements THBaseService.Iface {
   // nextScannerId and scannerMap are used to manage scanner state
   // TODO: Cleanup thread for Scanners, Scanner id wrap
   private final AtomicInteger nextScannerId = new AtomicInteger(0);
-  private final Map<Integer, ResultScanner> scannerMap =
-      new ConcurrentHashMap<Integer, ResultScanner>();
+  private final Map<Integer, ResultScanner> scannerMap = new ConcurrentHashMap<>();
 
   private final ConnectionCache connectionCache;
 

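The field above pairs an AtomicInteger id generator with a ConcurrentHashMap, and the diamond works identically for concurrent collections because the key and value types come from the field declaration. A small sketch of that scanner-registry pattern; String stands in for ResultScanner:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicInteger;

    public class ScannerRegistrySketch {
      private final AtomicInteger nextScannerId = new AtomicInteger(0);
      // Key/value types are taken from the declaration, not the call site.
      private final Map<Integer, String> scannerMap = new ConcurrentHashMap<>();

      int register(String scanner) {
        int id = nextScannerId.incrementAndGet();
        scannerMap.put(id, scanner);
        return id;
      }

      String lookup(int id) {
        return scannerMap.get(id);
      }

      public static void main(String[] args) {
        ScannerRegistrySketch registry = new ScannerRegistrySketch();
        int id = registry.register("scanner-1");
        System.out.println(id + " -> " + registry.lookup(id));
      }
    }
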
http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
index d027c77..560ae64 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftServer.java
@@ -195,7 +195,7 @@ public class ThriftServer extends Configured implements Tool {
     } else if (qop == null) {
       return new TTransportFactory();
     } else {
-      Map<String, String> saslProperties = new HashMap<String, String>();
+      Map<String, String> saslProperties = new HashMap<>();
       saslProperties.put(Sasl.QOP, qop.getSaslQop());
       TSaslServerTransport.Factory saslFactory = new TSaslServerTransport.Factory();
       saslFactory.addServerDefinition("GSSAPI", name, host, saslProperties,
@@ -306,9 +306,9 @@ public class ThriftServer extends Configured implements Tool {
       int workerThreads, int maxCallQueueSize, ThriftMetrics metrics) {
     CallQueue callQueue;
     if (maxCallQueueSize > 0) {
-      callQueue = new CallQueue(new LinkedBlockingQueue<Call>(maxCallQueueSize), metrics);
+      callQueue = new CallQueue(new LinkedBlockingQueue<>(maxCallQueueSize), metrics);
     } else {
-      callQueue = new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
+      callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
     }
 
     ThreadFactoryBuilder tfb = new ThreadFactoryBuilder();

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
index 0001b3f..7b4a82b 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
@@ -139,7 +139,7 @@ public class ThriftUtilities {
    * @see #getFromThrift(TGet)
    */
   public static List<Get> getsFromThrift(List<TGet> in) throws IOException {
-    List<Get> out = new ArrayList<Get>(in.size());
+    List<Get> out = new ArrayList<>(in.size());
     for (TGet get : in) {
       out.add(getFromThrift(get));
     }
@@ -160,7 +160,7 @@ public class ThriftUtilities {
     if (row != null) {
       out.setRow(in.getRow());
     }
-    List<TColumnValue> columnValues = new ArrayList<TColumnValue>(raw.length);
+    List<TColumnValue> columnValues = new ArrayList<>(raw.length);
     for (Cell kv : raw) {
       TColumnValue col = new TColumnValue();
       col.setFamily(CellUtil.cloneFamily(kv));
@@ -186,7 +186,7 @@ public class ThriftUtilities {
    * @see #resultFromHBase(Result)
    */
   public static List<TResult> resultsFromHBase(Result[] in) {
-    List<TResult> out = new ArrayList<TResult>(in.length);
+    List<TResult> out = new ArrayList<>(in.length);
     for (Result result : in) {
       out.add(resultFromHBase(result));
     }
@@ -245,7 +245,7 @@ public class ThriftUtilities {
    * @see #putFromThrift(TPut)
    */
   public static List<Put> putsFromThrift(List<TPut> in) {
-    List<Put> out = new ArrayList<Put>(in.size());
+    List<Put> out = new ArrayList<>(in.size());
     for (TPut put : in) {
       out.add(putFromThrift(put));
     }
@@ -318,7 +318,7 @@ public class ThriftUtilities {
    */
 
   public static List<Delete> deletesFromThrift(List<TDelete> in) {
-    List<Delete> out = new ArrayList<Delete>(in.size());
+    List<Delete> out = new ArrayList<>(in.size());
     for (TDelete delete : in) {
       out.add(deleteFromThrift(delete));
     }
@@ -328,7 +328,7 @@ public class ThriftUtilities {
   public static TDelete deleteFromHBase(Delete in) {
     TDelete out = new TDelete(ByteBuffer.wrap(in.getRow()));
 
-    List<TColumn> columns = new ArrayList<TColumn>(in.getFamilyCellMap().entrySet().size());
+    List<TColumn> columns = new ArrayList<>(in.getFamilyCellMap().entrySet().size());
     long rowTimestamp = in.getTimeStamp();
     if (rowTimestamp != HConstants.LATEST_TIMESTAMP) {
       out.setTimestamp(rowTimestamp);
@@ -505,7 +505,7 @@ public class ThriftUtilities {
   }
 
   public static List<THRegionLocation> regionLocationsFromHBase(List<HRegionLocation> locations) {
-    List<THRegionLocation> tlocations = new ArrayList<THRegionLocation>(locations.size());
+    List<THRegionLocation> tlocations = new ArrayList<>(locations.size());
     for (HRegionLocation hrl:locations) {
       tlocations.add(regionLocationFromHBase(hrl));
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
index b646009..e595847 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestCallQueue.java
@@ -56,7 +56,7 @@ public class TestCallQueue {
 
   @Parameters
   public static Collection<Object[]> getParameters() {
-    Collection<Object[]> parameters = new ArrayList<Object[]>();
+    Collection<Object[]> parameters = new ArrayList<>();
     for (int elementsAdded : new int[] {100, 200, 300}) {
       for (int elementsRemoved : new int[] {0, 20, 100}) {
         parameters.add(new Object[]{new Integer(elementsAdded),
@@ -77,8 +77,7 @@ public class TestCallQueue {
   @Test(timeout = 60000)
   public void testPutTake() throws Exception {
     ThriftMetrics metrics = createMetrics();
-    CallQueue callQueue = new CallQueue(
-        new LinkedBlockingQueue<Call>(), metrics);
+    CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
     for (int i = 0; i < elementsAdded; ++i) {
       callQueue.put(createDummyRunnable());
     }
@@ -91,8 +90,7 @@ public class TestCallQueue {
   @Test(timeout = 60000)
   public void testOfferPoll() throws Exception {
     ThriftMetrics metrics = createMetrics();
-    CallQueue callQueue = new CallQueue(
-        new LinkedBlockingQueue<Call>(), metrics);
+    CallQueue callQueue = new CallQueue(new LinkedBlockingQueue<>(), metrics);
     for (int i = 0; i < elementsAdded; ++i) {
       callQueue.offer(createDummyRunnable());
     }
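
For context, getParameters() above builds a 3x3 cross product of queue sizes. The unchanged context line still boxes with new Integer(...); autoboxing makes that explicit construction unnecessary, as in this sketch of the same matrix (not part of the patch):

    import java.util.ArrayList;
    import java.util.Collection;

    public class ParameterMatrixSketch {
      public static Collection<Object[]> getParameters() {
        Collection<Object[]> parameters = new ArrayList<>();
        for (int elementsAdded : new int[] {100, 200, 300}) {
          for (int elementsRemoved : new int[] {0, 20, 100}) {
            // Autoboxing converts the ints to Integer on insertion.
            parameters.add(new Object[] { elementsAdded, elementsRemoved });
          }
        }
        return parameters;
      }

      public static void main(String[] args) {
        System.out.println(getParameters().size()); // prints 9
      }
    }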

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
index 26019be..c04b36f 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftHttpServer.java
@@ -127,7 +127,7 @@ public class TestThriftHttpServer {
   }
 
   private void runThriftServer(int customHeaderSize) throws Exception {
-    List<String> args = new ArrayList<String>(3);
+    List<String> args = new ArrayList<>(3);
     port = HBaseTestingUtility.randomFreePort();
     args.add("-" + ThriftServer.PORT_OPTION);
     args.add(String.valueOf(port));

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
index ff4bc6a..d0052e5 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServer.java
@@ -280,13 +280,13 @@ public class TestThriftServer {
   }
 
   public static void doTestIncrements(HBaseHandler handler) throws Exception {
-    List<Mutation> mutations = new ArrayList<Mutation>(1);
+    List<Mutation> mutations = new ArrayList<>(1);
     mutations.add(new Mutation(false, columnAAname, valueEname, true));
     mutations.add(new Mutation(false, columnAname, valueEname, true));
     handler.mutateRow(tableAname, rowAname, mutations, null);
     handler.mutateRow(tableAname, rowBname, mutations, null);
 
-    List<TIncrement> increments = new ArrayList<TIncrement>(3);
+    List<TIncrement> increments = new ArrayList<>(3);
     increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));
     increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));
     increments.add(new TIncrement(tableAname, rowBname, columnAAname, 7));
@@ -377,7 +377,7 @@ public class TestThriftServer {
     assertEquals(0, size);
 
     // Try null mutation
-    List<Mutation> mutations = new ArrayList<Mutation>(1);
+    List<Mutation> mutations = new ArrayList<>(1);
     mutations.add(new Mutation(false, columnAname, null, true));
     handler.mutateRow(tableAname, rowAname, mutations, null);
     TRowResult rowResult3 = handler.getRow(tableAname, rowAname, null).get(0);
@@ -436,7 +436,7 @@ public class TestThriftServer {
     // ColumnAname has been deleted, and will never be visible even with a getRowTs()
     assertFalse(rowResult2.columns.containsKey(columnAname));
 
-    List<ByteBuffer> columns = new ArrayList<ByteBuffer>(1);
+    List<ByteBuffer> columns = new ArrayList<>(1);
     columns.add(columnBname);
 
     rowResult1 = handler.getRowWithColumns(tableAname, rowAname, columns, null).get(0);
@@ -555,7 +555,7 @@ public class TestThriftServer {
     assertEquals(rowResult6.sortedColumns.size(), 1);
     assertEquals(rowResult6.sortedColumns.get(0).getCell().value, valueCname);
 
-    List<Mutation> rowBmutations = new ArrayList<Mutation>(20);
+    List<Mutation> rowBmutations = new ArrayList<>(20);
     for (int i = 0; i < 20; i++) {
       rowBmutations.add(new Mutation(false, asByteBuffer("columnA:" + i), valueCname, true));
     }
@@ -668,13 +668,13 @@ public class TestThriftServer {
         UserProvider.instantiate(UTIL.getConfiguration()));
     handler.createTable(tableAname, getColumnDescriptors());
     try {
-      List<Mutation> mutations = new ArrayList<Mutation>(1);
+      List<Mutation> mutations = new ArrayList<>(1);
       mutations.add(new Mutation(false, columnAname, valueAname, true));
       handler.mutateRow(tableAname, rowAname, mutations, null);
 
-      List<ByteBuffer> columnList = new ArrayList<ByteBuffer>(1);
+      List<ByteBuffer> columnList = new ArrayList<>(1);
       columnList.add(columnAname);
-      List<ByteBuffer> valueList = new ArrayList<ByteBuffer>(1);
+      List<ByteBuffer> valueList = new ArrayList<>(1);
       valueList.add(valueBname);
 
       TAppend append = new TAppend(tableAname, rowAname, columnList, valueList);
@@ -702,7 +702,7 @@ public class TestThriftServer {
         UserProvider.instantiate(UTIL.getConfiguration()));
     handler.createTable(tableAname, getColumnDescriptors());
     try {
-      List<Mutation> mutations = new ArrayList<Mutation>(1);
+      List<Mutation> mutations = new ArrayList<>(1);
       mutations.add(new Mutation(false, columnAname, valueAname, true));
       Mutation putB = (new Mutation(false, columnBname, valueBname, true));
 
@@ -796,7 +796,7 @@ public class TestThriftServer {
    * default ColumnDescriptor and one ColumnDescriptor with fewer versions
    */
   private static List<ColumnDescriptor> getColumnDescriptors() {
-    ArrayList<ColumnDescriptor> cDescriptors = new ArrayList<ColumnDescriptor>(2);
+    ArrayList<ColumnDescriptor> cDescriptors = new ArrayList<>(2);
 
     // A default ColumnDescriptor
     ColumnDescriptor cDescA = new ColumnDescriptor();
@@ -818,7 +818,7 @@ public class TestThriftServer {
    * @return a List of column names for use in retrieving a scanner
    */
   private List<ByteBuffer> getColumnList(boolean includeA, boolean includeB) {
-    List<ByteBuffer> columnList = new ArrayList<ByteBuffer>();
+    List<ByteBuffer> columnList = new ArrayList<>();
     if (includeA) columnList.add(columnAname);
     if (includeB) columnList.add(columnBname);
     return columnList;
@@ -830,7 +830,7 @@ public class TestThriftServer {
    * and columnB having valueB
    */
   private static List<Mutation> getMutations() {
-    List<Mutation> mutations = new ArrayList<Mutation>(2);
+    List<Mutation> mutations = new ArrayList<>(2);
     mutations.add(new Mutation(false, columnAname, valueAname, true));
     mutations.add(new Mutation(false, columnBname, valueBname, true));
     return mutations;
@@ -845,19 +845,19 @@ public class TestThriftServer {
    * (rowB, columnB): place valueD
    */
   private static List<BatchMutation> getBatchMutations() {
-    List<BatchMutation> batchMutations = new ArrayList<BatchMutation>(3);
+    List<BatchMutation> batchMutations = new ArrayList<>(3);
 
     // Mutations to rowA.  You can't mix delete and put anymore.
-    List<Mutation> rowAmutations = new ArrayList<Mutation>(1);
+    List<Mutation> rowAmutations = new ArrayList<>(1);
     rowAmutations.add(new Mutation(true, columnAname, null, true));
     batchMutations.add(new BatchMutation(rowAname, rowAmutations));
 
-    rowAmutations = new ArrayList<Mutation>(1);
+    rowAmutations = new ArrayList<>(1);
     rowAmutations.add(new Mutation(false, columnBname, valueCname, true));
     batchMutations.add(new BatchMutation(rowAname, rowAmutations));
 
     // Mutations to rowB
-    List<Mutation> rowBmutations = new ArrayList<Mutation>(2);
+    List<Mutation> rowBmutations = new ArrayList<>(2);
     rowBmutations.add(new Mutation(false, columnAname, valueCname, true));
     rowBmutations.add(new Mutation(false, columnBname, valueDname, true));
     batchMutations.add(new BatchMutation(rowBname, rowBmutations));
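
A last detail visible in getBatchMutations() above: when a local such as rowAmutations is reassigned, the diamond still infers the element type from the variable's declaration, so the fresh list needs no repeated type argument. A minimal illustration; the string payloads are placeholders for Mutation objects:

    import java.util.ArrayList;
    import java.util.List;

    public class DiamondReassignmentSketch {
      public static void main(String[] args) {
        List<String> rowAmutations = new ArrayList<>(1);
        rowAmutations.add("delete columnA");
        System.out.println(rowAmutations);

        // Still inferred as ArrayList<String> on reassignment.
        rowAmutations = new ArrayList<>(1);
        rowAmutations.add("put columnB=valueC");
        System.out.println(rowAmutations);
      }
    }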

http://git-wip-us.apache.org/repos/asf/hbase/blob/b53f3547/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
index 457273e..87998da 100644
--- a/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
+++ b/hbase-thrift/src/test/java/org/apache/hadoop/hbase/thrift/TestThriftServerCmdLine.java
@@ -82,7 +82,7 @@ public class TestThriftServerCmdLine {
 
   @Parameters
   public static Collection<Object[]> getParameters() {
-    Collection<Object[]> parameters = new ArrayList<Object[]>();
+    Collection<Object[]> parameters = new ArrayList<>();
     for (ImplType implType : ImplType.values()) {
       for (boolean specifyFramed : new boolean[] {false, true}) {
         for (boolean specifyBindIP : new boolean[] {false, true}) {
@@ -151,7 +151,7 @@ public class TestThriftServerCmdLine {
 
   @Test(timeout=600000)
   public void testRunThriftServer() throws Exception {
-    List<String> args = new ArrayList<String>();
+    List<String> args = new ArrayList<>();
     if (implType != null) {
       String serverTypeOption = implType.toString();
       assertTrue(serverTypeOption.startsWith("-"));
