hbase git commit: HBASE-21208 Bytes#toShort doesn't work without unsafe

2018-09-26 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 fea75742b -> a4e72544f


HBASE-21208 Bytes#toShort doesn't work without unsafe

Signed-off-by: Ted Yu 
Signed-off-by: anoopsamjohn 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a4e72544
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a4e72544
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a4e72544

Branch: refs/heads/branch-2.1
Commit: a4e72544f7d0091ad83ecbc90b953ce90255eb14
Parents: fea7574
Author: Chia-Ping Tsai 
Authored: Tue Sep 25 10:18:47 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Sep 26 18:19:19 2018 +0800

--
 .../org/apache/hadoop/hbase/util/Bytes.java |  7 ++--
 .../org/apache/hadoop/hbase/util/TestBytes.java | 44 
 2 files changed, 48 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a4e72544/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 15facea..faaab6f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -130,7 +130,8 @@ public class Bytes implements Comparable {
   // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
-  private static final boolean UNSAFE_UNALIGNED = 
UnsafeAvailChecker.unaligned();
+  @VisibleForTesting
+  static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
 
   /**
* Returns length of the byte array, returning 0 if the array is null.
@@ -1161,9 +1162,9 @@ public class Bytes implements Comparable {
   return UnsafeAccess.toShort(bytes, offset);
 } else {
   short n = 0;
-  n = (short) ((n ^ bytes[offset]) & 0xFF);
+  n = (short) (n ^ (bytes[offset] & 0xFF));
   n = (short) (n << 8);
-  n = (short) ((n ^ bytes[offset+1]) & 0xFF);
+  n = (short) (n ^ (bytes[offset + 1] & 0xFF));
   return n;
}
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/a4e72544/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index cea615e..8b206e2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -44,6 +46,48 @@ public class TestBytes extends TestCase {
   public static final HBaseClassTestRule CLASS_RULE =
   HBaseClassTestRule.forClass(TestBytes.class);
 
+  private static void setUnsafe(boolean value) throws Exception {
+Field field = Bytes.class.getDeclaredField("UNSAFE_UNALIGNED");
+field.setAccessible(true);
+Field modifiersField = Field.class.getDeclaredField("modifiers");
+modifiersField.setAccessible(true);
+int oldModifiers = field.getModifiers();
+modifiersField.setInt(field, oldModifiers & ~Modifier.FINAL);
+try {
+  field.set(null, value);
+} finally {
+  modifiersField.setInt(field, oldModifiers);
+}
+assertEquals(Bytes.UNSAFE_UNALIGNED, value);
+  }
+
+  public void testShort() throws Exception  {
+testShort(false);
+  }
+
+  public void testShortUnsafe() throws Exception  {
+testShort(true);
+  }
+
+  private static void testShort(boolean unsafe) throws Exception  {
+setUnsafe(unsafe);
+try {
+  for (short n : Arrays.asList(
+  Short.MIN_VALUE,
+  (short) -100,
+  (short) -1,
+  (short) 0,
+  (short) 1,
+  (short) 300,
+  Short.MAX_VALUE)) {
+byte[] bytes = Bytes.toBytes(n);
+assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
+  }
+} finally {
+  setUnsafe(UnsafeAvailChecker.unaligned());
+}
+  }
+
   public void testNullHashCode() {
 byte [] b = null;
 Exception ee = null;



hbase git commit: HBASE-21208 Bytes#toShort doesn't work without unsafe

2018-09-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 d026f3c5d -> cf915f9c7


HBASE-21208 Bytes#toShort doesn't work without unsafe

Signed-off-by: Ted Yu 
Signed-off-by: anoopsamjohn 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cf915f9c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cf915f9c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cf915f9c

Branch: refs/heads/branch-2.0
Commit: cf915f9c7c63c5361f99d906b3c9d89b97f706b1
Parents: d026f3c
Author: Chia-Ping Tsai 
Authored: Tue Sep 25 10:18:47 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Sep 25 10:29:30 2018 +0800

--
 .../org/apache/hadoop/hbase/util/Bytes.java |  7 ++--
 .../org/apache/hadoop/hbase/util/TestBytes.java | 44 
 2 files changed, 48 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/cf915f9c/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index 15facea..faaab6f 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -130,7 +130,8 @@ public class Bytes implements Comparable {
   // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
-  private static final boolean UNSAFE_UNALIGNED = 
UnsafeAvailChecker.unaligned();
+  @VisibleForTesting
+  static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
 
   /**
* Returns length of the byte array, returning 0 if the array is null.
@@ -1161,9 +1162,9 @@ public class Bytes implements Comparable {
   return UnsafeAccess.toShort(bytes, offset);
 } else {
   short n = 0;
-  n = (short) ((n ^ bytes[offset]) & 0xFF);
+  n = (short) (n ^ (bytes[offset] & 0xFF));
   n = (short) (n << 8);
-  n = (short) ((n ^ bytes[offset+1]) & 0xFF);
+  n = (short) (n ^ (bytes[offset + 1] & 0xFF));
   return n;
}
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cf915f9c/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index cea615e..8b206e2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -44,6 +46,48 @@ public class TestBytes extends TestCase {
   public static final HBaseClassTestRule CLASS_RULE =
   HBaseClassTestRule.forClass(TestBytes.class);
 
+  private static void setUnsafe(boolean value) throws Exception {
+Field field = Bytes.class.getDeclaredField("UNSAFE_UNALIGNED");
+field.setAccessible(true);
+Field modifiersField = Field.class.getDeclaredField("modifiers");
+modifiersField.setAccessible(true);
+int oldModifiers = field.getModifiers();
+modifiersField.setInt(field, oldModifiers & ~Modifier.FINAL);
+try {
+  field.set(null, value);
+} finally {
+  modifiersField.setInt(field, oldModifiers);
+}
+assertEquals(Bytes.UNSAFE_UNALIGNED, value);
+  }
+
+  public void testShort() throws Exception  {
+testShort(false);
+  }
+
+  public void testShortUnsafe() throws Exception  {
+testShort(true);
+  }
+
+  private static void testShort(boolean unsafe) throws Exception  {
+setUnsafe(unsafe);
+try {
+  for (short n : Arrays.asList(
+  Short.MIN_VALUE,
+  (short) -100,
+  (short) -1,
+  (short) 0,
+  (short) 1,
+  (short) 300,
+  Short.MAX_VALUE)) {
+byte[] bytes = Bytes.toBytes(n);
+assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
+  }
+} finally {
+  setUnsafe(UnsafeAvailChecker.unaligned());
+}
+  }
+
   public void testNullHashCode() {
 byte [] b = null;
 Exception ee = null;



hbase git commit: HBASE-21208 Bytes#toShort doesn't work without unsafe

2018-09-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 20974513d -> df5310fc1


HBASE-21208 Bytes#toShort doesn't work without unsafe

Signed-off-by: Ted Yu 
Signed-off-by: anoopsamjohn 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/df5310fc
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/df5310fc
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/df5310fc

Branch: refs/heads/branch-2
Commit: df5310fc1ee16a36de1dafad2f15f1dfdb95a49b
Parents: 2097451
Author: Chia-Ping Tsai 
Authored: Tue Sep 25 10:18:47 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Sep 25 10:27:08 2018 +0800

--
 .../org/apache/hadoop/hbase/util/Bytes.java |  7 ++--
 .../org/apache/hadoop/hbase/util/TestBytes.java | 44 
 2 files changed, 48 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/df5310fc/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index e63c0db..47312b6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -130,7 +130,8 @@ public class Bytes implements Comparable {
   // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
-  private static final boolean UNSAFE_UNALIGNED = 
UnsafeAvailChecker.unaligned();
+  @VisibleForTesting
+  static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
 
   /**
* Returns length of the byte array, returning 0 if the array is null.
@@ -1161,9 +1162,9 @@ public class Bytes implements Comparable {
   return UnsafeAccess.toShort(bytes, offset);
 } else {
   short n = 0;
-  n = (short) ((n ^ bytes[offset]) & 0xFF);
+  n = (short) (n ^ (bytes[offset] & 0xFF));
   n = (short) (n << 8);
-  n = (short) ((n ^ bytes[offset+1]) & 0xFF);
+  n = (short) (n ^ (bytes[offset + 1] & 0xFF));
   return n;
}
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/df5310fc/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index cea615e..8b206e2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -44,6 +46,48 @@ public class TestBytes extends TestCase {
   public static final HBaseClassTestRule CLASS_RULE =
   HBaseClassTestRule.forClass(TestBytes.class);
 
+  private static void setUnsafe(boolean value) throws Exception {
+Field field = Bytes.class.getDeclaredField("UNSAFE_UNALIGNED");
+field.setAccessible(true);
+Field modifiersField = Field.class.getDeclaredField("modifiers");
+modifiersField.setAccessible(true);
+int oldModifiers = field.getModifiers();
+modifiersField.setInt(field, oldModifiers & ~Modifier.FINAL);
+try {
+  field.set(null, value);
+} finally {
+  modifiersField.setInt(field, oldModifiers);
+}
+assertEquals(Bytes.UNSAFE_UNALIGNED, value);
+  }
+
+  public void testShort() throws Exception  {
+testShort(false);
+  }
+
+  public void testShortUnsafe() throws Exception  {
+testShort(true);
+  }
+
+  private static void testShort(boolean unsafe) throws Exception  {
+setUnsafe(unsafe);
+try {
+  for (short n : Arrays.asList(
+  Short.MIN_VALUE,
+  (short) -100,
+  (short) -1,
+  (short) 0,
+  (short) 1,
+  (short) 300,
+  Short.MAX_VALUE)) {
+byte[] bytes = Bytes.toBytes(n);
+assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
+  }
+} finally {
+  setUnsafe(UnsafeAvailChecker.unaligned());
+}
+  }
+
   public void testNullHashCode() {
 byte [] b = null;
 Exception ee = null;



hbase git commit: HBASE-21208 Bytes#toShort doesn't work without unsafe

2018-09-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 7ab77518a -> c686b535c


HBASE-21208 Bytes#toShort doesn't work without unsafe

Signed-off-by: Ted Yu 
Signed-off-by: anoopsamjohn 
Signed-off-by: Reid Chan 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c686b535
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c686b535
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c686b535

Branch: refs/heads/master
Commit: c686b535c2b4321c9089fac208d8d4230e8511df
Parents: 7ab7751
Author: Chia-Ping Tsai 
Authored: Tue Sep 25 10:18:47 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Sep 25 10:19:36 2018 +0800

--
 .../org/apache/hadoop/hbase/util/Bytes.java |  7 ++--
 .../org/apache/hadoop/hbase/util/TestBytes.java | 44 
 2 files changed, 48 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c686b535/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
--
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java 
b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
index e63c0db..47312b6 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/Bytes.java
@@ -130,7 +130,8 @@ public class Bytes implements Comparable {
   // SizeOf which uses java.lang.instrument says 24 bytes. (3 longs?)
   public static final int ESTIMATED_HEAP_TAX = 16;
 
-  private static final boolean UNSAFE_UNALIGNED = 
UnsafeAvailChecker.unaligned();
+  @VisibleForTesting
+  static final boolean UNSAFE_UNALIGNED = UnsafeAvailChecker.unaligned();
 
   /**
* Returns length of the byte array, returning 0 if the array is null.
@@ -1161,9 +1162,9 @@ public class Bytes implements Comparable {
   return UnsafeAccess.toShort(bytes, offset);
 } else {
   short n = 0;
-  n = (short) ((n ^ bytes[offset]) & 0xFF);
+  n = (short) (n ^ (bytes[offset] & 0xFF));
   n = (short) (n << 8);
-  n = (short) ((n ^ bytes[offset+1]) & 0xFF);
+  n = (short) (n ^ (bytes[offset + 1] & 0xFF));
   return n;
}
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/c686b535/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
--
diff --git 
a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java 
b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
index cea615e..8b206e2 100644
--- a/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
+++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/util/TestBytes.java
@@ -22,6 +22,8 @@ import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
 import java.io.DataOutputStream;
 import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
@@ -44,6 +46,48 @@ public class TestBytes extends TestCase {
   public static final HBaseClassTestRule CLASS_RULE =
   HBaseClassTestRule.forClass(TestBytes.class);
 
+  private static void setUnsafe(boolean value) throws Exception {
+Field field = Bytes.class.getDeclaredField("UNSAFE_UNALIGNED");
+field.setAccessible(true);
+Field modifiersField = Field.class.getDeclaredField("modifiers");
+modifiersField.setAccessible(true);
+int oldModifiers = field.getModifiers();
+modifiersField.setInt(field, oldModifiers & ~Modifier.FINAL);
+try {
+  field.set(null, value);
+} finally {
+  modifiersField.setInt(field, oldModifiers);
+}
+assertEquals(Bytes.UNSAFE_UNALIGNED, value);
+  }
+
+  public void testShort() throws Exception  {
+testShort(false);
+  }
+
+  public void testShortUnsafe() throws Exception  {
+testShort(true);
+  }
+
+  private static void testShort(boolean unsafe) throws Exception  {
+setUnsafe(unsafe);
+try {
+  for (short n : Arrays.asList(
+  Short.MIN_VALUE,
+  (short) -100,
+  (short) -1,
+  (short) 0,
+  (short) 1,
+  (short) 300,
+  Short.MAX_VALUE)) {
+byte[] bytes = Bytes.toBytes(n);
+assertEquals(Bytes.toShort(bytes, 0, bytes.length), n);
+  }
+} finally {
+  setUnsafe(UnsafeAvailChecker.unaligned());
+}
+  }
+
   public void testNullHashCode() {
 byte [] b = null;
 Exception ee = null;



hbase git commit: HBASE-21012 Revert the change of serializing TimeRangeTracker

2018-08-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 2311fa86d -> 51df2887c


HBASE-21012 Revert the change of serializing TimeRangeTracker

Signed-off-by: Michael Stack 
Signed-off-by: zhangduo 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/51df2887
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/51df2887
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/51df2887

Branch: refs/heads/branch-2
Commit: 51df2887c42ef019a6a095c741fb4a77283fda62
Parents: 2311fa8
Author: brandboat 
Authored: Thu Aug 9 12:27:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 11 22:25:10 2018 +0800

--
 .../hbase/regionserver/TimeRangeTracker.java| 27 +++-
 src/main/asciidoc/_chapters/upgrading.adoc  | 12 +
 2 files changed, 32 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/51df2887/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index 5c0eee5..d995fc6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -206,13 +208,24 @@ public abstract class TimeRangeTracker {
 }
   }
 
-  public static byte[] toByteArray(TimeRangeTracker tracker) {
-return ProtobufUtil.prependPBMagic(
-HBaseProtos.TimeRangeTracker.newBuilder()
-  .setFrom(tracker.getMin())
-  .setTo(tracker.getMax())
-  .build()
-  .toByteArray());
+  /**
+   * This method used to serialize TimeRangeTracker (TRT) with protobuf, but that broke the
+   * forward compatibility of HFiles (see HBASE-21008). Previous HBase versions (< 2.0.0) used
+   * DataOutput to serialize TRT, so those old versions cannot deserialize a TRT
+   * that was serialized with protobuf. Therefore we revert the serialization of
+   * TimeRangeTracker back to DataOutput. For more information, please see HBASE-21012.
+   * @param tracker the TimeRangeTracker to be serialized.
+   * @return a byte array filled with the serialized TimeRangeTracker.
+   * @throws IOException if something goes wrong in writeLong.
+   */
+  public static byte[] toByteArray(TimeRangeTracker tracker) throws 
IOException {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
+  try (DataOutputStream dos = new DataOutputStream(bos)) {
+dos.writeLong(tracker.getMin());
+dos.writeLong(tracker.getMax());
+return bos.toByteArray();
+  }
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/51df2887/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc 
b/src/main/asciidoc/_chapters/upgrading.adoc
index bc2ec1c..6dc788a 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -588,6 +588,18 @@ The internal changes to HBase during this upgrade were 
sufficient for compilatio
 
 If you previously relied on client side tracing integrated with HBase 
operations, it is recommended that you upgrade your usage to HTrace 4 as well.
 
+[[upgrade2.0.hfile.compatability]]
+.HFile loses forward compatibility
+
+HFiles generated by 2.0.0, 2.0.1, and 2.1.0 are not forward compatible with 1.4.6-, 1.3.2.1-, 1.2.6.1-,
+and other inactive releases. HFile loses compatibility because HBase in the new versions
+(2.0.0, 2.0.1, 2.1.0) uses protobuf to serialize/deserialize TimeRangeTracker (TRT) while old
+versions use DataInput/DataOutput. To solve this, we have to put
+link:https://jira.apache.org/jira/browse/HBASE-21012[HBASE-21012]
+in 2.x and put link:https://jira.apache.org/jira/browse/HBASE-21013[HBASE-21013] in 1.x.
+For more information, please see
+link:https://jira.apache.org/jira/browse/HBASE-21008[HBASE-21008].
+
 [[upgrade2.0.perf]]
 .Performance
 



hbase git commit: HBASE-21012 Revert the change of serializing TimeRangeTracker

2018-08-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master a3ab9306a -> 699ea4c7d


HBASE-21012 Revert the change of serializing TimeRangeTracker

Signed-off-by: Michael Stack 
Signed-off-by: zhangduo 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/699ea4c7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/699ea4c7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/699ea4c7

Branch: refs/heads/master
Commit: 699ea4c7d02c4871a57c1cb1a984c3451c54cc6b
Parents: a3ab930
Author: brandboat 
Authored: Thu Aug 9 12:27:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 11 22:17:04 2018 +0800

--
 .../hbase/regionserver/TimeRangeTracker.java| 27 +++-
 src/main/asciidoc/_chapters/upgrading.adoc  | 12 +
 2 files changed, 32 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/699ea4c7/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index 5c0eee5..d995fc6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -206,13 +208,24 @@ public abstract class TimeRangeTracker {
 }
   }
 
-  public static byte[] toByteArray(TimeRangeTracker tracker) {
-return ProtobufUtil.prependPBMagic(
-HBaseProtos.TimeRangeTracker.newBuilder()
-  .setFrom(tracker.getMin())
-  .setTo(tracker.getMax())
-  .build()
-  .toByteArray());
+  /**
+   * This method used to serialize TimeRangeTracker (TRT) with protobuf, but that broke the
+   * forward compatibility of HFiles (see HBASE-21008). Previous HBase versions (< 2.0.0) used
+   * DataOutput to serialize TRT, so those old versions cannot deserialize a TRT
+   * that was serialized with protobuf. Therefore we revert the serialization of
+   * TimeRangeTracker back to DataOutput. For more information, please see HBASE-21012.
+   * @param tracker the TimeRangeTracker to be serialized.
+   * @return a byte array filled with the serialized TimeRangeTracker.
+   * @throws IOException if something goes wrong in writeLong.
+   */
+  public static byte[] toByteArray(TimeRangeTracker tracker) throws 
IOException {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
+  try (DataOutputStream dos = new DataOutputStream(bos)) {
+dos.writeLong(tracker.getMin());
+dos.writeLong(tracker.getMax());
+return bos.toByteArray();
+  }
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/699ea4c7/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc 
b/src/main/asciidoc/_chapters/upgrading.adoc
index bc2ec1c..6dc788a 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -588,6 +588,18 @@ The internal changes to HBase during this upgrade were 
sufficient for compilatio
 
 If you previously relied on client side tracing integrated with HBase 
operations, it is recommended that you upgrade your usage to HTrace 4 as well.
 
+[[upgrade2.0.hfile.compatability]]
+.HFile loses forward compatibility
+
+HFiles generated by 2.0.0, 2.0.1, and 2.1.0 are not forward compatible with 1.4.6-, 1.3.2.1-, 1.2.6.1-,
+and other inactive releases. HFile loses compatibility because HBase in the new versions
+(2.0.0, 2.0.1, 2.1.0) uses protobuf to serialize/deserialize TimeRangeTracker (TRT) while old
+versions use DataInput/DataOutput. To solve this, we have to put
+link:https://jira.apache.org/jira/browse/HBASE-21012[HBASE-21012]
+in 2.x and put link:https://jira.apache.org/jira/browse/HBASE-21013[HBASE-21013] in 1.x.
+For more information, please see
+link:https://jira.apache.org/jira/browse/HBASE-21008[HBASE-21008].
+
 [[upgrade2.0.perf]]
 .Performance
 



hbase git commit: HBASE-21012 Revert the change of serializing TimeRangeTracker

2018-08-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 5e12d6a98 -> 873d9f508


HBASE-21012 Revert the change of serializing TimeRangeTracker

Signed-off-by: Michael Stack 
Signed-off-by: zhangduo 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/873d9f50
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/873d9f50
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/873d9f50

Branch: refs/heads/branch-2.1
Commit: 873d9f50822aceec70bf3703362185e6498d1c91
Parents: 5e12d6a
Author: brandboat 
Authored: Thu Aug 9 12:27:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 11 22:28:49 2018 +0800

--
 .../hbase/regionserver/TimeRangeTracker.java| 27 +++-
 src/main/asciidoc/_chapters/upgrading.adoc  | 12 +
 2 files changed, 32 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/873d9f50/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index 5c0eee5..d995fc6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -206,13 +208,24 @@ public abstract class TimeRangeTracker {
 }
   }
 
-  public static byte[] toByteArray(TimeRangeTracker tracker) {
-return ProtobufUtil.prependPBMagic(
-HBaseProtos.TimeRangeTracker.newBuilder()
-  .setFrom(tracker.getMin())
-  .setTo(tracker.getMax())
-  .build()
-  .toByteArray());
+  /**
+   * This method used to serialize TimeRangeTracker (TRT) with protobuf, but that broke the
+   * forward compatibility of HFiles (see HBASE-21008). Previous HBase versions (< 2.0.0) used
+   * DataOutput to serialize TRT, so those old versions cannot deserialize a TRT
+   * that was serialized with protobuf. Therefore we revert the serialization of
+   * TimeRangeTracker back to DataOutput. For more information, please see HBASE-21012.
+   * @param tracker the TimeRangeTracker to be serialized.
+   * @return a byte array filled with the serialized TimeRangeTracker.
+   * @throws IOException if something goes wrong in writeLong.
+   */
+  public static byte[] toByteArray(TimeRangeTracker tracker) throws 
IOException {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
+  try (DataOutputStream dos = new DataOutputStream(bos)) {
+dos.writeLong(tracker.getMin());
+dos.writeLong(tracker.getMax());
+return bos.toByteArray();
+  }
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/873d9f50/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc 
b/src/main/asciidoc/_chapters/upgrading.adoc
index bc2ec1c..6dc788a 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -588,6 +588,18 @@ The internal changes to HBase during this upgrade were 
sufficient for compilatio
 
 If you previously relied on client side tracing integrated with HBase 
operations, it is recommended that you upgrade your usage to HTrace 4 as well.
 
+[[upgrade2.0.hfile.compatability]]
+.HFile loses forward compatibility
+
+HFiles generated by 2.0.0, 2.0.1, and 2.1.0 are not forward compatible with 1.4.6-, 1.3.2.1-, 1.2.6.1-,
+and other inactive releases. HFile loses compatibility because HBase in the new versions
+(2.0.0, 2.0.1, 2.1.0) uses protobuf to serialize/deserialize TimeRangeTracker (TRT) while old
+versions use DataInput/DataOutput. To solve this, we have to put
+link:https://jira.apache.org/jira/browse/HBASE-21012[HBASE-21012]
+in 2.x and put link:https://jira.apache.org/jira/browse/HBASE-21013[HBASE-21013] in 1.x.
+For more information, please see
+link:https://jira.apache.org/jira/browse/HBASE-21008[HBASE-21008].
+
 [[upgrade2.0.perf]]
 .Performance
 



hbase git commit: HBASE-21012 Revert the change of serializing TimeRangeTracker

2018-08-11 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 9c55577e6 -> 2a15e3a01


HBASE-21012 Revert the change of serializing TimeRangeTracker

Signed-off-by: Michael Stack 
Signed-off-by: zhangduo 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2a15e3a0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2a15e3a0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2a15e3a0

Branch: refs/heads/branch-2.0
Commit: 2a15e3a01a7474cbc81e4228265eea09861be1ff
Parents: 9c55577
Author: brandboat 
Authored: Thu Aug 9 12:27:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Aug 11 22:32:28 2018 +0800

--
 .../hbase/regionserver/TimeRangeTracker.java| 27 +++-
 src/main/asciidoc/_chapters/upgrading.adoc  | 12 +
 2 files changed, 32 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2a15e3a0/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index 5c0eee5..d995fc6 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
 import java.io.DataInputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
@@ -206,13 +208,24 @@ public abstract class TimeRangeTracker {
 }
   }
 
-  public static byte[] toByteArray(TimeRangeTracker tracker) {
-return ProtobufUtil.prependPBMagic(
-HBaseProtos.TimeRangeTracker.newBuilder()
-  .setFrom(tracker.getMin())
-  .setTo(tracker.getMax())
-  .build()
-  .toByteArray());
+  /**
+   * This method used to serialize TimeRangeTracker (TRT) with protobuf, but 
that breaks
+   * forward compatibility on HFile (see HBASE-21008). Previous HBase 
versions ( < 2.0.0 ) used
+   * DataOutput to serialize TRT, and those old versions are not able to 
deserialize a TRT
+   * that was serialized with protobuf. So we need to revert the change and 
serialize
+   * TimeRangeTracker with DataOutput again. For more information, please check 
HBASE-21012.
+   * @param tracker TimeRangeTracker needed to be serialized.
+   * @return byte array filled with serialized TimeRangeTracker.
+   * @throws IOException if something goes wrong in writeLong.
+   */
+  public static byte[] toByteArray(TimeRangeTracker tracker) throws 
IOException {
+try (ByteArrayOutputStream bos = new ByteArrayOutputStream()) {
+  try (DataOutputStream dos = new DataOutputStream(bos)) {
+dos.writeLong(tracker.getMin());
+dos.writeLong(tracker.getMax());
+return bos.toByteArray();
+  }
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/2a15e3a0/src/main/asciidoc/_chapters/upgrading.adoc
--
diff --git a/src/main/asciidoc/_chapters/upgrading.adoc 
b/src/main/asciidoc/_chapters/upgrading.adoc
index a126422..a082014 100644
--- a/src/main/asciidoc/_chapters/upgrading.adoc
+++ b/src/main/asciidoc/_chapters/upgrading.adoc
@@ -588,6 +588,18 @@ The internal changes to HBase during this upgrade were 
sufficient for compilatio
 
 If you previously relied on client side tracing integrated with HBase 
operations, it is recommended that you upgrade your usage to HTrace 4 as well.
 
+[[upgrade2.0.hfile.compatability]]
+.HFile loses forward compatibility
+
+HFiles generated by 2.0.0, 2.0.1, and 2.1.0 are not forward compatible with 1.4.6-, 
1.3.2.1-, 1.2.6.1-,
+and other inactive releases. HFiles lose compatibility because HBase in new 
versions
+(2.0.0, 2.0.1, 2.1.0) uses protobuf to serialize/deserialize TimeRangeTracker 
(TRT), while old
+versions use DataInput/DataOutput. To solve this, we have to put
+link:https://jira.apache.org/jira/browse/HBASE-21012[HBASE-21012]
+to 2.x and put 
link:https://jira.apache.org/jira/browse/HBASE-21013[HBASE-21013] in 1.x.
+For more information, please check
+link:https://jira.apache.org/jira/browse/HBASE-21008[HBASE-21008].
+
 [[upgrade2.0.perf]]
 .Performance
 



hbase git commit: HBASE-20873 Update doc for Endpoint-based Export

2018-07-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 097ae5570 -> ca558bc85


HBASE-20873 Update doc for Endpoint-based Export

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ca558bc8
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ca558bc8
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ca558bc8

Branch: refs/heads/branch-2
Commit: ca558bc857f22f55a8489e3b1920468478d12b1c
Parents: 097ae55
Author: Wei-Chiu Chuang 
Authored: Fri Jul 13 19:01:22 2018 -0700
Committer: Chia-Ping Tsai 
Committed: Wed Jul 25 10:44:46 2018 +0800

--
 src/main/asciidoc/_chapters/ops_mgt.adoc | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ca558bc8/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 1a3d58f..8a20a3c 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -503,9 +503,12 @@ $ bin/hbase org.apache.hadoop.hbase.mapreduce.Export 
  [  
[ [ []]]
 
+The outputdir is an HDFS directory that does not exist prior to the export. 
When done, the exported files will be owned by the user invoking the export 
command.
 
 *The Comparison of Endpoint-based Export And Mapreduce-based Export*
 |===



hbase git commit: HBASE-20873 Update doc for Endpoint-based Export

2018-07-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 1dbfe92db -> 78948d987


HBASE-20873 Update doc for Endpoint-based Export

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/78948d98
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/78948d98
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/78948d98

Branch: refs/heads/branch-2.1
Commit: 78948d987b2c32d6e0fdbb7048f975566b7c92ab
Parents: 1dbfe92
Author: Wei-Chiu Chuang 
Authored: Fri Jul 13 19:01:22 2018 -0700
Committer: Chia-Ping Tsai 
Committed: Wed Jul 25 10:44:58 2018 +0800

--
 src/main/asciidoc/_chapters/ops_mgt.adoc | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/78948d98/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 1a3d58f..8a20a3c 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -503,9 +503,12 @@ $ bin/hbase org.apache.hadoop.hbase.mapreduce.Export 
  [  
[ [ []]]
 
+The outputdir is an HDFS directory that does not exist prior to the export. 
When done, the exported files will be owned by the user invoking the export 
command.
 
 *The Comparison of Endpoint-based Export And Mapreduce-based Export*
 |===



hbase git commit: HBASE-20873 Update doc for Endpoint-based Export

2018-07-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 3a3855aad -> e44f50669


HBASE-20873 Update doc for Endpoint-based Export

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e44f5066
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e44f5066
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e44f5066

Branch: refs/heads/master
Commit: e44f506694e606cb3bd2a854c5e72afa802d26d0
Parents: 3a3855a
Author: Wei-Chiu Chuang 
Authored: Fri Jul 13 19:01:22 2018 -0700
Committer: Chia-Ping Tsai 
Committed: Wed Jul 25 10:38:12 2018 +0800

--
 src/main/asciidoc/_chapters/ops_mgt.adoc | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e44f5066/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index b6be867..01e6de6 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -503,9 +503,12 @@ $ bin/hbase org.apache.hadoop.hbase.mapreduce.Export 
  [  
[ [ []]]
 
+The outputdir is an HDFS directory that does not exist prior to the export. 
When done, the exported files will be owned by the user invoking the export 
command.
 
 *The Comparison of Endpoint-based Export And Mapreduce-based Export*
 |===



hbase git commit: HBASE-20873 Update doc for Endpoint-based Export

2018-07-24 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 5add96868 -> 3c49d558f


HBASE-20873 Update doc for Endpoint-based Export

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3c49d558
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3c49d558
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3c49d558

Branch: refs/heads/branch-2.0
Commit: 3c49d558f46661171cd9b431954b6da304e28e61
Parents: 5add968
Author: Wei-Chiu Chuang 
Authored: Fri Jul 13 19:01:22 2018 -0700
Committer: Chia-Ping Tsai 
Committed: Wed Jul 25 10:45:16 2018 +0800

--
 src/main/asciidoc/_chapters/ops_mgt.adoc | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3c49d558/src/main/asciidoc/_chapters/ops_mgt.adoc
--
diff --git a/src/main/asciidoc/_chapters/ops_mgt.adoc 
b/src/main/asciidoc/_chapters/ops_mgt.adoc
index 03eccb9..1eeaaa1 100644
--- a/src/main/asciidoc/_chapters/ops_mgt.adoc
+++ b/src/main/asciidoc/_chapters/ops_mgt.adoc
@@ -503,9 +503,12 @@ $ bin/hbase org.apache.hadoop.hbase.mapreduce.Export 
  [  
[ [ []]]
 
+The outputdir is an HDFS directory that does not exist prior to the export. 
When done, the exported files will be owned by the user invoking the export 
command.
 
 *The Comparison of Endpoint-based Export And Mapreduce-based Export*
 |===



hbase git commit: HBASE-20869 Endpoint-based Export use incorrect user to write to destination

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 724e32349 -> 1ed58e41c


HBASE-20869 Endpoint-based Export use incorrect user to write to destination

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1ed58e41
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1ed58e41
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1ed58e41

Branch: refs/heads/master
Commit: 1ed58e41cce526e93d8da66c9571f71d11d94e8f
Parents: 724e323
Author: Wei-Chiu Chuang 
Authored: Thu Jul 19 20:17:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 20:29:55 2018 +0800

--
 .../org/apache/hadoop/hbase/coprocessor/Export.java | 13 ++---
 .../hadoop/hbase/coprocessor/TestSecureExport.java  | 16 
 2 files changed, 26 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1ed58e41/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
--
diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 6d6c1a6..b21d5c3 100644
--- 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -451,9 +451,16 @@ public class Export extends ExportProtos.ExportService 
implements RegionCoproces
 SecureWriter(final Configuration conf, final UserProvider userProvider,
 final Token userToken, final List opts)
 throws IOException {
-  privilegedWriter = new PrivilegedWriter(getActiveUser(userProvider, 
userToken),
-SequenceFile.createWriter(conf,
-opts.toArray(new SequenceFile.Writer.Option[opts.size()])));
+  User user = getActiveUser(userProvider, userToken);
+  try {
+SequenceFile.Writer sequenceFileWriter =
+user.runAs((PrivilegedExceptionAction) () ->
+SequenceFile.createWriter(conf,
+opts.toArray(new 
SequenceFile.Writer.Option[opts.size()])));
+privilegedWriter = new PrivilegedWriter(user, sequenceFileWriter);
+  } catch (InterruptedException e) {
+throw new IOException(e);
+  }
 }
 
 void append(final Object key, final Object value) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/1ed58e41/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 21f17f7..b2ca1d4 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -336,6 +337,21 @@ public class TestSecureExport {
 LOG.error(ex.toString(), ex);
 throw new Exception(ex);
   } finally {
+if (fs.exists(new Path(openDir, "output"))) {
+  // if export completes successfully, every file under the output 
directory should be
+  // owned by the current user, not the hbase service user.
+  FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, 
"output"));
+  String currentUserName = User.getCurrent().getShortName();
+  assertEquals("Unexpected file owner", currentUserName, 
outputDirFileStatus.getOwner());
+
+  FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, 
"output"));
+  for (FileStatus fileStatus: outputFileStatus) {
+assertEquals("Unexpected file owner", currentUserName, 
fileStatus.getOwner());
+  }
+} else {
+  LOG.info("output directory doesn't exist. Skip check");
+}
+
 clearOutput(output);
   }
 };



hbase git commit: HBASE-20869 Endpoint-based Export use incorrect user to write to destination

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 29f1352ed -> 4259da722


HBASE-20869 Endpoint-based Export use incorrect user to write to destination

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4259da72
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4259da72
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4259da72

Branch: refs/heads/branch-2
Commit: 4259da722e1982ecf91ec2cb9ded9671349cd56a
Parents: 29f1352
Author: Wei-Chiu Chuang 
Authored: Thu Jul 19 20:17:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 20:18:34 2018 +0800

--
 .../org/apache/hadoop/hbase/coprocessor/Export.java | 13 ++---
 .../hadoop/hbase/coprocessor/TestSecureExport.java  | 16 
 2 files changed, 26 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4259da72/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
--
diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 6d6c1a6..b21d5c3 100644
--- 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -451,9 +451,16 @@ public class Export extends ExportProtos.ExportService 
implements RegionCoproces
 SecureWriter(final Configuration conf, final UserProvider userProvider,
 final Token userToken, final List opts)
 throws IOException {
-  privilegedWriter = new PrivilegedWriter(getActiveUser(userProvider, 
userToken),
-SequenceFile.createWriter(conf,
-opts.toArray(new SequenceFile.Writer.Option[opts.size()])));
+  User user = getActiveUser(userProvider, userToken);
+  try {
+SequenceFile.Writer sequenceFileWriter =
+user.runAs((PrivilegedExceptionAction) () ->
+SequenceFile.createWriter(conf,
+opts.toArray(new 
SequenceFile.Writer.Option[opts.size()])));
+privilegedWriter = new PrivilegedWriter(user, sequenceFileWriter);
+  } catch (InterruptedException e) {
+throw new IOException(e);
+  }
 }
 
 void append(final Object key, final Object value) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/4259da72/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 21f17f7..b2ca1d4 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -336,6 +337,21 @@ public class TestSecureExport {
 LOG.error(ex.toString(), ex);
 throw new Exception(ex);
   } finally {
+if (fs.exists(new Path(openDir, "output"))) {
+  // if export completes successfully, every file under the output 
directory should be
+  // owned by the current user, not the hbase service user.
+  FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, 
"output"));
+  String currentUserName = User.getCurrent().getShortName();
+  assertEquals("Unexpected file owner", currentUserName, 
outputDirFileStatus.getOwner());
+
+  FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, 
"output"));
+  for (FileStatus fileStatus: outputFileStatus) {
+assertEquals("Unexpected file owner", currentUserName, 
fileStatus.getOwner());
+  }
+} else {
+  LOG.info("output directory doesn't exist. Skip check");
+}
+
 clearOutput(output);
   }
 };



hbase git commit: HBASE-20869 Endpoint-based Export use incorrect user to write to destination

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 ecb51e02e -> 93e8acbe3


HBASE-20869 Endpoint-based Export use incorrect user to write to destination

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93e8acbe
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93e8acbe
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93e8acbe

Branch: refs/heads/branch-2.0
Commit: 93e8acbe34663fd13609e6eb1152ffe084d4d035
Parents: ecb51e0
Author: Wei-Chiu Chuang 
Authored: Thu Jul 19 20:17:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 20:34:27 2018 +0800

--
 .../org/apache/hadoop/hbase/coprocessor/Export.java | 13 ++---
 .../hadoop/hbase/coprocessor/TestSecureExport.java  | 16 
 2 files changed, 26 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/93e8acbe/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
--
diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 6d6c1a6..b21d5c3 100644
--- 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -451,9 +451,16 @@ public class Export extends ExportProtos.ExportService 
implements RegionCoproces
 SecureWriter(final Configuration conf, final UserProvider userProvider,
 final Token userToken, final List opts)
 throws IOException {
-  privilegedWriter = new PrivilegedWriter(getActiveUser(userProvider, 
userToken),
-SequenceFile.createWriter(conf,
-opts.toArray(new SequenceFile.Writer.Option[opts.size()])));
+  User user = getActiveUser(userProvider, userToken);
+  try {
+SequenceFile.Writer sequenceFileWriter =
+user.runAs((PrivilegedExceptionAction) () ->
+SequenceFile.createWriter(conf,
+opts.toArray(new 
SequenceFile.Writer.Option[opts.size()])));
+privilegedWriter = new PrivilegedWriter(user, sequenceFileWriter);
+  } catch (InterruptedException e) {
+throw new IOException(e);
+  }
 }
 
 void append(final Object key, final Object value) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/93e8acbe/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 21f17f7..b2ca1d4 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -336,6 +337,21 @@ public class TestSecureExport {
 LOG.error(ex.toString(), ex);
 throw new Exception(ex);
   } finally {
+if (fs.exists(new Path(openDir, "output"))) {
+  // if export completes successfully, every file under the output 
directory should be
+  // owned by the current user, not the hbase service user.
+  FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, 
"output"));
+  String currentUserName = User.getCurrent().getShortName();
+  assertEquals("Unexpected file owner", currentUserName, 
outputDirFileStatus.getOwner());
+
+  FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, 
"output"));
+  for (FileStatus fileStatus: outputFileStatus) {
+assertEquals("Unexpected file owner", currentUserName, 
fileStatus.getOwner());
+  }
+} else {
+  LOG.info("output directory doesn't exist. Skip check");
+}
+
 clearOutput(output);
   }
 };



[1/2] hbase git commit: HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have to"

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.1 9ac26b80b -> 8461e8588


HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have 
to"

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/36c4f62d
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/36c4f62d
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/36c4f62d

Branch: refs/heads/branch-2.1
Commit: 36c4f62dafcede4cae20c939f72fa11443ab5bd6
Parents: 9ac26b8
Author: Balazs Meszaros 
Authored: Wed Jul 11 15:23:15 2018 +0200
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 20:48:32 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Table.java   | 69 ++--
 1 file changed, 49 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/36c4f62d/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 191d359..6f779cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -20,21 +20,24 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-
+import org.apache.hadoop.hbase.util.Bytes;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
@@ -73,7 +76,15 @@ public interface Table extends Closeable {
* use {@link #getDescriptor()}
*/
   @Deprecated
-  HTableDescriptor getTableDescriptor() throws IOException;
+  default HTableDescriptor getTableDescriptor() throws IOException {
+TableDescriptor descriptor = getDescriptor();
+
+if (descriptor instanceof HTableDescriptor) {
+  return (HTableDescriptor)descriptor;
+} else {
+  return new HTableDescriptor(descriptor);
+}
+  }
 
   /**
* Gets the {@link org.apache.hadoop.hbase.client.TableDescriptor table 
descriptor} for this table.
@@ -96,7 +107,7 @@ public interface Table extends Closeable {
* @throws IOException e
*/
   default boolean exists(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return exists(Collections.singletonList(get))[0];
   }
 
   /**
@@ -176,7 +187,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default Result get(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return get(Collections.singletonList(get))[0];
   }
 
   /**
@@ -240,7 +251,6 @@ public interface Table extends Closeable {
 throw new NotImplementedException("Add an implementation!");
   }
 
-
   /**
* Puts some data in the table.
*
@@ -249,7 +259,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default void put(Put put) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+put(Collections.singletonList(put));
   }
 
   /**
@@ -289,7 +299,7 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, 
byte[] value, Put put)
   throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, 
put);
   }
 
   /**
@@ -315,7 +325,10 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
   CompareFilter.CompareOp compareOp, byte[] value, Put put) throws 
IOException {
-throw new NotImplementedException("Add an implementation!");
+RowMutations mutations = new RowMutations(put.getRow(), 1);
+mutations.add(put);
+
+return checkAndMutate(row, family, qualifier, compareOp, value, mutations);
   }
 
   /**
@@ -341,7 +354,10 @@ public interface Table 

[2/2] hbase git commit: HBASE-20869 Endpoint-based Export use incorrect user to write to destination

2018-07-19 Thread chia7712
HBASE-20869 Endpoint-based Export use incorrect user to write to destination

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8461e858
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8461e858
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8461e858

Branch: refs/heads/branch-2.1
Commit: 8461e858806bda23d27bcd4622d80b4679f83836
Parents: 36c4f62
Author: Wei-Chiu Chuang 
Authored: Thu Jul 19 20:17:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 20:48:42 2018 +0800

--
 .../org/apache/hadoop/hbase/coprocessor/Export.java | 13 ++---
 .../hadoop/hbase/coprocessor/TestSecureExport.java  | 16 
 2 files changed, 26 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8461e858/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
--
diff --git 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
index 6d6c1a6..b21d5c3 100644
--- 
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
+++ 
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/Export.java
@@ -451,9 +451,16 @@ public class Export extends ExportProtos.ExportService 
implements RegionCoproces
 SecureWriter(final Configuration conf, final UserProvider userProvider,
 final Token userToken, final List opts)
 throws IOException {
-  privilegedWriter = new PrivilegedWriter(getActiveUser(userProvider, 
userToken),
-SequenceFile.createWriter(conf,
-opts.toArray(new SequenceFile.Writer.Option[opts.size()])));
+  User user = getActiveUser(userProvider, userToken);
+  try {
+SequenceFile.Writer sequenceFileWriter =
+user.runAs((PrivilegedExceptionAction) () ->
+SequenceFile.createWriter(conf,
+opts.toArray(new 
SequenceFile.Writer.Option[opts.size()])));
+privilegedWriter = new PrivilegedWriter(user, sequenceFileWriter);
+  } catch (InterruptedException e) {
+throw new IOException(e);
+  }
 }
 
 void append(final Object key, final Object value) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/8461e858/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
index 21f17f7..b2ca1d4 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestSecureExport.java
@@ -29,6 +29,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
@@ -336,6 +337,21 @@ public class TestSecureExport {
 LOG.error(ex.toString(), ex);
 throw new Exception(ex);
   } finally {
+if (fs.exists(new Path(openDir, "output"))) {
+  // if export completes successfully, every file under the output 
directory should be
+  // owned by the current user, not the hbase service user.
+  FileStatus outputDirFileStatus = fs.getFileStatus(new Path(openDir, 
"output"));
+  String currentUserName = User.getCurrent().getShortName();
+  assertEquals("Unexpected file owner", currentUserName, 
outputDirFileStatus.getOwner());
+
+  FileStatus[] outputFileStatus = fs.listStatus(new Path(openDir, 
"output"));
+  for (FileStatus fileStatus: outputFileStatus) {
+assertEquals("Unexpected file owner", currentUserName, 
fileStatus.getOwner());
+  }
+} else {
+  LOG.info("output directory doesn't exist. Skip check");
+}
+
 clearOutput(output);
   }
 };



hbase git commit: HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have to"

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 7e599428d -> 724e32349


HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have 
to"

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/724e3234
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/724e3234
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/724e3234

Branch: refs/heads/master
Commit: 724e323494da4f6562b98604560f854ebbd8229b
Parents: 7e59942
Author: Balazs Meszaros 
Authored: Wed Jul 11 15:23:15 2018 +0200
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 19:38:43 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Table.java   | 69 ++--
 1 file changed, 49 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/724e3234/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 191d359..6f779cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -20,21 +20,24 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-
+import org.apache.hadoop.hbase.util.Bytes;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
@@ -73,7 +76,15 @@ public interface Table extends Closeable {
* use {@link #getDescriptor()}
*/
   @Deprecated
-  HTableDescriptor getTableDescriptor() throws IOException;
+  default HTableDescriptor getTableDescriptor() throws IOException {
+TableDescriptor descriptor = getDescriptor();
+
+if (descriptor instanceof HTableDescriptor) {
+  return (HTableDescriptor)descriptor;
+} else {
+  return new HTableDescriptor(descriptor);
+}
+  }
 
   /**
* Gets the {@link org.apache.hadoop.hbase.client.TableDescriptor table 
descriptor} for this table.
@@ -96,7 +107,7 @@ public interface Table extends Closeable {
* @throws IOException e
*/
   default boolean exists(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return exists(Collections.singletonList(get))[0];
   }
 
   /**
@@ -176,7 +187,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default Result get(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return get(Collections.singletonList(get))[0];
   }
 
   /**
@@ -240,7 +251,6 @@ public interface Table extends Closeable {
 throw new NotImplementedException("Add an implementation!");
   }
 
-
   /**
* Puts some data in the table.
*
@@ -249,7 +259,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default void put(Put put) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+put(Collections.singletonList(put));
   }
 
   /**
@@ -289,7 +299,7 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, 
byte[] value, Put put)
   throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, 
put);
   }
 
   /**
@@ -315,7 +325,10 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
   CompareFilter.CompareOp compareOp, byte[] value, Put put) throws 
IOException {
-throw new NotImplementedException("Add an implementation!");
+RowMutations mutations = new RowMutations(put.getRow(), 1);
+mutations.add(put);
+
+return checkAndMutate(row, family, qualifier, compareOp, value, mutations);
   }
 
   /**
@@ -341,7 +354,10 @@ public interface Table extends 

hbase git commit: HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have to"

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e464230bd -> 29f1352ed


HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have 
to"

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/29f1352e
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/29f1352e
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/29f1352e

Branch: refs/heads/branch-2
Commit: 29f1352edda7f3452d8efb39df3d1940dcc4011c
Parents: e464230
Author: Balazs Meszaros 
Authored: Wed Jul 11 15:23:15 2018 +0200
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 19:57:09 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Table.java   | 69 ++--
 1 file changed, 49 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/29f1352e/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 191d359..6f779cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -20,21 +20,24 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-
+import org.apache.hadoop.hbase.util.Bytes;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
@@ -73,7 +76,15 @@ public interface Table extends Closeable {
* use {@link #getDescriptor()}
*/
   @Deprecated
-  HTableDescriptor getTableDescriptor() throws IOException;
+  default HTableDescriptor getTableDescriptor() throws IOException {
+TableDescriptor descriptor = getDescriptor();
+
+if (descriptor instanceof HTableDescriptor) {
+  return (HTableDescriptor)descriptor;
+} else {
+  return new HTableDescriptor(descriptor);
+}
+  }
 
   /**
* Gets the {@link org.apache.hadoop.hbase.client.TableDescriptor table 
descriptor} for this table.
@@ -96,7 +107,7 @@ public interface Table extends Closeable {
* @throws IOException e
*/
   default boolean exists(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return exists(Collections.singletonList(get))[0];
   }
 
   /**
@@ -176,7 +187,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default Result get(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return get(Collections.singletonList(get))[0];
   }
 
   /**
@@ -240,7 +251,6 @@ public interface Table extends Closeable {
 throw new NotImplementedException("Add an implementation!");
   }
 
-
   /**
* Puts some data in the table.
*
@@ -249,7 +259,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default void put(Put put) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+put(Collections.singletonList(put));
   }
 
   /**
@@ -289,7 +299,7 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, 
byte[] value, Put put)
   throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, 
put);
   }
 
   /**
@@ -315,7 +325,10 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
   CompareFilter.CompareOp compareOp, byte[] value, Put put) throws 
IOException {
-throw new NotImplementedException("Add an implementation!");
+RowMutations mutations = new RowMutations(put.getRow(), 1);
+mutations.add(put);
+
+return checkAndMutate(row, family, qualifier, compareOp, value, mutations);
   }
 
   /**
@@ -341,7 +354,10 @@ public interface Table exte

hbase git commit: HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have to"

2018-07-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 f17c97bc1 -> ecb51e02e


HBASE-20853 Polish "Add defaults to Table Interface so Implementors don't have 
to"

Signed-off-by: Chia-Ping Tsai 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ecb51e02
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ecb51e02
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ecb51e02

Branch: refs/heads/branch-2.0
Commit: ecb51e02ea69fc9f632739a96bd1e753b6c688ac
Parents: f17c97b
Author: Balazs Meszaros 
Authored: Wed Jul 11 15:23:15 2018 +0200
Committer: Chia-Ping Tsai 
Committed: Thu Jul 19 19:43:37 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Table.java   | 69 ++--
 1 file changed, 49 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ecb51e02/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 191d359..6f779cf 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -20,21 +20,24 @@ package org.apache.hadoop.hbase.client;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.yetus.audience.InterfaceAudience;
+
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-
+import org.apache.hadoop.hbase.util.Bytes;
 import com.google.protobuf.Descriptors;
 import com.google.protobuf.Message;
 import com.google.protobuf.Service;
@@ -73,7 +76,15 @@ public interface Table extends Closeable {
* use {@link #getDescriptor()}
*/
   @Deprecated
-  HTableDescriptor getTableDescriptor() throws IOException;
+  default HTableDescriptor getTableDescriptor() throws IOException {
+TableDescriptor descriptor = getDescriptor();
+
+if (descriptor instanceof HTableDescriptor) {
+  return (HTableDescriptor)descriptor;
+} else {
+  return new HTableDescriptor(descriptor);
+}
+  }
 
   /**
* Gets the {@link org.apache.hadoop.hbase.client.TableDescriptor table 
descriptor} for this table.
@@ -96,7 +107,7 @@ public interface Table extends Closeable {
* @throws IOException e
*/
   default boolean exists(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return exists(Collections.singletonList(get))[0];
   }
 
   /**
@@ -176,7 +187,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default Result get(Get get) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return get(Collections.singletonList(get))[0];
   }
 
   /**
@@ -240,7 +251,6 @@ public interface Table extends Closeable {
 throw new NotImplementedException("Add an implementation!");
   }
 
-
   /**
* Puts some data in the table.
*
@@ -249,7 +259,7 @@ public interface Table extends Closeable {
* @since 0.20.0
*/
   default void put(Put put) throws IOException {
-throw new NotImplementedException("Add an implementation!");
+put(Collections.singletonList(put));
   }
 
   /**
@@ -289,7 +299,7 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier, 
byte[] value, Put put)
   throws IOException {
-throw new NotImplementedException("Add an implementation!");
+return checkAndPut(row, family, qualifier, CompareOperator.EQUAL, value, 
put);
   }
 
   /**
@@ -315,7 +325,10 @@ public interface Table extends Closeable {
   @Deprecated
   default boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
   CompareFilter.CompareOp compareOp, byte[] value, Put put) throws 
IOException {
-throw new NotImplementedException("Add an implementation!");
+RowMutations mutations = new RowMutations(put.getRow(), 1);
+mutations.add(put);
+
+return checkAndMutate(row, family, qualifier, compareOp, value, mutations);
   }
 
   /**
@@ -341,7 +354,10 @@ public interface Table 

hbase git commit: HBASE-20575 Fail to config COMPACTION_ENABLED by hbase shell

2018-06-29 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.2 20772f139 -> 3845c089d


HBASE-20575 Fail to config COMPACTION_ENABLED by hbase shell

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3845c089
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3845c089
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3845c089

Branch: refs/heads/branch-1.2
Commit: 3845c089dfdc4e2f7d880d87ee555fd49318020b
Parents: 20772f1
Author: Mingdao Yang 
Authored: Fri Jun 29 14:52:26 2018 +
Committer: Chia-Ping Tsai 
Committed: Sat Jun 30 00:25:37 2018 +0800

--
 hbase-shell/src/main/ruby/hbase/admin.rb  |  4 ++--
 hbase-shell/src/test/ruby/hbase/admin_test.rb | 22 +-
 2 files changed, 23 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3845c089/hbase-shell/src/main/ruby/hbase/admin.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb 
b/hbase-shell/src/main/ruby/hbase/admin.rb
index 0fd1267..3f31eeb 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -330,7 +330,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.
 # However, it has to be set before DURABILITY so that DURABILITY could 
overwrite if both args are set
@@ -602,7 +602,7 @@ module Hbase
 htd.setOwnerString(arg.delete(OWNER)) if arg[OWNER]
 htd.setMaxFileSize(JLong.valueOf(arg.delete(MAX_FILESIZE))) if 
arg[MAX_FILESIZE]
 htd.setReadOnly(JBoolean.valueOf(arg.delete(READONLY))) if 
arg[READONLY]
-
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete[COMPACTION_ENABLED])) if 
arg[COMPACTION_ENABLED]
+
htd.setCompactionEnabled(JBoolean.valueOf(arg.delete(COMPACTION_ENABLED))) if 
arg[COMPACTION_ENABLED]
 parse_htd_args(htd, arg)
 
htd.setMemStoreFlushSize(JLong.valueOf(arg.delete(MEMSTORE_FLUSHSIZE))) if 
arg[MEMSTORE_FLUSHSIZE]
 # DEFERRED_LOG_FLUSH is deprecated and was replaced by DURABILITY.  To 
keep backward compatible, it still exists.

http://git-wip-us.apache.org/repos/asf/hbase/blob/3845c089/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index 5dbcc89..e168e83 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -198,7 +198,27 @@ module Hbase
   
assert_match(/org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy/,admin.describe(@create_test_name))
   assert_match(/REGION_MEMSTORE_REPLICATION/, 
admin.describe(@create_test_name))
 end
-
+
+define_test 'create should be able to set compaction in table options' do
+  drop_test_table(@create_test_name)
+  admin.create(@create_test_name, 'a', 'b',
+   'MAX_FILESIZE' => 12_345_678,
+   OWNER => '987654321',
+   FLUSH_POLICY => 'org.apache.hadoop.hbase.regionserver' \
+   '.FlushAllLargeStoresPolicy',
+   SPLIT_POLICY => 'org.apache.hadoop.hbase.regionserver' \
+   '.IncreasingToUpperBoundRegionSplitPolicy',
+   COMPACTION_ENABLED => 'TRUE')
+  assert_equal(['a:', 'b:'], table(@create_test_name).get_all_columns.sort)
+  assert_match(/12345678/, admin.describe(@create_test_name))
+  assert_match(/987654321/, admin.describe(@create_test_name))
+  
assert_match(/org.apache.hadoop.hbase.regionserver.FlushAllLargeStoresPolicy/, \
+   admin.describe(@create_test_name))
+  
assert_match(/org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy/,
 \
+   admin.describe(@create_test_name))
+  assert_match(/COMPACTION_ENABLED/, admin.describe(@create_test_name))
+end
+
 define_test "create should ignore table_att" do
   drop_test_table(@create_test_name)

hbase git commit: HBASE-20575 Fail to config COMPACTION_ENABLED by hbase shell

2018-06-29 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.3 5db4894ba -> 673e63337


HBASE-20575 Fail to config COMPACTION_ENABLED by hbase shell

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/673e6333
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/673e6333
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/673e6333

Branch: refs/heads/branch-1.3
Commit: 673e6333781261573a5c536558ad9b40056920aa
Parents: 5db4894
Author: Mingdao Yang 
Authored: Fri Jun 29 14:52:26 2018 +
Committer: Chia-Ping Tsai 
Committed: Sat Jun 30 00:27:35 2018 +0800

--
 hbase-shell/src/test/ruby/hbase/admin_test.rb | 22 +-
 1 file changed, 21 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/673e6333/hbase-shell/src/test/ruby/hbase/admin_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/admin_test.rb 
b/hbase-shell/src/test/ruby/hbase/admin_test.rb
index 5cae742..5a8ad39 100644
--- a/hbase-shell/src/test/ruby/hbase/admin_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/admin_test.rb
@@ -222,7 +222,27 @@ module Hbase
   
assert_match(/org.apache.hadoop.hbase.regionserver.ConstantSizeRegionSplitPolicy/,
 admin.describe(@create_test_name))
 end
-
+
+define_test 'create should be able to set compaction in table options' do
+  drop_test_table(@create_test_name)
+  admin.create(@create_test_name, 'a', 'b',
+   'MAX_FILESIZE' => 12_345_678,
+   OWNER => '987654321',
+   FLUSH_POLICY => 'org.apache.hadoop.hbase.regionserver' \
+   '.FlushAllLargeStoresPolicy',
+   SPLIT_POLICY => 'org.apache.hadoop.hbase.regionserver' \
+   '.IncreasingToUpperBoundRegionSplitPolicy',
+   COMPACTION_ENABLED => 'TRUE')
+  assert_equal(['a:', 'b:'], table(@create_test_name).get_all_columns.sort)
+  assert_match(/12345678/, admin.describe(@create_test_name))
+  assert_match(/987654321/, admin.describe(@create_test_name))
+  
assert_match(/org.apache.hadoop.hbase.regionserver.FlushAllLargeStoresPolicy/, \
+   admin.describe(@create_test_name))
+  
assert_match(/org.apache.hadoop.hbase.regionserver.IncreasingToUpperBoundRegionSplitPolicy/,
 \
+   admin.describe(@create_test_name))
+  assert_match(/COMPACTION_ENABLED/, admin.describe(@create_test_name))
+end
+
 define_test "create should ignore table_att" do
   drop_test_table(@create_test_name)
   admin.create(@create_test_name, 'a', 'b', METHOD => 'table_att', OWNER 
=> '987654321')



hbase git commit: HBASE-20737 (addendum) put collection into ArrayList instead of addAll function -- RetriesExhaustedWithDetailsException.java

2018-06-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master c08eff67a -> d23a517b6


HBASE-20737 (addendum) put collection into ArrayList instead of addAll function 
-- RetriesExhaustedWithDetailsException.java

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d23a517b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d23a517b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d23a517b

Branch: refs/heads/master
Commit: d23a517b6129a90f69062b7558bc777a4b76bf8f
Parents: c08eff6
Author: taiynlee 
Authored: Tue Jun 19 08:42:47 2018 +
Committer: Chia-Ping Tsai 
Committed: Wed Jun 20 14:41:36 2018 +0800

--
 .../hadoop/hbase/client/RetriesExhaustedWithDetailsException.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d23a517b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index 0cce728..ecbada9 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -117,8 +117,7 @@ extends RetriesExhaustedException {
 String s = getDesc(classifyExs(exceptions));
 StringBuilder addrs = new StringBuilder(s);
 addrs.append("servers with issues: ");
-Set uniqAddr = new HashSet<>();
-uniqAddr.addAll(hostnamePort);
+Set uniqAddr = new HashSet<>(hostnamePort);
 
 for (String addr : uniqAddr) {
   addrs.append(addr).append(", ");



hbase git commit: HBASE-20737 (addendum) put collection into ArrayList instead of addAll function -- RetriesExhaustedWithDetailsException.java

2018-06-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 83969b0da -> 8aabe36a4


HBASE-20737 (addendum) put collection into ArrayList instead of addAll function 
-- RetriesExhaustedWithDetailsException.java

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8aabe36a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8aabe36a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8aabe36a

Branch: refs/heads/branch-2
Commit: 8aabe36a49800ebf81486930d314441e132ca084
Parents: 83969b0
Author: taiynlee 
Authored: Tue Jun 19 08:42:47 2018 +
Committer: Chia-Ping Tsai 
Committed: Wed Jun 20 14:46:04 2018 +0800

--
 .../hadoop/hbase/client/RetriesExhaustedWithDetailsException.java | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8aabe36a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
index 0cce728..ecbada9 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RetriesExhaustedWithDetailsException.java
@@ -117,8 +117,7 @@ extends RetriesExhaustedException {
 String s = getDesc(classifyExs(exceptions));
 StringBuilder addrs = new StringBuilder(s);
 addrs.append("servers with issues: ");
-Set uniqAddr = new HashSet<>();
-uniqAddr.addAll(hostnamePort);
+Set uniqAddr = new HashSet<>(hostnamePort);
 
 for (String addr : uniqAddr) {
   addrs.append(addr).append(", ");



hbase git commit: HBASE-20737 put collection into ArrayList instead of addAll function

2018-06-16 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b2afba580 -> 8edd5d948


HBASE-20737 put collection into ArrayList instead of addAll function

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/8edd5d94
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/8edd5d94
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/8edd5d94

Branch: refs/heads/branch-2
Commit: 8edd5d948a956fcceb5fae3afd7372fc0cabdf45
Parents: b2afba5
Author: taiynlee 
Authored: Fri Jun 15 09:34:43 2018 +
Committer: Chia-Ping Tsai 
Committed: Sun Jun 17 11:16:16 2018 +0800

--
 .../org/apache/hadoop/hbase/master/ClusterStatusPublisher.java| 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/8edd5d94/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
index 21fa263..af35ce4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
@@ -187,8 +187,7 @@ public class ClusterStatusPublisher extends ScheduledChore {
 }
 
 // We're sending the new deads first.
-List> entries = new ArrayList<>();
-entries.addAll(lastSent.entrySet());
+List> entries = new 
ArrayList<>(lastSent.entrySet());
 Collections.sort(entries, new Comparator>() 
{
   @Override
   public int compare(Map.Entry o1, 
Map.Entry o2) {



hbase git commit: HBASE-20737 put collection into ArrayList instead of addAll function

2018-06-15 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 30772eef5 -> 0e43abc78


HBASE-20737 put collection into ArrayList instead of addAll function

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0e43abc7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0e43abc7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0e43abc7

Branch: refs/heads/master
Commit: 0e43abc78a5a55c84a7572125d498a950dabfb57
Parents: 30772ee
Author: taiynlee 
Authored: Fri Jun 15 09:34:43 2018 +
Committer: Chia-Ping Tsai 
Committed: Sat Jun 16 03:25:42 2018 +0800

--
 .../org/apache/hadoop/hbase/master/ClusterStatusPublisher.java| 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/0e43abc7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
index 21fa263..af35ce4 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
@@ -187,8 +187,7 @@ public class ClusterStatusPublisher extends ScheduledChore {
 }
 
 // We're sending the new deads first.
-List> entries = new ArrayList<>();
-entries.addAll(lastSent.entrySet());
+List> entries = new 
ArrayList<>(lastSent.entrySet());
 Collections.sort(entries, new Comparator>() 
{
   @Override
   public int compare(Map.Entry o1, 
Map.Entry o2) {



hbase git commit: HBASE-20485 Copy constructor of Scan doesn't copy the readType and replicaId

2018-05-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master c51e9adc7 -> f8281aa20


HBASE-20485 Copy constructor of Scan doesn't copy the readType and replicaId

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f8281aa2
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f8281aa2
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f8281aa2

Branch: refs/heads/master
Commit: f8281aa20877bfb3c0cb8e11f27e1de26250f90c
Parents: c51e9ad
Author: Nihal Jain 
Authored: Wed Apr 25 12:17:14 2018 +0530
Committer: Chia-Ping Tsai 
Committed: Wed May 9 16:39:57 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Scan.java|  3 +
 .../apache/hadoop/hbase/client/TestScan.java| 94 +++-
 .../hbase/master/SplitOrMergeTracker.java   |  1 -
 3 files changed, 94 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f8281aa2/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index b02bdc1..32fe2dc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -279,6 +279,8 @@ public class Scan extends Query {
 this.limit = scan.getLimit();
 this.needCursorResult = scan.isNeedCursorResult();
 setPriority(scan.getPriority());
+readType = scan.getReadType();
+super.setReplicaId(scan.getReplicaId());
   }
 
   /**
@@ -310,6 +312,7 @@ public class Scan extends Query {
 }
 this.mvccReadPoint = -1L;
 setPriority(get.getPriority());
+super.setReplicaId(get.getReplicaId());
   }
 
   public boolean isGetScan() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f8281aa2/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
index 18dcb46..7ef9524 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
@@ -24,9 +24,13 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Set;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Scan.ReadType;
 import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -35,7 +39,6 @@ import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 
@@ -66,7 +69,7 @@ public class TestScan {
   }
 
   @Test
-  public void testGetToScan() throws IOException {
+  public void testGetToScan() throws Exception {
 Get get = new Get(Bytes.toBytes(1));
 get.setCacheBlocks(true)
 .setConsistency(Consistency.TIMELINE)
@@ -79,7 +82,12 @@ public class TestScan {
 .setRowOffsetPerColumnFamily(5)
 .setTimeRange(0, 13)
 .setAttribute("att_v0", Bytes.toBytes("att_v0"))
-.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 0, 123);
+.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 0, 123)
+.setReplicaId(3)
+.setACL("test_user", new Permission(Permission.Action.READ))
+.setAuthorizations(new Authorizations("test_label"))
+.setPriority(3);
+
 Scan scan = new Scan(get);
 assertEquals(get.getCacheBlocks(), scan.getCacheBlocks());
 assertEquals(get.getConsistency(), scan.getConsistency());
@@ -97,6 +105,10 @@ public class TestScan {
 scan.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMin());
 
assertEquals(get.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax(),
 scan.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax());
+assertEquals(get.getReplicaId(), scan.getReplicaId());
+assertEquals(get.getACL(), scan.getACL());
+assertEquals(get.getAuthorizations().getLabels(),
scan.getAuthorizations().getLabels());

hbase git commit: HBASE-20485 Copy constructor of Scan doesn't copy the readType and replicaId

2018-05-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 421ed6ca5 -> 55badefa3


HBASE-20485 Copy constructor of Scan doesn't copy the readType and replicaId

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/55badefa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/55badefa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/55badefa

Branch: refs/heads/branch-2
Commit: 55badefa3817b9dd43797b7e6acdd429782e81e1
Parents: 421ed6c
Author: Nihal Jain 
Authored: Wed Apr 25 12:17:14 2018 +0530
Committer: Chia-Ping Tsai 
Committed: Wed May 9 16:45:39 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Scan.java|  3 +
 .../apache/hadoop/hbase/client/TestScan.java| 94 +++-
 .../hbase/master/SplitOrMergeTracker.java   |  1 -
 3 files changed, 94 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/55badefa/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index b02bdc1..32fe2dc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -279,6 +279,8 @@ public class Scan extends Query {
 this.limit = scan.getLimit();
 this.needCursorResult = scan.isNeedCursorResult();
 setPriority(scan.getPriority());
+readType = scan.getReadType();
+super.setReplicaId(scan.getReplicaId());
   }
 
   /**
@@ -310,6 +312,7 @@ public class Scan extends Query {
 }
 this.mvccReadPoint = -1L;
 setPriority(get.getPriority());
+super.setReplicaId(get.getReplicaId());
   }
 
   public boolean isGetScan() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/55badefa/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
--
diff --git 
a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java 
b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
index 18dcb46..7ef9524 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
@@ -24,9 +24,13 @@ import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Set;
+
+import org.apache.commons.lang3.builder.EqualsBuilder;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.client.Scan.ReadType;
 import org.apache.hadoop.hbase.filter.FilterList;
+import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.security.visibility.Authorizations;
 import org.apache.hadoop.hbase.testclassification.ClientTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -35,7 +39,6 @@ import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos;
 
@@ -66,7 +69,7 @@ public class TestScan {
   }
 
   @Test
-  public void testGetToScan() throws IOException {
+  public void testGetToScan() throws Exception {
 Get get = new Get(Bytes.toBytes(1));
 get.setCacheBlocks(true)
 .setConsistency(Consistency.TIMELINE)
@@ -79,7 +82,12 @@ public class TestScan {
 .setRowOffsetPerColumnFamily(5)
 .setTimeRange(0, 13)
 .setAttribute("att_v0", Bytes.toBytes("att_v0"))
-.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 0, 123);
+.setColumnFamilyTimeRange(Bytes.toBytes("cf"), 0, 123)
+.setReplicaId(3)
+.setACL("test_user", new Permission(Permission.Action.READ))
+.setAuthorizations(new Authorizations("test_label"))
+.setPriority(3);
+
 Scan scan = new Scan(get);
 assertEquals(get.getCacheBlocks(), scan.getCacheBlocks());
 assertEquals(get.getConsistency(), scan.getConsistency());
@@ -97,6 +105,10 @@ public class TestScan {
 scan.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMin());
 
assertEquals(get.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax(),
 scan.getColumnFamilyTimeRange().get(Bytes.toBytes("cf")).getMax());
+assertEquals(get.getReplicaId(), scan.getReplicaId());
+assertEquals(get.getACL(), scan.getACL());
+assertEquals(get.getAuthorizations().getLabels(),
scan.getAuthorizations().getLabels());

hbase git commit: HBASE-20508 TestIncrementalBackupWithBulkLoad doesn't need to be Parameterized test

2018-05-06 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 971f5350e -> 5e14e125b


HBASE-20508 TestIncrementalBackupWithBulkLoad doesn't need to be Parameterized 
test

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5e14e125
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5e14e125
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5e14e125

Branch: refs/heads/master
Commit: 5e14e125b2d930dbe29760d5872fd78e9c8299fe
Parents: 971f535
Author: maoling 
Authored: Sun May 6 15:59:21 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon May 7 09:36:44 2018 +0800

--
 .../backup/TestIncrementalBackupWithBulkLoad.java   | 16 
 1 file changed, 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/5e14e125/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
--
diff --git 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
index 34f732c..74dd569 100644
--- 
a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
+++ 
b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestIncrementalBackupWithBulkLoad.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.backup;
 
 import static org.junit.Assert.assertTrue;
 
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -41,8 +39,6 @@ import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -57,7 +53,6 @@ import 
org.apache.hbase.thirdparty.com.google.common.collect.Lists;
  * 6 Incremental backup t1
  */
 @Category(LargeTests.class)
-@RunWith(Parameterized.class)
 public class TestIncrementalBackupWithBulkLoad extends TestBackupBase {
 
   @ClassRule
@@ -66,17 +61,6 @@ public class TestIncrementalBackupWithBulkLoad extends 
TestBackupBase {
 
   private static final Logger LOG = 
LoggerFactory.getLogger(TestIncrementalBackupDeleteTable.class);
 
-  @Parameterized.Parameters
-  public static Collection data() {
-secure = true;
-List params = new ArrayList<>();
-params.add(new Object[] {Boolean.TRUE});
-return params;
-  }
-
-  public TestIncrementalBackupWithBulkLoad(Boolean b) {
-  }
-
   // implement all test cases in 1 test since incremental backup/restore has 
dependencies
   @Test
   public void TestIncBackupDeleteTable() throws Exception {



hbase git commit: HBASE-20527 Remove unused code in MetaTableAccessor

2018-05-05 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 291dedbf8 -> acd0d1e44


HBASE-20527 Remove unused code in MetaTableAccessor

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/acd0d1e4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/acd0d1e4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/acd0d1e4

Branch: refs/heads/master
Commit: acd0d1e446c164d9c54bfb461b2d449c8d717c07
Parents: 291dedb
Author: Mingdao Yang 
Authored: Sat May 5 01:58:02 2018 +
Committer: Chia-Ping Tsai 
Committed: Sat May 5 22:15:54 2018 +0800

--
 .../java/org/apache/hadoop/hbase/MetaTableAccessor.java   | 10 --
 1 file changed, 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/acd0d1e4/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index e8ce811..91f3cf7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -143,16 +143,6 @@ public class MetaTableAccessor {
   private static final Logger LOG = 
LoggerFactory.getLogger(MetaTableAccessor.class);
   private static final Logger METALOG = 
LoggerFactory.getLogger("org.apache.hadoop.hbase.META");
 
-  private static final byte[] META_REGION_PREFIX;
-  static {
-// Copy the prefix from FIRST_META_REGIONINFO into META_REGION_PREFIX.
-// FIRST_META_REGIONINFO == 'hbase:meta,,1'.  META_REGION_PREFIX == 
'hbase:meta,'
-int len = RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName().length - 
2;
-META_REGION_PREFIX = new byte [len];
-System.arraycopy(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName(), 
0,
-  META_REGION_PREFIX, 0, len);
-  }
-
   @VisibleForTesting
   public static final byte[] REPLICATION_PARENT_QUALIFIER = 
Bytes.toBytes("parent");
 



hbase git commit: HBASE-20527 Remove unused code in MetaTableAccessor

2018-05-05 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 991d78ca2 -> 2373451f9


HBASE-20527 Remove unused code in MetaTableAccessor

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2373451f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2373451f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2373451f

Branch: refs/heads/branch-2
Commit: 2373451f94d8a71483213a82368031123a9d4815
Parents: 991d78c
Author: Mingdao Yang 
Authored: Sat May 5 01:58:02 2018 +
Committer: Chia-Ping Tsai 
Committed: Sat May 5 22:20:24 2018 +0800

--
 .../java/org/apache/hadoop/hbase/MetaTableAccessor.java   | 10 --
 1 file changed, 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2373451f/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index e8ce811..91f3cf7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -143,16 +143,6 @@ public class MetaTableAccessor {
   private static final Logger LOG = 
LoggerFactory.getLogger(MetaTableAccessor.class);
   private static final Logger METALOG = 
LoggerFactory.getLogger("org.apache.hadoop.hbase.META");
 
-  private static final byte[] META_REGION_PREFIX;
-  static {
-// Copy the prefix from FIRST_META_REGIONINFO into META_REGION_PREFIX.
-// FIRST_META_REGIONINFO == 'hbase:meta,,1'.  META_REGION_PREFIX == 
'hbase:meta,'
-int len = RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName().length - 
2;
-META_REGION_PREFIX = new byte [len];
-System.arraycopy(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName(), 
0,
-  META_REGION_PREFIX, 0, len);
-  }
-
   @VisibleForTesting
   public static final byte[] REPLICATION_PARENT_QUALIFIER = 
Bytes.toBytes("parent");
 



hbase git commit: HBASE-20527 Remove unused code in MetaTableAccessor

2018-05-05 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 81f69e585 -> d77989e9f


HBASE-20527 Remove unused code in MetaTableAccessor

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/d77989e9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/d77989e9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/d77989e9

Branch: refs/heads/branch-2.0
Commit: d77989e9f7b51f058d09694e5109e3aec05e0596
Parents: 81f69e5
Author: Mingdao Yang 
Authored: Sat May 5 22:26:12 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat May 5 22:27:27 2018 +0800

--
 .../java/org/apache/hadoop/hbase/MetaTableAccessor.java   | 10 --
 1 file changed, 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/d77989e9/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
index d6bbf53..1880a0d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/MetaTableAccessor.java
@@ -137,16 +137,6 @@ public class MetaTableAccessor {
   private static final Logger LOG = 
LoggerFactory.getLogger(MetaTableAccessor.class);
   private static final Logger METALOG = 
LoggerFactory.getLogger("org.apache.hadoop.hbase.META");
 
-  static final byte [] META_REGION_PREFIX;
-  static {
-// Copy the prefix from FIRST_META_REGIONINFO into META_REGION_PREFIX.
-// FIRST_META_REGIONINFO == 'hbase:meta,,1'.  META_REGION_PREFIX == 
'hbase:meta,'
-int len = RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName().length - 
2;
-META_REGION_PREFIX = new byte [len];
-System.arraycopy(RegionInfoBuilder.FIRST_META_REGIONINFO.getRegionName(), 
0,
-  META_REGION_PREFIX, 0, len);
-  }
-
   /**
* Lists all of the table regions currently in META.
* Deprecated, keep there until some test use this.



hbase git commit: HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

2018-05-01 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 86b763051 -> 6f89cec39


HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6f89cec3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6f89cec3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6f89cec3

Branch: refs/heads/branch-2.0
Commit: 6f89cec39bb256eef8f1da5b030b1b90e50a6d89
Parents: 86b7630
Author: Guangxu Cheng 
Authored: Wed Apr 25 10:55:52 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed May 2 10:08:20 2018 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterListBase.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6f89cec3/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index e02f7e2..d6bb75c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -56,8 +56,8 @@ public abstract class FilterListBase extends FilterBase {
 if (rowFilters.isEmpty()) {
   return defaultValue;
 }
-Boolean retValue = rowFilters.get(0).isReversed();
-boolean allEqual = 
rowFilters.stream().map(Filter::isReversed).allMatch(retValue::equals);
+boolean retValue = rowFilters.get(0).isReversed();
+boolean allEqual = rowFilters.stream().allMatch(f -> f.isReversed() == 
retValue);
 if (!allEqual) {
   throw new IllegalArgumentException("Filters in the list must have the 
same reversed flag");
 }



hbase git commit: HBASE-20327 When qualifier is not specified, append and incr operation do not work (shell)

2018-05-01 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 31e0cd42a -> 86b763051


HBASE-20327 When qualifier is not specified, append and incr operation do not 
work (shell)

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/86b76305
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/86b76305
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/86b76305

Branch: refs/heads/branch-2.0
Commit: 86b763051becbdea59f50280105c18a0ec379d90
Parents: 31e0cd4
Author: Nihal Jain 
Authored: Thu Apr 19 02:36:33 2018 +0530
Committer: Chia-Ping Tsai 
Committed: Wed May 2 09:59:04 2018 +0800

--
 hbase-shell/src/main/ruby/hbase/table.rb  |  8 ++--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 12 
 2 files changed, 14 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/86b76305/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index d12b30f..55211b0 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -251,14 +251,12 @@ EOF
 
 
#--
 # Increment a counter atomically
+# rubocop:disable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 def _incr_internal(row, column, value = nil, args = {})
   value = 1 if value.is_a?(Hash)
   value ||= 1
   incr = 
org.apache.hadoop.hbase.client.Increment.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for incr'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -282,9 +280,6 @@ EOF
 def _append_internal(row, column, value, args = {})
   append = 
org.apache.hadoop.hbase.client.Append.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for append'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -302,6 +297,7 @@ EOF
   org.apache.hadoop.hbase.util.Bytes.toStringBinary(cell.getValueArray,
 cell.getValueOffset, 
cell.getValueLength)
 end
+# rubocop:enable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 
 
#--
 # Count rows in a table

http://git-wip-us.apache.org/repos/asf/hbase/blob/86b76305/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 0885761..9b15f83 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -186,7 +186,19 @@ module Hbase
   @test_table.append("123", 'x:cnt2', '123')
   assert_equal("123123", @test_table._append_internal("123", 'x:cnt2', 
'123'))
 end
+
+define_test 'append should work without qualifier' do
+  @test_table.append('1001', 'x', '123')
+  assert_equal('123321', @test_table._append_internal('1001', 'x', '321'))
+end
+
 
#---
+define_test 'incr should work without qualifier' do
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(123, @test_table._get_counter_internal('1010', 'x'))
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(246, @test_table._get_counter_internal('1010', 'x'))
+end
 
 define_test "get_counter should work with integer keys" do
   @test_table.incr(12345, 'x:cnt')



hbase git commit: HBASE-20327 When qualifier is not specified, append and incr operation do not work (shell)

2018-04-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 31e3ef679 -> ce08826f0


HBASE-20327 When qualifier is not specified, append and incr operation do not 
work (shell)

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ce08826f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ce08826f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ce08826f

Branch: refs/heads/branch-2
Commit: ce08826f0d3c275efefd3f485929196420a586d4
Parents: 31e3ef6
Author: Nihal Jain 
Authored: Thu Apr 19 02:36:33 2018 +0530
Committer: Chia-Ping Tsai 
Committed: Sat Apr 28 12:47:45 2018 +0800

--
 hbase-shell/src/main/ruby/hbase/table.rb  |  8 ++--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 12 
 2 files changed, 14 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ce08826f/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index d12b30f..55211b0 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -251,14 +251,12 @@ EOF
 
 
#--
 # Increment a counter atomically
+# rubocop:disable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 def _incr_internal(row, column, value = nil, args = {})
   value = 1 if value.is_a?(Hash)
   value ||= 1
   incr = 
org.apache.hadoop.hbase.client.Increment.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for incr'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -282,9 +280,6 @@ EOF
 def _append_internal(row, column, value, args = {})
   append = 
org.apache.hadoop.hbase.client.Append.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for append'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -302,6 +297,7 @@ EOF
   org.apache.hadoop.hbase.util.Bytes.toStringBinary(cell.getValueArray,
 cell.getValueOffset, 
cell.getValueLength)
 end
+# rubocop:enable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 
 
#--
 # Count rows in a table

http://git-wip-us.apache.org/repos/asf/hbase/blob/ce08826f/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 0885761..9b15f83 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -186,7 +186,19 @@ module Hbase
   @test_table.append("123", 'x:cnt2', '123')
   assert_equal("123123", @test_table._append_internal("123", 'x:cnt2', 
'123'))
 end
+
+define_test 'append should work without qualifier' do
+  @test_table.append('1001', 'x', '123')
+  assert_equal('123321', @test_table._append_internal('1001', 'x', '321'))
+end
+
 
#---
+define_test 'incr should work without qualifier' do
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(123, @test_table._get_counter_internal('1010', 'x'))
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(246, @test_table._get_counter_internal('1010', 'x'))
+end
 
 define_test "get_counter should work with integer keys" do
   @test_table.incr(12345, 'x:cnt')



hbase git commit: HBASE-20327 When qualifier is not specified, append and incr operation do not work (shell)

2018-04-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 4be96dd8a -> 59f6ecd6b


HBASE-20327 When qualifier is not specified, append and incr operation do not 
work (shell)

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/59f6ecd6
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/59f6ecd6
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/59f6ecd6

Branch: refs/heads/master
Commit: 59f6ecd6b2155f65fa9d0b6a8bae47fec0ecbeb4
Parents: 4be96dd
Author: Nihal Jain 
Authored: Thu Apr 19 02:36:33 2018 +0530
Committer: Chia-Ping Tsai 
Committed: Sat Apr 28 12:55:30 2018 +0800

--
 hbase-shell/src/main/ruby/hbase/table.rb  |  8 ++--
 hbase-shell/src/test/ruby/hbase/table_test.rb | 12 
 2 files changed, 14 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/59f6ecd6/hbase-shell/src/main/ruby/hbase/table.rb
--
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb 
b/hbase-shell/src/main/ruby/hbase/table.rb
index d12b30f..55211b0 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -251,14 +251,12 @@ EOF
 
 
#--
 # Increment a counter atomically
+# rubocop:disable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 def _incr_internal(row, column, value = nil, args = {})
   value = 1 if value.is_a?(Hash)
   value ||= 1
   incr = 
org.apache.hadoop.hbase.client.Increment.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for incr'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -282,9 +280,6 @@ EOF
 def _append_internal(row, column, value, args = {})
   append = 
org.apache.hadoop.hbase.client.Append.new(row.to_s.to_java_bytes)
   family, qualifier = parse_column_name(column)
-  if qualifier.nil?
-raise ArgumentError, 'Failed to provide both column family and column 
qualifier for append'
-  end
   if args.any?
 attributes = args[ATTRIBUTES]
 visibility = args[VISIBILITY]
@@ -302,6 +297,7 @@ EOF
   org.apache.hadoop.hbase.util.Bytes.toStringBinary(cell.getValueArray,
 cell.getValueOffset, 
cell.getValueLength)
 end
+# rubocop:enable Metrics/AbcSize, CyclomaticComplexity, MethodLength
 
 
#--
 # Count rows in a table

http://git-wip-us.apache.org/repos/asf/hbase/blob/59f6ecd6/hbase-shell/src/test/ruby/hbase/table_test.rb
--
diff --git a/hbase-shell/src/test/ruby/hbase/table_test.rb 
b/hbase-shell/src/test/ruby/hbase/table_test.rb
index 0885761..9b15f83 100644
--- a/hbase-shell/src/test/ruby/hbase/table_test.rb
+++ b/hbase-shell/src/test/ruby/hbase/table_test.rb
@@ -186,7 +186,19 @@ module Hbase
   @test_table.append("123", 'x:cnt2', '123')
   assert_equal("123123", @test_table._append_internal("123", 'x:cnt2', 
'123'))
 end
+
+define_test 'append should work without qualifier' do
+  @test_table.append('1001', 'x', '123')
+  assert_equal('123321', @test_table._append_internal('1001', 'x', '321'))
+end
+
 
#---
+define_test 'incr should work without qualifier' do
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(123, @test_table._get_counter_internal('1010', 'x'))
+  @test_table.incr('1010', 'x', 123)
+  assert_equal(246, @test_table._get_counter_internal('1010', 'x'))
+end
 
 define_test "get_counter should work with integer keys" do
   @test_table.incr(12345, 'x:cnt')



hbase git commit: HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

2018-04-26 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 101055e34 -> 3fc2c3bf3


HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3fc2c3bf
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3fc2c3bf
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3fc2c3bf

Branch: refs/heads/branch-2
Commit: 3fc2c3bf34c473529bc3f164b0e96c59d068a77d
Parents: 101055e
Author: Guangxu Cheng 
Authored: Wed Apr 25 10:55:52 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Apr 26 16:32:14 2018 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterListBase.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3fc2c3bf/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index e02f7e2..d6bb75c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -56,8 +56,8 @@ public abstract class FilterListBase extends FilterBase {
 if (rowFilters.isEmpty()) {
   return defaultValue;
 }
-Boolean retValue = rowFilters.get(0).isReversed();
-boolean allEqual = 
rowFilters.stream().map(Filter::isReversed).allMatch(retValue::equals);
+boolean retValue = rowFilters.get(0).isReversed();
+boolean allEqual = rowFilters.stream().allMatch(f -> f.isReversed() == 
retValue);
 if (!allEqual) {
   throw new IllegalArgumentException("Filters in the list must have the 
same reversed flag");
 }



hbase git commit: HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

2018-04-26 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 8a30acf46 -> f39ecac48


HBASE-20484 Remove the unnecessary autoboxing in FilterListBase

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f39ecac4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f39ecac4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f39ecac4

Branch: refs/heads/master
Commit: f39ecac4882cb2f92490c66f385ed4c6387f4510
Parents: 8a30acf
Author: Guangxu Cheng 
Authored: Wed Apr 25 10:55:52 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Apr 26 16:32:03 2018 +0800

--
 .../main/java/org/apache/hadoop/hbase/filter/FilterListBase.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f39ecac4/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
index e02f7e2..d6bb75c 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListBase.java
@@ -56,8 +56,8 @@ public abstract class FilterListBase extends FilterBase {
 if (rowFilters.isEmpty()) {
   return defaultValue;
 }
-Boolean retValue = rowFilters.get(0).isReversed();
-boolean allEqual = 
rowFilters.stream().map(Filter::isReversed).allMatch(retValue::equals);
+boolean retValue = rowFilters.get(0).isReversed();
+boolean allEqual = rowFilters.stream().allMatch(f -> f.isReversed() == 
retValue);
 if (!allEqual) {
   throw new IllegalArgumentException("Filters in the list must have the 
same reversed flag");
 }



hbase git commit: HBASE-20301 Remove the meaningless plus sign from table.jsp

2018-04-04 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 0ccdffe95 -> 382c5f079


HBASE-20301 Remove the meaningless plus sign from table.jsp

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/382c5f07
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/382c5f07
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/382c5f07

Branch: refs/heads/branch-1.4
Commit: 382c5f0791af7d7489f580a3abb258075579ad37
Parents: 0ccdffe
Author: Chia-Ping Tsai 
Authored: Wed Mar 28 14:46:35 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Apr 4 20:11:14 2018 +0800

--
 hbase-server/src/main/resources/hbase-webapps/master/table.jsp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/382c5f07/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
--
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 86a5a76..44f0a64 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -665,7 +665,7 @@ ShowDetailName&Start/End Key

hbase git commit: HBASE-20301 Remove the meaningless plus sign from table.jsp

2018-04-04 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 2eae8104d -> 2f683cd43


HBASE-20301 Remove the meaningless plus sign from table.jsp

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/2f683cd4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/2f683cd4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/2f683cd4

Branch: refs/heads/branch-1
Commit: 2f683cd4386e99381fcab769ead21e1385f494e9
Parents: 2eae810
Author: Chia-Ping Tsai 
Authored: Wed Mar 28 14:46:35 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Apr 4 20:10:46 2018 +0800

--
 hbase-server/src/main/resources/hbase-webapps/master/table.jsp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/2f683cd4/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
--
diff --git a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp 
b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
index 5fa068c..2d77e57 100644
--- a/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
+++ b/hbase-server/src/main/resources/hbase-webapps/master/table.jsp
@@ -672,7 +672,7 @@ ShowDetailName&Start/End Key

[1/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master cd5a821c7 -> ad47c2daf


http://git-wip-us.apache.org/repos/asf/hbase/blob/ad47c2da/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 3272afa..3526689 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1738,7 +1738,7 @@ public class TestHRegion {
 
   // checkAndPut with empty value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  emptyVal), put, true);
+  emptyVal), put);
   assertTrue(res);
 
   // Putting data in key
@@ -1747,25 +1747,25 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertTrue(res);
 
   // not empty anymore
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertFalse(res);
 
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertFalse(res);
 
   put = new Put(row1);
   put.addColumn(fam1, qf1, val2);
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  put, true);
+  put);
   assertTrue(res);
 
   // checkAndDelete with correct value
@@ -1773,12 +1773,12 @@ public class TestHRegion {
   delete.addColumn(fam1, qf1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  delete, true);
+  delete);
   assertTrue(res);
 
   delete = new Delete(row1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertTrue(res);
 
   // checkAndPut looking for a null value
@@ -1786,7 +1786,7 @@ public class TestHRegion {
   put.addColumn(fam1, qf1, val1);
 
   res = region
-  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put, true);
+  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put);
   assertTrue(res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1814,14 +1814,14 @@ public class TestHRegion {
 
   // checkAndPut with wrong value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val2), put, true);
+  val2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
   Delete delete = new Delete(row1);
   delete.addFamily(fam1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  put, true);
+  put);
   assertEquals(false, res);
 
   // Putting data in key
@@ -1832,7 +1832,7 @@ public class TestHRegion {
   // checkAndPut with wrong value
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
@@ -1840,7 +1840,7 @@ public class TestHRegion {
   delete.addFamily(fam1);
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1866,14 +1866,14 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val1), put, true);
+  val1), put);
   assertEquals(true, res);
 
   // checkAndDelete with correct value
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  delete, true);
+  delete);
   assertEquals(true, res);
 
   // Putting data in key
@@ -1884,7 +1884,7 @@ public class TestHRegion {
  

[2/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
HBASE-19504 Add TimeRange support into checkAndMutate

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ad47c2da
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ad47c2da
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ad47c2da

Branch: refs/heads/master
Commit: ad47c2daf4d9dc3b85ec91e0fe8385aa6dd9c492
Parents: cd5a821
Author: Chia-Ping Tsai 
Authored: Sat Mar 24 00:05:41 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 24 00:12:38 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Append.java  |   2 +-
 .../apache/hadoop/hbase/client/AsyncTable.java  |   8 +-
 .../hadoop/hbase/client/AsyncTableImpl.java |   9 +-
 .../org/apache/hadoop/hbase/client/Get.java |   2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  | 111 +++--
 .../apache/hadoop/hbase/client/Increment.java   |   2 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  15 +-
 .../org/apache/hadoop/hbase/client/Scan.java|   2 +-
 .../org/apache/hadoop/hbase/client/Table.java   |   6 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  81 +++---
 .../hbase/shaded/protobuf/ProtobufUtil.java | 108 +
 .../hbase/shaded/protobuf/RequestConverter.java |  77 -
 .../hbase/shaded/protobuf/TestProtobufUtil.java |   6 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java   |  17 ++
 .../src/main/protobuf/Client.proto  |   1 +
 hbase-protocol/src/main/protobuf/Client.proto   |   1 +
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   5 +
 .../hadoop/hbase/regionserver/HRegion.java  |  24 +--
 .../hbase/regionserver/RSRpcServices.java   | 158 ++-
 .../hadoop/hbase/regionserver/Region.java   |  55 +--
 .../hadoop/hbase/client/TestAsyncTable.java |  63 
 .../hadoop/hbase/client/TestFromClientSide.java |  55 +++
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hadoop/hbase/protobuf/TestProtobufUtil.java |   5 +
 .../hbase/regionserver/TestAtomicOperation.java |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  68 
 .../TestSimpleTimeRangeTracker.java |  10 +-
 27 files changed, 529 insertions(+), 366 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ad47c2da/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 61474b7..3a08d68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
 public class Append extends Mutation {
   private static final Logger LOG = LoggerFactory.getLogger(Append.class);
   private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + 
ClassSize.TIMERANGE;
-  private TimeRange tr = new TimeRange();
+  private TimeRange tr = TimeRange.allTime();
 
   /**
* Sets the TimeRange to be used on the Get for this append.

http://git-wip-us.apache.org/repos/asf/hbase/blob/ad47c2da/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
index 37c80b3..cc1ba87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
@@ -22,15 +22,14 @@ import static 
org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
 import static 
org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
 
 import com.google.protobuf.RpcChannel;
-
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -236,6 +235,11 @@ public interface AsyncTable {
 CheckAndMutateBuilder qualifier(byte[] qualifier);
 
 /**
+ * @param timeRange time range to check.
+ */
+CheckAndMutateBuilder timeRange(TimeRange timeRange);
+
+/**
  * Check for lack of column.
  */
 CheckAndMutateBuilder ifNotExists();

http://git-wip-us.apache.org/repos/asf/hbase/blob/ad47c2da/hbase-client/src/main/java/org/ap

[1/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 489e875a7 -> 6aba045aa


http://git-wip-us.apache.org/repos/asf/hbase/blob/6aba045a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 3272afa..3526689 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1738,7 +1738,7 @@ public class TestHRegion {
 
   // checkAndPut with empty value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  emptyVal), put, true);
+  emptyVal), put);
   assertTrue(res);
 
   // Putting data in key
@@ -1747,25 +1747,25 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertTrue(res);
 
   // not empty anymore
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertFalse(res);
 
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertFalse(res);
 
   put = new Put(row1);
   put.addColumn(fam1, qf1, val2);
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  put, true);
+  put);
   assertTrue(res);
 
   // checkAndDelete with correct value
@@ -1773,12 +1773,12 @@ public class TestHRegion {
   delete.addColumn(fam1, qf1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  delete, true);
+  delete);
   assertTrue(res);
 
   delete = new Delete(row1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertTrue(res);
 
   // checkAndPut looking for a null value
@@ -1786,7 +1786,7 @@ public class TestHRegion {
   put.addColumn(fam1, qf1, val1);
 
   res = region
-  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put, true);
+  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put);
   assertTrue(res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1814,14 +1814,14 @@ public class TestHRegion {
 
   // checkAndPut with wrong value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val2), put, true);
+  val2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
   Delete delete = new Delete(row1);
   delete.addFamily(fam1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  put, true);
+  put);
   assertEquals(false, res);
 
   // Putting data in key
@@ -1832,7 +1832,7 @@ public class TestHRegion {
   // checkAndPut with wrong value
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
@@ -1840,7 +1840,7 @@ public class TestHRegion {
   delete.addFamily(fam1);
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1866,14 +1866,14 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val1), put, true);
+  val1), put);
   assertEquals(true, res);
 
   // checkAndDelete with correct value
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  delete, true);
+  delete);
   assertEquals(true, res);
 
   // Putting data in key
@@ -1884,7 +1884,7 @@ public class TestHRegion {

[2/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
HBASE-19504 Add TimeRange support into checkAndMutate

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6aba045a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6aba045a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6aba045a

Branch: refs/heads/branch-2
Commit: 6aba045aaed0f2f729e6a7409fdefaac35815b1e
Parents: 489e875
Author: Chia-Ping Tsai 
Authored: Sat Mar 24 00:00:36 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 24 00:05:22 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Append.java  |   2 +-
 .../apache/hadoop/hbase/client/AsyncTable.java  |   8 +-
 .../hadoop/hbase/client/AsyncTableImpl.java |   9 +-
 .../org/apache/hadoop/hbase/client/Get.java |   2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  | 111 +++--
 .../apache/hadoop/hbase/client/Increment.java   |   2 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  15 +-
 .../org/apache/hadoop/hbase/client/Scan.java|   2 +-
 .../org/apache/hadoop/hbase/client/Table.java   |   6 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  81 +++---
 .../hbase/shaded/protobuf/ProtobufUtil.java | 108 +
 .../hbase/shaded/protobuf/RequestConverter.java |  77 -
 .../hbase/shaded/protobuf/TestProtobufUtil.java |   6 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java   |  17 ++
 .../src/main/protobuf/Client.proto  |   1 +
 hbase-protocol/src/main/protobuf/Client.proto   |   1 +
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   5 +
 .../hadoop/hbase/regionserver/HRegion.java  |  24 +--
 .../hbase/regionserver/RSRpcServices.java   | 158 ++-
 .../hadoop/hbase/regionserver/Region.java   |  55 +--
 .../hadoop/hbase/client/TestAsyncTable.java |  63 
 .../hadoop/hbase/client/TestFromClientSide.java |  55 +++
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hadoop/hbase/protobuf/TestProtobufUtil.java |   5 +
 .../hbase/regionserver/TestAtomicOperation.java |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  68 
 .../TestSimpleTimeRangeTracker.java |  10 +-
 27 files changed, 529 insertions(+), 366 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/6aba045a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 61474b7..3a08d68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
 public class Append extends Mutation {
   private static final Logger LOG = LoggerFactory.getLogger(Append.class);
   private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + 
ClassSize.TIMERANGE;
-  private TimeRange tr = new TimeRange();
+  private TimeRange tr = TimeRange.allTime();
 
   /**
* Sets the TimeRange to be used on the Get for this append.

http://git-wip-us.apache.org/repos/asf/hbase/blob/6aba045a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
index 37c80b3..cc1ba87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
@@ -22,15 +22,14 @@ import static 
org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
 import static 
org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
 
 import com.google.protobuf.RpcChannel;
-
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -236,6 +235,11 @@ public interface AsyncTable {
 CheckAndMutateBuilder qualifier(byte[] qualifier);
 
 /**
+ * @param timeRange time range to check.
+ */
+CheckAndMutateBuilder timeRange(TimeRange timeRange);
+
+/**
  * Check for lack of column.
  */
 CheckAndMutateBuilder ifNotExists();

http://git-wip-us.apache.org/repos/asf/hbase/blob/6aba045a/hbase-client/src/main/java/org/

[1/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 4df0b4f0e -> 468cc059d


http://git-wip-us.apache.org/repos/asf/hbase/blob/468cc059/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
index 31dfa2a..de848f7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
@@ -1738,7 +1738,7 @@ public class TestHRegion {
 
   // checkAndPut with empty value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  emptyVal), put, true);
+  emptyVal), put);
   assertTrue(res);
 
   // Putting data in key
@@ -1747,25 +1747,25 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertTrue(res);
 
   // not empty anymore
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  put, true);
+  put);
   assertFalse(res);
 
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertFalse(res);
 
   put = new Put(row1);
   put.addColumn(fam1, qf1, val2);
   // checkAndPut with correct value
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  put, true);
+  put);
   assertTrue(res);
 
   // checkAndDelete with correct value
@@ -1773,12 +1773,12 @@ public class TestHRegion {
   delete.addColumn(fam1, qf1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  delete, true);
+  delete);
   assertTrue(res);
 
   delete = new Delete(row1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(emptyVal),
-  delete, true);
+  delete);
   assertTrue(res);
 
   // checkAndPut looking for a null value
@@ -1786,7 +1786,7 @@ public class TestHRegion {
   put.addColumn(fam1, qf1, val1);
 
   res = region
-  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put, true);
+  .checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
NullComparator(), put);
   assertTrue(res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1814,14 +1814,14 @@ public class TestHRegion {
 
   // checkAndPut with wrong value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val2), put, true);
+  val2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
   Delete delete = new Delete(row1);
   delete.addFamily(fam1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val2),
-  put, true);
+  put);
   assertEquals(false, res);
 
   // Putting data in key
@@ -1832,7 +1832,7 @@ public class TestHRegion {
   // checkAndPut with wrong value
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 
   // checkAndDelete with wrong value
@@ -1840,7 +1840,7 @@ public class TestHRegion {
   delete.addFamily(fam1);
   res =
   region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BigDecimalComparator(
-  bd2), put, true);
+  bd2), put);
   assertEquals(false, res);
 } finally {
   HBaseTestingUtility.closeRegionAndWAL(this.region);
@@ -1866,14 +1866,14 @@ public class TestHRegion {
 
   // checkAndPut with correct value
   boolean res = region.checkAndMutate(row1, fam1, qf1, 
CompareOperator.EQUAL, new BinaryComparator(
-  val1), put, true);
+  val1), put);
   assertEquals(true, res);
 
   // checkAndDelete with correct value
   Delete delete = new Delete(row1);
   delete.addColumn(fam1, qf1);
   res = region.checkAndMutate(row1, fam1, qf1, CompareOperator.EQUAL, new 
BinaryComparator(val1),
-  delete, true);
+  delete);
   assertEquals(true, res);
 
   // Putting data in key
@@ -1884,7 +1884,7 @@ public class TestHRegion {
  

[2/2] hbase git commit: HBASE-19504 Add TimeRange support into checkAndMutate

2018-03-23 Thread chia7712
HBASE-19504 Add TimeRange support into checkAndMutate

Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/468cc059
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/468cc059
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/468cc059

Branch: refs/heads/branch-2.0
Commit: 468cc059d896c21a8399fa7bede0688754ca9f15
Parents: 4df0b4f
Author: Chia-Ping Tsai 
Authored: Sat Mar 24 00:00:36 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 24 00:07:51 2018 +0800

--
 .../org/apache/hadoop/hbase/client/Append.java  |   2 +-
 .../apache/hadoop/hbase/client/AsyncTable.java  |   8 +-
 .../hadoop/hbase/client/AsyncTableImpl.java |   9 +-
 .../org/apache/hadoop/hbase/client/Get.java |   2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  | 111 +++--
 .../apache/hadoop/hbase/client/Increment.java   |   2 +-
 .../hadoop/hbase/client/RawAsyncTableImpl.java  |  15 +-
 .../org/apache/hadoop/hbase/client/Scan.java|   2 +-
 .../org/apache/hadoop/hbase/client/Table.java   |   6 +
 .../hadoop/hbase/protobuf/ProtobufUtil.java |  81 +++---
 .../hbase/shaded/protobuf/ProtobufUtil.java | 108 +
 .../hbase/shaded/protobuf/RequestConverter.java |  77 -
 .../hbase/shaded/protobuf/TestProtobufUtil.java |   6 +-
 .../org/apache/hadoop/hbase/io/TimeRange.java   |  17 ++
 .../src/main/protobuf/Client.proto  |   1 +
 hbase-protocol/src/main/protobuf/Client.proto   |   1 +
 .../hadoop/hbase/rest/client/RemoteHTable.java  |   5 +
 .../hadoop/hbase/regionserver/HRegion.java  |  24 +--
 .../hbase/regionserver/RSRpcServices.java   | 158 ++-
 .../hadoop/hbase/regionserver/Region.java   |  55 +--
 .../hadoop/hbase/client/TestAsyncTable.java |  63 
 .../hadoop/hbase/client/TestFromClientSide.java |  55 +++
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hadoop/hbase/protobuf/TestProtobufUtil.java |   5 +
 .../hbase/regionserver/TestAtomicOperation.java |   2 +-
 .../hadoop/hbase/regionserver/TestHRegion.java  |  68 
 .../TestSimpleTimeRangeTracker.java |  10 +-
 27 files changed, 529 insertions(+), 366 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/468cc059/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
index 61474b7..3a08d68 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Append.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
 public class Append extends Mutation {
   private static final Logger LOG = LoggerFactory.getLogger(Append.class);
   private static final long HEAP_OVERHEAD = ClassSize.REFERENCE + 
ClassSize.TIMERANGE;
-  private TimeRange tr = new TimeRange();
+  private TimeRange tr = TimeRange.allTime();
 
   /**
* Sets the TimeRange to be used on the Get for this append.

http://git-wip-us.apache.org/repos/asf/hbase/blob/468cc059/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
index 37c80b3..cc1ba87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncTable.java
@@ -22,15 +22,14 @@ import static 
org.apache.hadoop.hbase.client.ConnectionUtils.allOf;
 import static 
org.apache.hadoop.hbase.client.ConnectionUtils.toCheckExistenceOnly;
 
 import com.google.protobuf.RpcChannel;
-
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.CompareOperator;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 
@@ -236,6 +235,11 @@ public interface AsyncTable {
 CheckAndMutateBuilder qualifier(byte[] qualifier);
 
 /**
+ * @param timeRange time range to check.
+ */
+CheckAndMutateBuilder timeRange(TimeRange timeRange);
+
+/**
  * Check for lack of column.
  */
 CheckAndMutateBuilder ifNotExists();

http://git-wip-us.apache.org/repos/asf/hbase/blob/468cc059/hbase-client/src/main/java/or

[2/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 6fdc77e..5008354 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -18,14 +18,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.Service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.rmi.registry.LocateRegistry;
@@ -34,13 +26,20 @@ import java.rmi.server.RMIClientSocketFactory;
 import java.rmi.server.RMIServerSocketFactory;
 import java.rmi.server.UnicastRemoteObject;
 import java.util.HashMap;
-import java.util.Optional;
-
 import javax.management.MBeanServer;
 import javax.management.remote.JMXConnectorServer;
 import javax.management.remote.JMXConnectorServerFactory;
 import javax.management.remote.JMXServiceURL;
 import javax.management.remote.rmi.RMIConnectorServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Pluggable JMX Agent for HBase(to fix the 2 random TCP ports issue
@@ -49,6 +48,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
  * 2)support password authentication
  * 3)support subset of SSL (with default configuration)
  */
+@InterfaceAudience.Private
 public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor 
{
   private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
   public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
index d505d6f..5dffb73 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
@@ -13,14 +13,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.rmi.ssl.SslRMIClientSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Private
 public class SslRMIClientSocketFactorySecure extends SslRMIClientSocketFactory 
{
   @Override
   public Socket createSocket(String host, int port) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
index 3583afe..8a92236 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
@@ -14,14 +14,15 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.net.ssl.SSLSocketFactory;
 import javax.rmi.ssl.SslRMIServerSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
+@InterfaceAudience.Private
 public class SslRMIServerSocketFactorySecure extends SslRMIServerSocketFactory 
{
   // If you add more constructors, you may have to change the rest of this 
implementation,
   // which assumes an empty constructor, i.e. there are no specially enabled 
protocols or

http://git-wip-us.a

[4/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
HBASE-20212 Make all Public classes have InterfaceAudience category

Signed-off-by: tedyu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a6eeb26c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a6eeb26c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a6eeb26c

Branch: refs/heads/master
Commit: a6eeb26cc0b4d0af3fff50b5b931b6847df1f9d2
Parents: 68b2f55
Author: Chia-Ping Tsai 
Authored: Tue Mar 20 15:46:51 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 22 18:10:23 2018 +0800

--
 hbase-annotations/pom.xml   |   4 +
 hbase-backup/pom.xml|   4 +
 hbase-client/pom.xml|   4 +
 .../hbase/TestInterfaceAudienceAnnotations.java | 540 ---
 hbase-common/pom.xml|   4 +
 .../apache/hadoop/hbase/trace/TraceUtil.java|   2 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  11 +-
 .../hadoop/hbase/util/JSONMetricUtil.java   |  11 +-
 hbase-endpoint/pom.xml  |   4 +
 hbase-examples/pom.xml  |   4 +
 .../client/example/AsyncClientExample.java  |   3 +-
 .../client/example/BufferedMutatorExample.java  |   3 +-
 .../client/example/ExportEndpointExample.java   |  15 +-
 .../hbase/client/example/HttpProxyExample.java  |   3 +-
 .../example/MultiThreadedClientExample.java |   3 +-
 .../client/example/RefreshHFilesClient.java |   7 +-
 .../coprocessor/example/BulkDeleteEndpoint.java |  10 +-
 .../example/DelegatingInternalScanner.java  |   3 +-
 .../ExampleMasterObserverWithMetrics.java   |   3 +-
 .../ExampleRegionObserverWithMetrics.java   |   3 +-
 .../example/RefreshHFilesEndpoint.java  |   8 +-
 .../coprocessor/example/RowCountEndpoint.java   |  10 +-
 .../example/ScanModifyingObserver.java  |   3 +-
 .../example/ValueRewritingObserver.java |   3 +-
 .../example/WriteHeavyIncrementObserver.java|   3 +-
 .../example/ZooKeeperScanPolicyObserver.java|   3 +-
 .../hadoop/hbase/mapreduce/IndexBuilder.java|   3 +-
 .../hadoop/hbase/mapreduce/SampleUploader.java  |   3 +-
 .../apache/hadoop/hbase/thrift/DemoClient.java  |   4 +-
 .../hadoop/hbase/thrift/HttpDoAsClient.java |   5 +-
 .../apache/hadoop/hbase/thrift2/DemoClient.java |   5 +-
 .../org/apache/hadoop/hbase/types/PBCell.java   |   5 +-
 hbase-external-blockcache/pom.xml   |   4 +
 hbase-hadoop-compat/pom.xml |   4 +
 .../hadoop/hbase/CompatibilityFactory.java  |   2 +
 .../hbase/CompatibilitySingletonFactory.java|   2 +
 .../apache/hadoop/hbase/io/MetricsIOSource.java |   2 +
 .../hadoop/hbase/io/MetricsIOWrapper.java   |   3 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   2 +
 .../ipc/MetricsHBaseServerSourceFactory.java|   3 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   3 +
 .../master/MetricsAssignmentManagerSource.java  |   2 +
 .../master/MetricsMasterFileSystemSource.java   |   2 +
 .../hbase/master/MetricsMasterProcSource.java   |   2 +
 .../master/MetricsMasterProcSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterQuotaSource.java  |   2 +
 .../master/MetricsMasterQuotaSourceFactory.java |   3 +
 .../hbase/master/MetricsMasterSource.java   |   2 +
 .../master/MetricsMasterSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterWrapper.java  |   2 +
 .../hbase/master/MetricsSnapshotSource.java |   2 +
 .../master/balancer/MetricsBalancerSource.java  |   2 +
 .../MetricsStochasticBalancerSource.java|   7 +-
 .../apache/hadoop/hbase/metrics/BaseSource.java |   3 +
 .../hbase/metrics/ExceptionTrackingSource.java  |   3 +
 .../hbase/metrics/JvmPauseMonitorSource.java|   3 +
 .../hadoop/hbase/metrics/MBeanSource.java   |   2 +
 .../hadoop/hbase/metrics/OperationMetrics.java  |   3 +
 .../MetricsHeapMemoryManagerSource.java |   2 +
 .../MetricsRegionAggregateSource.java   |   2 +
 .../MetricsRegionServerQuotaSource.java |   2 +
 .../regionserver/MetricsRegionServerSource.java |   2 +
 .../MetricsRegionServerSourceFactory.java   |   2 +
 .../MetricsRegionServerWrapper.java |   3 +
 .../hbase/regionserver/MetricsRegionSource.java |   2 +
 .../regionserver/MetricsRegionWrapper.java  |   3 +
 .../MetricsTableAggregateSource.java|   2 +
 .../regionserver/MetricsTableLatencies.java |   3 +
 .../hbase/regionserver/MetricsTableSource.java  |   3 +
 .../MetricsTableWrapperAggregate.java   |   2 +
 .../regionserver/wal/MetricsWALSource.java  |   2 +
 .../MetricsReplicationSinkSource.java   |   3 +
 .../regionserver/MetricsReplicationSource.java  |   2 +
 .../MetricsReplicationSourceFactory.java|   3 +
 .../MetricsReplicationSourceSource.java |   2 +
 

[3/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
index 6cf942b..91dc71a 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterFileSystemSource.java
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
+@InterfaceAudience.Private
 public interface MetricsMasterFileSystemSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
index 51a17a8..db4f25e 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSource.java
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
index b282e06..197f9f9 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterSource when given a 
MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSourceFactory {
 
   MetricsMasterProcSource create(MetricsMasterWrapper masterWrapper);

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
index 99c5441..8450432 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
@@ -17,10 +17,12 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A collection of exposed metrics for space quotas from the HBase Master.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSource extends BaseSource {
 
   String METRICS_NAME = "Quotas";

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
index 6e10746..2dcd945 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterQuotaS

[1/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 68b2f5502 -> a6eeb26cc


http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java
--
diff --git 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java
 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java
index cb9e0c7..b5143de 100644
--- 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java
+++ 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkGetExample.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.spark.example.hbasecontext;
 import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -32,11 +31,13 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a simple example of getting records in HBase
  * with the bulkGet function.
  */
+@InterfaceAudience.Private
 final public class JavaHBaseBulkGetExample {
 
   private JavaHBaseBulkGetExample() {}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java
--
diff --git 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java
 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java
index f0f3e79..6738059 100644
--- 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java
+++ 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkLoadExample.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.spark.example.hbasecontext;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -33,6 +32,7 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Run this example using command below:
@@ -45,6 +45,7 @@ import org.apache.spark.api.java.function.Function;
  * 'hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles' to load the 
HFiles into table to
  * verify this example.
  */
+@InterfaceAudience.Private
 final public class JavaHBaseBulkLoadExample {
   private JavaHBaseBulkLoadExample() {}
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java
--
diff --git 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java
 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java
index 5821c19..4a80b96 100644
--- 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java
+++ 
b/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseBulkPutExample.java
@@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.spark.example.hbasecontext;
 
 import java.util.ArrayList;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -29,11 +28,13 @@ import org.apache.spark.SparkConf;
 import org.apache.spark.api.java.JavaRDD;
 import org.apache.spark.api.java.JavaSparkContext;
 import org.apache.spark.api.java.function.Function;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This is a simple example of putting records in HBase
  * with the bulkPut function.
  */
+@InterfaceAudience.Private
 final public class JavaHBaseBulkPutExample {
 
   private JavaHBaseBulkPutExample() {}

http://git-wip-us.apache.org/repos/asf/hbase/blob/a6eeb26c/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/example/hbasecontext/JavaHBaseDistributedScan.java
--
diff --git 
a/hbase-spark/src/main/java/org/apache/hadoop/hbase/spark/

[3/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
index b282e06..197f9f9 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterSource when given a 
MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSourceFactory {
 
   MetricsMasterProcSource create(MetricsMasterWrapper masterWrapper);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
index 99c5441..8450432 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
@@ -17,10 +17,12 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A collection of exposed metrics for space quotas from the HBase Master.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSource extends BaseSource {
 
   String METRICS_NAME = "Quotas";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
index 6e10746..2dcd945 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterQuotaSource when given a 
MetricsMasterWrapper.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSourceFactory {
 
   MetricsMasterQuotaSource create(MetricsMasterWrapper masterWrapper);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
index d187bb1..dcfc600 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.OperationMetrics;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
index 63a85a3..fce574a 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hbase.m

[4/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
HBASE-20212 Make all Public classes have InterfaceAudience category

Signed-off-by: tedyu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dd9e46bb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dd9e46bb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dd9e46bb

Branch: refs/heads/branch-2
Commit: dd9e46bbf5fa76b06720392d27d07855ce2d553e
Parents: 0a94c9c
Author: Chia-Ping Tsai 
Authored: Thu Mar 22 09:24:14 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 22 18:09:54 2018 +0800

--
 hbase-annotations/pom.xml   |   4 +
 hbase-client/pom.xml|   4 +
 .../hbase/TestInterfaceAudienceAnnotations.java | 540 ---
 hbase-common/pom.xml|   4 +
 .../apache/hadoop/hbase/trace/TraceUtil.java|   2 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  11 +-
 .../hadoop/hbase/util/JSONMetricUtil.java   |  11 +-
 hbase-endpoint/pom.xml  |   4 +
 hbase-examples/pom.xml  |   4 +
 .../client/example/AsyncClientExample.java  |   3 +-
 .../client/example/BufferedMutatorExample.java  |   3 +-
 .../client/example/ExportEndpointExample.java   |  15 +-
 .../hbase/client/example/HttpProxyExample.java  |   3 +-
 .../example/MultiThreadedClientExample.java |   3 +-
 .../client/example/RefreshHFilesClient.java |   7 +-
 .../coprocessor/example/BulkDeleteEndpoint.java |  10 +-
 .../example/DelegatingInternalScanner.java  |   3 +-
 .../ExampleMasterObserverWithMetrics.java   |   3 +-
 .../ExampleRegionObserverWithMetrics.java   |   3 +-
 .../example/RefreshHFilesEndpoint.java  |   8 +-
 .../coprocessor/example/RowCountEndpoint.java   |  10 +-
 .../example/ScanModifyingObserver.java  |   3 +-
 .../example/ValueRewritingObserver.java |   3 +-
 .../example/WriteHeavyIncrementObserver.java|   3 +-
 .../example/ZooKeeperScanPolicyObserver.java|   3 +-
 .../hadoop/hbase/mapreduce/IndexBuilder.java|   3 +-
 .../hadoop/hbase/mapreduce/SampleUploader.java  |   3 +-
 .../apache/hadoop/hbase/thrift/DemoClient.java  |   4 +-
 .../hadoop/hbase/thrift/HttpDoAsClient.java |   5 +-
 .../apache/hadoop/hbase/thrift2/DemoClient.java |   5 +-
 .../org/apache/hadoop/hbase/types/PBCell.java   |   5 +-
 hbase-external-blockcache/pom.xml   |   4 +
 hbase-hadoop-compat/pom.xml |   4 +
 .../hadoop/hbase/CompatibilityFactory.java  |   2 +
 .../hbase/CompatibilitySingletonFactory.java|   2 +
 .../apache/hadoop/hbase/io/MetricsIOSource.java |   2 +
 .../hadoop/hbase/io/MetricsIOWrapper.java   |   3 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   2 +
 .../ipc/MetricsHBaseServerSourceFactory.java|   3 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   3 +
 .../master/MetricsAssignmentManagerSource.java  |   2 +
 .../master/MetricsMasterFileSystemSource.java   |   2 +
 .../hbase/master/MetricsMasterProcSource.java   |   2 +
 .../master/MetricsMasterProcSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterQuotaSource.java  |   2 +
 .../master/MetricsMasterQuotaSourceFactory.java |   3 +
 .../hbase/master/MetricsMasterSource.java   |   2 +
 .../master/MetricsMasterSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterWrapper.java  |   2 +
 .../hbase/master/MetricsSnapshotSource.java |   2 +
 .../master/balancer/MetricsBalancerSource.java  |   2 +
 .../MetricsStochasticBalancerSource.java|   7 +-
 .../apache/hadoop/hbase/metrics/BaseSource.java |   3 +
 .../hbase/metrics/ExceptionTrackingSource.java  |   3 +
 .../hbase/metrics/JvmPauseMonitorSource.java|   3 +
 .../hadoop/hbase/metrics/MBeanSource.java   |   2 +
 .../hadoop/hbase/metrics/OperationMetrics.java  |   3 +
 .../MetricsHeapMemoryManagerSource.java |   2 +
 .../MetricsRegionAggregateSource.java   |   2 +
 .../MetricsRegionServerQuotaSource.java |   2 +
 .../regionserver/MetricsRegionServerSource.java |   2 +
 .../MetricsRegionServerSourceFactory.java   |   2 +
 .../MetricsRegionServerWrapper.java |   3 +
 .../hbase/regionserver/MetricsRegionSource.java |   2 +
 .../regionserver/MetricsRegionWrapper.java  |   3 +
 .../MetricsTableAggregateSource.java|   2 +
 .../regionserver/MetricsTableLatencies.java |   3 +
 .../hbase/regionserver/MetricsTableSource.java  |   3 +
 .../MetricsTableWrapperAggregate.java   |   2 +
 .../regionserver/wal/MetricsWALSource.java  |   2 +
 .../MetricsReplicationSinkSource.java   |   3 +
 .../regionserver/MetricsReplicationSource.java  |   2 +
 .../MetricsReplicationSourceFactory.java|   3 +
 .../MetricsReplicationSourceSource.java |   2 +
 .../hadoop/hbase/rest/MetricsRESTSource.java|  10 +

[2/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 6fdc77e..5008354 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -18,14 +18,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.Service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.rmi.registry.LocateRegistry;
@@ -34,13 +26,20 @@ import java.rmi.server.RMIClientSocketFactory;
 import java.rmi.server.RMIServerSocketFactory;
 import java.rmi.server.UnicastRemoteObject;
 import java.util.HashMap;
-import java.util.Optional;
-
 import javax.management.MBeanServer;
 import javax.management.remote.JMXConnectorServer;
 import javax.management.remote.JMXConnectorServerFactory;
 import javax.management.remote.JMXServiceURL;
 import javax.management.remote.rmi.RMIConnectorServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Pluggable JMX Agent for HBase(to fix the 2 random TCP ports issue
@@ -49,6 +48,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
  * 2)support password authentication
  * 3)support subset of SSL (with default configuration)
  */
+@InterfaceAudience.Private
 public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor 
{
   private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
   public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
index d505d6f..5dffb73 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
@@ -13,14 +13,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.rmi.ssl.SslRMIClientSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Private
 public class SslRMIClientSocketFactorySecure extends SslRMIClientSocketFactory 
{
   @Override
   public Socket createSocket(String host, int port) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
index 3583afe..8a92236 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
@@ -14,14 +14,15 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.net.ssl.SSLSocketFactory;
 import javax.rmi.ssl.SslRMIServerSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
+@InterfaceAudience.Private
 public class SslRMIServerSocketFactorySecure extends SslRMIServerSocketFactory 
{
   // If you add more constructors, you may have to change the rest of this 
implementation,
   // which assumes an empty constructor, i.e. there are no specially enabled 
protocols or

http://git-wip-us.a

[1/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 0a94c9c25 -> dd9e46bbf


http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
index 973cad7..c86f476 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
@@ -22,12 +22,14 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A ThreadPoolExecutor customized for working with HBase thrift to update 
metrics before and
  * after the execution of a task.
  */
 
+@InterfaceAudience.Private
 public class THBaseThreadPoolExecutor extends ThreadPoolExecutor {
 
   private ThriftMetrics metrics;

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-zookeeper/pom.xml
--
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 963476a..588afb5 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -91,6 +91,10 @@
 org.codehaus.mojo
 findbugs-maven-plugin
   
+  
+net.revelc.code
+warbucks-maven-plugin
+  
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
--
diff --git 
a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
 
b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
index a50ce4c..7413879 100644
--- 
a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
+++ 
b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -33,6 +33,7 @@ import 
org.apache.hadoop.hbase.shaded.protobuf.generated.RegionNormalizerProtos;
 /**
  * Tracks region normalizer state up in ZK
  */
+@InterfaceAudience.Private
 public class RegionNormalizerTracker extends ZKNodeTracker {
   private static final Logger LOG = 
LoggerFactory.getLogger(RegionNormalizerTracker.class);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/dd9e46bb/pom.xml
--
diff --git a/pom.xml b/pom.xml
index 5ca31f8..abbe4ad 100755
--- a/pom.xml
+++ b/pom.xml
@@ -909,6 +909,34 @@
 true
   
 
+
+  net.revelc.code
+  warbucks-maven-plugin
+  ${maven.warbucks.version}
+  
+false
+
+  
+
+(?!.*(.generated.|.tmpl.|\$)).*
+false
+true
+
false
+false
+false
+
org[.]apache[.]yetus[.]audience[.]InterfaceAudience.*
+  
+
+  
+  
+
+  run-warbucks
+  
+check
+  
+
+  
+
   
 
 
@@ -1382,6 +1410,7 @@
 3.0.0
 3.4
 3.0.1
+1.1.0
 1.5.0.Final
 2.8.2
 3.2.2



[3/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
index b282e06..197f9f9 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterProcSourceFactory.java
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterSource when given a 
MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSourceFactory {
 
   MetricsMasterProcSource create(MetricsMasterWrapper masterWrapper);

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
index 99c5441..8450432 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSource.java
@@ -17,10 +17,12 @@
 package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A collection of exposed metrics for space quotas from the HBase Master.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSource extends BaseSource {
 
   String METRICS_NAME = "Quotas";

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
index 6e10746..2dcd945 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterQuotaSourceFactory.java
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.master;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
 /**
  * Interface of a factory to create MetricsMasterQuotaSource when given a 
MetricsMasterWrapper.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSourceFactory {
 
   MetricsMasterQuotaSource create(MetricsMasterWrapper masterWrapper);

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
index d187bb1..dcfc600 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSource.java
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.master;
 
 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.OperationMetrics;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterSource extends BaseSource {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
--
diff --git 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
index 63a85a3..fce574a 100644
--- 
a/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
+++ 
b/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/master/MetricsMasterSourceFactory.java
@@ -18,9 +18,12 @@
 
 package org.apache.hadoop.hbase.m

[1/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 9802c1771 -> 70c170727


http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
index 3fb7254..f2abe2e 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescer.java
@@ -31,7 +31,6 @@ import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.LongAdder;
-
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.thrift.ThriftServerRunner.HBaseHandler;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.thrift.TException;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -52,6 +52,7 @@ import org.slf4j.LoggerFactory;
  * thrift server dies or is shut down before everything in the queue is 
drained.
  *
  */
+@InterfaceAudience.Private
 public class IncrementCoalescer implements IncrementCoalescerMBean {
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
index 604fa97..06cf193 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/IncrementCoalescerMBean.java
@@ -18,6 +18,9 @@
 
 package org.apache.hadoop.hbase.thrift;
 
+import org.apache.yetus.audience.InterfaceAudience;
+
+@InterfaceAudience.Private
 public interface IncrementCoalescerMBean {
   int getQueueSize();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
--
diff --git 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
index 973cad7..c86f476 100644
--- 
a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
+++ 
b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/THBaseThreadPoolExecutor.java
@@ -22,12 +22,14 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A ThreadPoolExecutor customized for working with HBase thrift to update 
metrics before and
  * after the execution of a task.
  */
 
+@InterfaceAudience.Private
 public class THBaseThreadPoolExecutor extends ThreadPoolExecutor {
 
   private ThriftMetrics metrics;

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-zookeeper/pom.xml
--
diff --git a/hbase-zookeeper/pom.xml b/hbase-zookeeper/pom.xml
index 8c92968..a49dbe7 100644
--- a/hbase-zookeeper/pom.xml
+++ b/hbase-zookeeper/pom.xml
@@ -91,6 +91,10 @@
 org.codehaus.mojo
 findbugs-maven-plugin
   
+  
+net.revelc.code
+warbucks-maven-plugin
+  
 
 
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
--
diff --git 
a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
 
b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
index a50ce4c..7413879 100644
--- 
a/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
+++ 
b/hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/RegionNormalizerTracker.java
@@ -19,10 +19,10 @@
 package org.apache.hadoop.hbase.zookeeper;
 
 import java.io.IOException;
-
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.util.Byt

[2/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
index 6fdc77e..5008354 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/JMXListener.java
@@ -18,14 +18,6 @@
  */
 package org.apache.hadoop.hbase;
 
-import com.google.protobuf.Service;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.rmi.registry.LocateRegistry;
@@ -34,13 +26,20 @@ import java.rmi.server.RMIClientSocketFactory;
 import java.rmi.server.RMIServerSocketFactory;
 import java.rmi.server.UnicastRemoteObject;
 import java.util.HashMap;
-import java.util.Optional;
-
 import javax.management.MBeanServer;
 import javax.management.remote.JMXConnectorServer;
 import javax.management.remote.JMXConnectorServerFactory;
 import javax.management.remote.JMXServiceURL;
 import javax.management.remote.rmi.RMIConnectorServer;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Pluggable JMX Agent for HBase(to fix the 2 random TCP ports issue
@@ -49,6 +48,7 @@ import javax.management.remote.rmi.RMIConnectorServer;
  * 2)support password authentication
  * 3)support subset of SSL (with default configuration)
  */
+@InterfaceAudience.Private
 public class JMXListener implements MasterCoprocessor, RegionServerCoprocessor 
{
   private static final Logger LOG = LoggerFactory.getLogger(JMXListener.class);
   public static final String RMI_REGISTRY_PORT_CONF_KEY = ".rmi.registry.port";

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
index d505d6f..5dffb73 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIClientSocketFactorySecure.java
@@ -13,14 +13,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.rmi.ssl.SslRMIClientSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
 @SuppressWarnings("serial")
+@InterfaceAudience.Private
 public class SslRMIClientSocketFactorySecure extends SslRMIClientSocketFactory 
{
   @Override
   public Socket createSocket(String host, int port) throws IOException {

http://git-wip-us.apache.org/repos/asf/hbase/blob/70c17072/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
index 3583afe..8a92236 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/SslRMIServerSocketFactorySecure.java
@@ -14,14 +14,15 @@ import java.io.IOException;
 import java.net.ServerSocket;
 import java.net.Socket;
 import java.util.ArrayList;
-
 import javax.net.ssl.SSLSocket;
 import javax.net.ssl.SSLSocketFactory;
 import javax.rmi.ssl.SslRMIServerSocketFactory;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Avoid SSL V3.0 "Poodle" Vulnerability - CVE-2014-3566
  */
+@InterfaceAudience.Private
 public class SslRMIServerSocketFactorySecure extends SslRMIServerSocketFactory 
{
   // If you add more constructors, you may have to change the rest of this 
implementation,
   // which assumes an empty constructor, i.e. there are no specially enabled 
protocols or

http://git-wip-us.a

[4/4] hbase git commit: HBASE-20212 Make all Public classes have InterfaceAudience category

2018-03-22 Thread chia7712
HBASE-20212 Make all Public classes have InterfaceAudience category

Signed-off-by: tedyu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70c17072
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70c17072
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70c17072

Branch: refs/heads/branch-2.0
Commit: 70c17072708f150c22a715c3f34662db278d7185
Parents: 9802c17
Author: Chia-Ping Tsai 
Authored: Thu Mar 22 09:23:16 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 22 18:17:16 2018 +0800

--
 hbase-annotations/pom.xml   |   4 +
 hbase-client/pom.xml|   4 +
 .../hbase/TestInterfaceAudienceAnnotations.java | 540 ---
 hbase-common/pom.xml|   4 +
 .../apache/hadoop/hbase/trace/TraceUtil.java|   2 +
 .../org/apache/hadoop/hbase/util/JSONBean.java  |  11 +-
 .../hadoop/hbase/util/JSONMetricUtil.java   |  11 +-
 hbase-endpoint/pom.xml  |   4 +
 hbase-examples/pom.xml  |   4 +
 .../client/example/AsyncClientExample.java  |   3 +-
 .../client/example/BufferedMutatorExample.java  |   3 +-
 .../client/example/ExportEndpointExample.java   |  15 +-
 .../hbase/client/example/HttpProxyExample.java  |   3 +-
 .../example/MultiThreadedClientExample.java |   3 +-
 .../client/example/RefreshHFilesClient.java |   7 +-
 .../coprocessor/example/BulkDeleteEndpoint.java |  10 +-
 .../example/DelegatingInternalScanner.java  |   3 +-
 .../ExampleMasterObserverWithMetrics.java   |   3 +-
 .../ExampleRegionObserverWithMetrics.java   |   3 +-
 .../example/RefreshHFilesEndpoint.java  |   8 +-
 .../coprocessor/example/RowCountEndpoint.java   |  10 +-
 .../example/ScanModifyingObserver.java  |   3 +-
 .../example/ValueRewritingObserver.java |   3 +-
 .../example/WriteHeavyIncrementObserver.java|   3 +-
 .../example/ZooKeeperScanPolicyObserver.java|   3 +-
 .../hadoop/hbase/mapreduce/IndexBuilder.java|   3 +-
 .../hadoop/hbase/mapreduce/SampleUploader.java  |   3 +-
 .../apache/hadoop/hbase/thrift/DemoClient.java  |   4 +-
 .../hadoop/hbase/thrift/HttpDoAsClient.java |   5 +-
 .../apache/hadoop/hbase/thrift2/DemoClient.java |   5 +-
 .../org/apache/hadoop/hbase/types/PBCell.java   |   5 +-
 hbase-external-blockcache/pom.xml   |   4 +
 hbase-hadoop-compat/pom.xml |   4 +
 .../hadoop/hbase/CompatibilityFactory.java  |   2 +
 .../hbase/CompatibilitySingletonFactory.java|   2 +
 .../apache/hadoop/hbase/io/MetricsIOSource.java |   2 +
 .../hadoop/hbase/io/MetricsIOWrapper.java   |   3 +
 .../hbase/ipc/MetricsHBaseServerSource.java |   2 +
 .../ipc/MetricsHBaseServerSourceFactory.java|   3 +
 .../hbase/ipc/MetricsHBaseServerWrapper.java|   3 +
 .../master/MetricsAssignmentManagerSource.java  |   2 +
 .../master/MetricsMasterFileSystemSource.java   |   2 +
 .../hbase/master/MetricsMasterProcSource.java   |   2 +
 .../master/MetricsMasterProcSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterQuotaSource.java  |   2 +
 .../master/MetricsMasterQuotaSourceFactory.java |   3 +
 .../hbase/master/MetricsMasterSource.java   |   2 +
 .../master/MetricsMasterSourceFactory.java  |   3 +
 .../hbase/master/MetricsMasterWrapper.java  |   2 +
 .../hbase/master/MetricsSnapshotSource.java |   2 +
 .../master/balancer/MetricsBalancerSource.java  |   2 +
 .../MetricsStochasticBalancerSource.java|   7 +-
 .../apache/hadoop/hbase/metrics/BaseSource.java |   3 +
 .../hbase/metrics/ExceptionTrackingSource.java  |   3 +
 .../hbase/metrics/JvmPauseMonitorSource.java|   3 +
 .../hadoop/hbase/metrics/MBeanSource.java   |   2 +
 .../hadoop/hbase/metrics/OperationMetrics.java  |   3 +
 .../MetricsHeapMemoryManagerSource.java |   2 +
 .../MetricsRegionAggregateSource.java   |   2 +
 .../MetricsRegionServerQuotaSource.java |   2 +
 .../regionserver/MetricsRegionServerSource.java |   2 +
 .../MetricsRegionServerSourceFactory.java   |   2 +
 .../MetricsRegionServerWrapper.java |   3 +
 .../hbase/regionserver/MetricsRegionSource.java |   2 +
 .../regionserver/MetricsRegionWrapper.java  |   3 +
 .../MetricsTableAggregateSource.java|   2 +
 .../regionserver/MetricsTableLatencies.java |   3 +
 .../hbase/regionserver/MetricsTableSource.java  |   3 +
 .../MetricsTableWrapperAggregate.java   |   2 +
 .../regionserver/wal/MetricsWALSource.java  |   2 +
 .../MetricsReplicationSinkSource.java   |   3 +
 .../regionserver/MetricsReplicationSource.java  |   2 +
 .../MetricsReplicationSourceFactory.java|   3 +
 .../MetricsReplicationSourceSource.java |   2 +
 .../hadoop/hbase/rest/MetricsRESTSource.java|  10

hbase git commit: HBASE-20246 Remove the spark module

2018-03-21 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 4cf5884f1 -> 70e7e4866


HBASE-20246 Remove the spark module

Signed-off-by: Michael Stack 
Signed-off-by: Sean Busbey 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70e7e486
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70e7e486
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70e7e486

Branch: refs/heads/branch-2.0
Commit: 70e7e486650770944be439bdb7d1baa6ee0969fe
Parents: 4cf5884
Author: Chia-Ping Tsai 
Authored: Wed Mar 21 17:54:49 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 22 09:09:26 2018 +0800

--
 hbase-spark/pom.xml | 717 ---
 1 file changed, 717 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70e7e486/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
deleted file mode 100644
index 874cf9d..000
--- a/hbase-spark/pom.xml
+++ /dev/null
@@ -1,717 +0,0 @@
-
-
-http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-  4.0.0
-  
-hbase-build-configuration
-org.apache.hbase
-3.0.0-SNAPSHOT
-../hbase-build-configuration
-  
-  hbase-spark
-  Apache HBase - Spark
-  
-1.6.0
-2.10.4
-2.10
-${project.basedir}/..
-  
-  
-
-  org.apache.hbase.thirdparty
-  hbase-shaded-miscellaneous
-
-
-
-  javax.servlet
-  javax.servlet-api
-  test
-
-
-
-  org.scala-lang
-  scala-library
-  ${scala.version}
-  provided
-
-
-
-  org.apache.spark
-  spark-core_${scala.binary.version}
-  ${spark.version}
-  provided
-  
-
-  
-  org.scala-lang
-  scala-library
-
-
-  
-  org.scala-lang
-  scalap
-
-
-   com.google.code.findbugs
-   jsr305
-
-  
-
-
-  com.google.code.findbugs
-  jsr305
-  1.3.9
-  provided
-  true
-
-
-  org.apache.spark
-  spark-sql_${scala.binary.version}
-  ${spark.version}
-  provided
-
-
-  org.apache.spark
-  spark-streaming_${scala.binary.version}
-  ${spark.version}
-  provided
-
-
-  org.apache.spark
-  spark-streaming_${scala.binary.version}
-  ${spark.version}
-  test-jar
-  tests
-  test
-
-
-  junit
-  junit
-  test
-
-
-  org.scalatest
-  scalatest_${scala.binary.version}
-  2.2.4
-  test
-
-
-  org.scalamock
-  
scalamock-scalatest-support_${scala.binary.version}
-  3.1.4
-  test
-
-
-  com.fasterxml.jackson.module
-  jackson-module-scala_${scala.binary.version}
-  ${jackson.version}
-  
-
-  org.scala-lang
-  scala-library
-
-
-  org.scala-lang
-  scala-reflect
-
-  
-
-
-  org.apache.hadoop
-  hadoop-client
-  ${hadoop-two.version}
-  
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  io.netty
-  netty
-
-  
-
-
-  org.apache.hadoop
-  hadoop-common
-  ${hadoop-two.version}
-  
-
-  org.apache.htrace
-  htrace-core
-
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  io.netty
-  netty
-
-
-  com.google.code.findbugs
-  jsr305
-
-  
-
-
-  org.apache.hadoop
-  hadoop-common
-  ${hadoop-two.version}
-  test-jar
-  test
-  
-
-  org.apache.htrace
-  htrace-core
-
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  

hbase git commit: HBASE-20246 Remove the spark module

2018-03-21 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 7a130dcc3 -> 68008356a


HBASE-20246 Remove the spark module

Signed-off-by: Michael Stack 
Signed-off-by: Sean Busbey 
Signed-off-by: Mike Drob 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/68008356
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/68008356
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/68008356

Branch: refs/heads/branch-2
Commit: 68008356aef80fe498539b2ec6ab75ec3b5673d3
Parents: 7a130dc
Author: Chia-Ping Tsai 
Authored: Wed Mar 21 17:54:49 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 22 09:05:29 2018 +0800

--
 hbase-spark/pom.xml | 717 ---
 1 file changed, 717 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/68008356/hbase-spark/pom.xml
--
diff --git a/hbase-spark/pom.xml b/hbase-spark/pom.xml
deleted file mode 100644
index 874cf9d..000
--- a/hbase-spark/pom.xml
+++ /dev/null
@@ -1,717 +0,0 @@
-
-
-http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
-  4.0.0
-  
-hbase-build-configuration
-org.apache.hbase
-3.0.0-SNAPSHOT
-../hbase-build-configuration
-  
-  hbase-spark
-  Apache HBase - Spark
-  
-1.6.0
-2.10.4
-2.10
-${project.basedir}/..
-  
-  
-
-  org.apache.hbase.thirdparty
-  hbase-shaded-miscellaneous
-
-
-
-  javax.servlet
-  javax.servlet-api
-  test
-
-
-
-  org.scala-lang
-  scala-library
-  ${scala.version}
-  provided
-
-
-
-  org.apache.spark
-  spark-core_${scala.binary.version}
-  ${spark.version}
-  provided
-  
-
-  
-  org.scala-lang
-  scala-library
-
-
-  
-  org.scala-lang
-  scalap
-
-
-   com.google.code.findbugs
-   jsr305
-
-  
-
-
-  com.google.code.findbugs
-  jsr305
-  1.3.9
-  provided
-  true
-
-
-  org.apache.spark
-  spark-sql_${scala.binary.version}
-  ${spark.version}
-  provided
-
-
-  org.apache.spark
-  spark-streaming_${scala.binary.version}
-  ${spark.version}
-  provided
-
-
-  org.apache.spark
-  spark-streaming_${scala.binary.version}
-  ${spark.version}
-  test-jar
-  tests
-  test
-
-
-  junit
-  junit
-  test
-
-
-  org.scalatest
-  scalatest_${scala.binary.version}
-  2.2.4
-  test
-
-
-  org.scalamock
-  
scalamock-scalatest-support_${scala.binary.version}
-  3.1.4
-  test
-
-
-  com.fasterxml.jackson.module
-  jackson-module-scala_${scala.binary.version}
-  ${jackson.version}
-  
-
-  org.scala-lang
-  scala-library
-
-
-  org.scala-lang
-  scala-reflect
-
-  
-
-
-  org.apache.hadoop
-  hadoop-client
-  ${hadoop-two.version}
-  
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  io.netty
-  netty
-
-  
-
-
-  org.apache.hadoop
-  hadoop-common
-  ${hadoop-two.version}
-  
-
-  org.apache.htrace
-  htrace-core
-
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  io.netty
-  netty
-
-
-  com.google.code.findbugs
-  jsr305
-
-  
-
-
-  org.apache.hadoop
-  hadoop-common
-  ${hadoop-two.version}
-  test-jar
-  test
-  
-
-  org.apache.htrace
-  htrace-core
-
-
-  log4j
-  log4j
-
-
-  javax.servlet
-  servlet-api
-
-
-  javax.servlet.jsp
-  jsp-api
-
-
-  org.jruby
-  jruby-complete
-
-
-  org.jboss.netty
-  netty
-
-
-  

hbase git commit: HBASE-20119 (addendum) revert the removed methods in TableDescriptorBuilder and TableDescriptor

2018-03-18 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 7e0914759 -> a13c1ae1c


HBASE-20119 (addendum) revert the removed methods in TableDescriptorBuilder and 
TableDescriptor

Signed-off-by: Josh Elser 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a13c1ae1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a13c1ae1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a13c1ae1

Branch: refs/heads/branch-2.0
Commit: a13c1ae1c369d5ee41da68f6d9214a200f760062
Parents: 7e09147
Author: Chia-Ping Tsai 
Authored: Mon Mar 19 08:59:24 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 19 09:08:01 2018 +0800

--
 .../apache/hadoop/hbase/HTableDescriptor.java   | 18 ++--
 .../hadoop/hbase/client/TableDescriptor.java| 16 ++-
 .../hbase/client/TableDescriptorBuilder.java| 48 +++-
 3 files changed, 65 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a13c1ae1/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index e59ea45..960b91f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -21,16 +21,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
 import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -38,8 +37,7 @@ import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDesc
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * HTableDescriptor contains the details about an HBase table  such as the 
descriptors of
@@ -763,16 +761,6 @@ public class HTableDescriptor implements TableDescriptor, 
Comparable getCoprocessors() {
-return 
getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName)
-  .collect(Collectors.toList());
-  }
-
-  /**
* Remove a coprocessor from those set on the table
* @param className Class name of the co-processor
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/a13c1ae1/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index 4c46a8f..9456fd4 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -24,7 +24,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
-
+import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,6 +87,20 @@ public interface TableDescriptor {
   Collection getCoprocessorDescriptors();
 
   /**
+   * Return the list of attached co-processor represented by their name
+   * className
+   * @return The list of co-processors classNames
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *   Use {@link #getCoprocessorDescriptors()} instead
+   */
+  @Deprecated
+  default Collection getCoprocessors() {
+return getCoprocessorDescriptors().stream()
+  .map(CoprocessorDescriptor::getClassName)
+  .collect(Collectors.toList());
+  }
+
+  /**
* Returns the durability settin

hbase git commit: HBASE-20119 (addendum) revert the removed methods in TableDescriptorBuilder and TableDescriptor

2018-03-18 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 03e7b7826 -> 7ba86d056


HBASE-20119 (addendum) revert the removed methods in TableDescriptorBuilder and 
TableDescriptor

Signed-off-by: Josh Elser 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7ba86d05
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7ba86d05
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7ba86d05

Branch: refs/heads/branch-2
Commit: 7ba86d056c4923d959c8fd98bceba2f071406529
Parents: 03e7b78
Author: Chia-Ping Tsai 
Authored: Mon Mar 19 08:59:24 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 19 09:08:29 2018 +0800

--
 .../apache/hadoop/hbase/HTableDescriptor.java   | 18 ++--
 .../hadoop/hbase/client/TableDescriptor.java| 16 ++-
 .../hbase/client/TableDescriptorBuilder.java| 48 +++-
 3 files changed, 65 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7ba86d05/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
index e59ea45..960b91f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java
@@ -21,16 +21,15 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
-
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
+import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
 import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
-import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
@@ -38,8 +37,7 @@ import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder.ModifyableTableDesc
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
-import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * HTableDescriptor contains the details about an HBase table  such as the 
descriptors of
@@ -763,16 +761,6 @@ public class HTableDescriptor implements TableDescriptor, 
Comparable getCoprocessors() {
-return 
getCoprocessorDescriptors().stream().map(CoprocessorDescriptor::getClassName)
-  .collect(Collectors.toList());
-  }
-
-  /**
* Remove a coprocessor from those set on the table
* @param className Class name of the co-processor
*/

http://git-wip-us.apache.org/repos/asf/hbase/blob/7ba86d05/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index 4c46a8f..9456fd4 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -24,7 +24,7 @@ import java.util.Comparator;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
-
+import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -87,6 +87,20 @@ public interface TableDescriptor {
   Collection getCoprocessorDescriptors();
 
   /**
+   * Return the list of attached co-processor represented by their name
+   * className
+   * @return The list of co-processors classNames
+   * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
+   *   Use {@link #getCoprocessorDescriptors()} instead
+   */
+  @Deprecated
+  default Collection getCoprocessors() {
+return getCoprocessorDescriptors().stream()
+  .map(CoprocessorDescriptor::getClassName)
+  .collect(Collectors.toList());
+  }
+
+  /**
* Returns the durability setting fo

hbase git commit: HBASE-20119 (addendum) CP_HTD_ATTR_KEY_PATTERN should be declared Private

2018-03-18 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 00095a2ef -> 3f906badb


HBASE-20119 (addendum) CP_HTD_ATTR_KEY_PATTERN should be declared Private

Signed-off-by: Josh Elser 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/3f906bad
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/3f906bad
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/3f906bad

Branch: refs/heads/master
Commit: 3f906badbed8b7bcb6a74122ca24494f8ec9afa9
Parents: 00095a2
Author: Chia-Ping Tsai 
Authored: Mon Mar 19 09:14:11 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 19 09:15:22 2018 +0800

--
 .../org/apache/hadoop/hbase/client/TableDescriptorBuilder.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/3f906bad/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 0f5d3ad..500cfd5 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -232,7 +232,7 @@ public class TableDescriptorBuilder {
   private static final Pattern CP_HTD_ATTR_VALUE_PARAM_PATTERN = 
Pattern.compile(
 "(" + CP_HTD_ATTR_VALUE_PARAM_KEY_PATTERN + ")=(" +
   CP_HTD_ATTR_VALUE_PARAM_VALUE_PATTERN + "),?");
-  public static final Pattern CP_HTD_ATTR_KEY_PATTERN =
+  private static final Pattern CP_HTD_ATTR_KEY_PATTERN =
 Pattern.compile("^coprocessor\\$([0-9]+)$", Pattern.CASE_INSENSITIVE);
   /**
* Table descriptor for namespace table



[2/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/95596e8b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
index 44a4f57..44f736b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
@@ -21,21 +21,17 @@ package org.apache.hadoop.hbase.security.access;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Optional;
-import java.util.regex.Matcher;
-
 import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -81,10 +77,8 @@ public class CoprocessorWhitelistMasterObserver implements 
MasterCoprocessor, Ma
* "file:///usr/hbase/coprocessors" or for all
* filesystems "/usr/hbase/coprocessors")
* @return if the path was found under the wlPath
-   * @throws IOException if a failure occurs in getting the path file system
*/
-  private static boolean validatePath(Path coprocPath, Path wlPath,
-  Configuration conf) throws IOException {
+  private static boolean validatePath(Path coprocPath, Path wlPath) {
 // verify if all are allowed
 if (wlPath.toString().equals("*")) {
   return(true);
@@ -143,58 +137,26 @@ public class CoprocessorWhitelistMasterObserver 
implements MasterCoprocessor, Ma
* @param  ctx as passed in from the coprocessor
* @param  htd as passed in from the coprocessor
*/
-  private void 
verifyCoprocessors(ObserverContext ctx,
+  private static void 
verifyCoprocessors(ObserverContext ctx,
   TableDescriptor htd) throws IOException {
-
-Configuration conf = ctx.getEnvironment().getConfiguration();
-
 Collection paths =
-conf.getStringCollection(
+  ctx.getEnvironment().getConfiguration().getStringCollection(
 CP_COPROCESSOR_WHITELIST_PATHS_KEY);
-
-Collection coprocs = htd.getCoprocessors();
-for (int i = 0; i < coprocs.size(); i++) {
-
-  String coprocSpec = Bytes.toString(htd.getValue(
-  Bytes.toBytes("coprocessor$" + (i + 1;
-  if (coprocSpec == null) {
-continue;
-  }
-
-  // File path is the 1st field of the coprocessor spec
-  Matcher matcher =
-  HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(coprocSpec);
-  if (matcher == null || !matcher.matches()) {
-continue;
-  }
-
-  String coprocPathStr = matcher.group(1).trim();
-  // Check if coprocessor is being loaded via the classpath (i.e. no file 
path)
-  if (coprocPathStr.equals("")) {
-break;
-  }
-  Path coprocPath = new Path(coprocPathStr);
-  String coprocessorClass = matcher.group(2).trim();
-
-  boolean foundPathMatch = false;
-  for (String pathStr : paths) {
-Path wlPath = new Path(pathStr);
-try {
-  foundPathMatch = validatePath(coprocPath, wlPath, conf);
-  if (foundPathMatch == true) {
+for (CoprocessorDescriptor cp : htd.getCoprocessorDescriptors()) {
+  if (cp.getJarPath().isPresent()) {
+if (paths.stream().noneMatch(p -> {
+  Path wlPath = new Path(p);
+  if (validatePath(new Path(cp.getJarPath().get()), wlPath)) {
 LOG.debug(String.format("Coprocessor %s found in directory %s",
-coprocessorClass, pathStr));
-break;
+  cp.getClassName(), p));
+return true;
   }
-} catch (IOException e) {
-  LOG.warn(String.format("Failed to validate white list path %s for 
coprocessor path %s",
-  pathStr, coprocPathStr));
+  return false;
+})) {
+  throw new IOException(String.format("Loading %s DENIED in %s",
+cp.getClassName(), CP_COPROCESSOR_WHITE

[3/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
HBASE-20119 Introduce a pojo class to carry coprocessor information in order to 
make TableDescriptorBuilder accept multiple cp at once

Signed-off-by: Ted Yu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/95596e8b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/95596e8b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/95596e8b

Branch: refs/heads/branch-2
Commit: 95596e8ba7569d131573ebc7ab679ce9ef3fe655
Parents: 5ea0db0
Author: Chia-Ping Tsai 
Authored: Fri Mar 16 01:17:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Mar 16 01:26:08 2018 +0800

--
 .../archetypes/exemplars/client/HelloHBase.java |   2 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   |  25 +-
 .../hbase/client/CoprocessorDescriptor.java |  51 
 .../client/CoprocessorDescriptorBuilder.java| 118 +
 .../hadoop/hbase/client/TableDescriptor.java|   7 +-
 .../hbase/client/TableDescriptorBuilder.java| 244 ++-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../hbase/client/TestCoprocessorDescriptor.java | 100 
 .../client/TestTableDescriptorBuilder.java  |  82 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  19 +-
 .../hbase/coprocessor/TestClassLoading.java |   2 +-
 .../hbase/coprocessor/TestSecureExport.java |   6 +-
 .../client/example/ExportEndpointExample.java   |   4 +-
 .../example/TestScanModifyingObserver.java  |   4 +-
 .../example/TestValueReplacingCompaction.java   |   4 +-
 .../TestWriteHeavyIncrementObserver.java|   4 +-
 ...IncrementObserverWithMemStoreCompaction.java |   4 +-
 .../TestZooKeeperScanPolicyObserver.java|   4 +-
 .../hbase/IntegrationTestDDLMasterFailover.java |   2 +-
 .../hbase/chaos/actions/AddColumnAction.java|   2 +-
 .../actions/TestChangeSplitPolicyAction.java|   2 +-
 .../mapreduce/IntegrationTestBulkLoad.java  |   2 +-
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java  |   2 +-
 .../mapreduce/TestCellBasedImportExport2.java   |  14 +-
 .../hbase/mapreduce/TestImportExport.java   |  14 +-
 .../replication/TestVerifyReplication.java  |   2 +-
 .../hbase/rsgroup/TestRSGroupsWithACL.java  |   2 +-
 .../hadoop/hbase/coprocessor/package-info.java  |   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   2 +-
 .../regionserver/RegionCoprocessorHost.java |  71 +-
 .../hbase/security/access/AccessController.java |   2 +-
 .../CoprocessorWhitelistMasterObserver.java |  68 ++
 .../hbase/tool/LoadIncrementalHFiles.java   |   2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   |  46 ++--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   2 +-
 .../hadoop/hbase/util/RegionSplitter.java   |   2 +-
 .../hadoop/hbase/AcidGuaranteesTestBase.java|   2 +-
 .../hadoop/hbase/AcidGuaranteesTestTool.java|   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   6 +-
 .../org/apache/hadoop/hbase/TestZooKeeper.java  |   6 +-
 .../client/AbstractTestCIOperationTimeout.java  |   4 +-
 .../hbase/client/AbstractTestCIRpcTimeout.java  |   4 +-
 .../hadoop/hbase/client/TestAsyncAdminBase.java |   2 +-
 .../hbase/client/TestAsyncClusterAdminApi.java  |   2 +-
 .../hbase/client/TestAsyncRegionAdminApi.java   |   5 +-
 ...estAsyncReplicationAdminApiWithClusters.java |   4 +-
 .../hbase/client/TestAsyncTableAdminApi.java|  10 +-
 .../hbase/client/TestAsyncTableAdminApi2.java   |  16 +-
 .../hbase/client/TestAsyncTableAdminApi3.java   |   2 +-
 .../hbase/client/TestAsyncTableBatch.java   |   2 +-
 .../apache/hadoop/hbase/client/TestCISleep.java |   9 +-
 .../hbase/client/TestDropTimeoutRequest.java|   4 +-
 .../hbase/client/TestFromClientSide3.java   |   4 +-
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hbase/client/TestReplicaWithCluster.java|   6 +-
 .../hbase/client/TestResultFromCoprocessor.java |   4 +-
 .../hbase/client/TestServerLoadDurability.java  |   2 +-
 .../coprocessor/TestCoreRegionCoprocessor.java  |   2 +-
 .../TestPassCustomCellViaRegionObserver.java|   4 +-
 .../hbase/coprocessor/TestWALObserver.java  |   8 +-
 .../master/TestAssignmentManagerMetrics.java|  10 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java |   2 +-
 .../master/assignment/MockMasterServices.java   |   2 +-
 .../assignment/TestRogueRSAssignment.java   |   2 +-
 .../TestFavoredStochasticBalancerPickers.java   |   2 +-
 .../master/cleaner/TestSnapshotFromMaster.java  |   3 +-
 .../MasterProcedureTestingUtility.java  |   2 +-
 .../procedure/TestMasterObserverPostCalls.java  |  12 +-
 .../master/procedure/TestProcedurePriority.java |   2 +-
 .../regionserver/TestCacheOnWriteInSchema.java  |   2 +-
 .../TestCompactionArchiveConcurrentClose.java   |   2 +-
 .../TestCompactionArchiveIOException.java   |   2 +-
 .../TestCompactionLifeCy

[1/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5ea0db0b1 -> 95596e8ba


http://git-wip-us.apache.org/repos/asf/hbase/blob/95596e8b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 1cdb6e5..5336963 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -252,9 +252,9 @@ public abstract class AbstractTestFSWAL {
 AbstractFSWAL wal = newWAL(FS, CommonFSUtils.getWALRootDir(conf1), 
DIR.toString(),
   HConstants.HREGION_OLDLOGDIR_NAME, conf1, null, true, null, null);
 TableDescriptor t1 = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 TableDescriptor t2 = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("t2"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 RegionInfo hri1 = RegionInfoBuilder.newBuilder(t1.getTableName()).build();
 RegionInfo hri2 = RegionInfoBuilder.newBuilder(t2.getTableName()).build();
 // add edits and roll the wal
@@ -361,7 +361,7 @@ public abstract class AbstractTestFSWAL {
 final RegionInfo hri = RegionInfoBuilder.newBuilder(tableName).build();
 final byte[] rowName = tableName.getName();
 final TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
 HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, 
TEST_UTIL.getDefaultRootDirPath(),
   TEST_UTIL.getConfiguration(), htd);
 HBaseTestingUtility.closeRegionAndWAL(r);
@@ -449,7 +449,7 @@ public abstract class AbstractTestFSWAL {
   CONF, null, true, null, null);
 wal.close();
 TableDescriptor td = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 RegionInfo ri = RegionInfoBuilder.newBuilder(td.getTableName()).build();
 MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
 NavigableMap scopes = new 
TreeMap<>(Bytes.BYTES_COMPARATOR);

http://git-wip-us.apache.org/repos/asf/hbase/blob/95596e8b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
index c6059b1..610af61 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
@@ -305,7 +305,7 @@ public abstract class AbstractTestLogRolling  {
   protected Table createTestTable(String tableName) throws IOException {
 // Create the test table and open it
 TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(getName()))
-
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build();
+
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build();
 admin.createTable(desc);
 return TEST_UTIL.getConnection().getTable(desc.getTableName());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/95596e8b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
index f5fabbc..4effa6d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
@@ -291,7 +291,7 @@ public class TestDurability {
   private HRegion createHRegion(WALFactory wals, Durability durability) throws 
IOException {
 TableName tableName = 
TableName.valueOf(name.getMethodName().replaceAll("[^A-Za-z0-9-_]", "_"));
 TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
- 

[2/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/4f2133ee/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
index 44a4f57..44f736b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
@@ -21,21 +21,17 @@ package org.apache.hadoop.hbase.security.access;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Optional;
-import java.util.regex.Matcher;
-
 import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -81,10 +77,8 @@ public class CoprocessorWhitelistMasterObserver implements 
MasterCoprocessor, Ma
* "file:///usr/hbase/coprocessors" or for all
* filesystems "/usr/hbase/coprocessors")
* @return if the path was found under the wlPath
-   * @throws IOException if a failure occurs in getting the path file system
*/
-  private static boolean validatePath(Path coprocPath, Path wlPath,
-  Configuration conf) throws IOException {
+  private static boolean validatePath(Path coprocPath, Path wlPath) {
 // verify if all are allowed
 if (wlPath.toString().equals("*")) {
   return(true);
@@ -143,58 +137,26 @@ public class CoprocessorWhitelistMasterObserver 
implements MasterCoprocessor, Ma
* @param  ctx as passed in from the coprocessor
* @param  htd as passed in from the coprocessor
*/
-  private void 
verifyCoprocessors(ObserverContext ctx,
+  private static void 
verifyCoprocessors(ObserverContext ctx,
   TableDescriptor htd) throws IOException {
-
-Configuration conf = ctx.getEnvironment().getConfiguration();
-
 Collection paths =
-conf.getStringCollection(
+  ctx.getEnvironment().getConfiguration().getStringCollection(
 CP_COPROCESSOR_WHITELIST_PATHS_KEY);
-
-Collection coprocs = htd.getCoprocessors();
-for (int i = 0; i < coprocs.size(); i++) {
-
-  String coprocSpec = Bytes.toString(htd.getValue(
-  Bytes.toBytes("coprocessor$" + (i + 1;
-  if (coprocSpec == null) {
-continue;
-  }
-
-  // File path is the 1st field of the coprocessor spec
-  Matcher matcher =
-  HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(coprocSpec);
-  if (matcher == null || !matcher.matches()) {
-continue;
-  }
-
-  String coprocPathStr = matcher.group(1).trim();
-  // Check if coprocessor is being loaded via the classpath (i.e. no file 
path)
-  if (coprocPathStr.equals("")) {
-break;
-  }
-  Path coprocPath = new Path(coprocPathStr);
-  String coprocessorClass = matcher.group(2).trim();
-
-  boolean foundPathMatch = false;
-  for (String pathStr : paths) {
-Path wlPath = new Path(pathStr);
-try {
-  foundPathMatch = validatePath(coprocPath, wlPath, conf);
-  if (foundPathMatch == true) {
+for (CoprocessorDescriptor cp : htd.getCoprocessorDescriptors()) {
+  if (cp.getJarPath().isPresent()) {
+if (paths.stream().noneMatch(p -> {
+  Path wlPath = new Path(p);
+  if (validatePath(new Path(cp.getJarPath().get()), wlPath)) {
 LOG.debug(String.format("Coprocessor %s found in directory %s",
-coprocessorClass, pathStr));
-break;
+  cp.getClassName(), p));
+return true;
   }
-} catch (IOException e) {
-  LOG.warn(String.format("Failed to validate white list path %s for 
coprocessor path %s",
-  pathStr, coprocPathStr));
+  return false;
+})) {
+  throw new IOException(String.format("Loading %s DENIED in %s",
+cp.getClassName(), CP_COPROCESSOR_WHITE

[1/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 31da4d0bc -> 4f2133ee3


http://git-wip-us.apache.org/repos/asf/hbase/blob/4f2133ee/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
index 7ea879e..e5a4f0c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/compactions/TestFIFOCompactionPolicy.java
@@ -93,7 +93,7 @@ public class TestFIFOCompactionPolicy {
   FIFOCompactionPolicy.class.getName())
 .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName())
-
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build())
+
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build())
 .build();
 admin.createTable(desc);
 Table table = TEST_UTIL.getConnection().getTable(tableName);
@@ -155,7 +155,7 @@ public class TestFIFOCompactionPolicy {
   FIFOCompactionPolicy.class.getName())
 .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName())
-.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
+.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
 TEST_UTIL.getAdmin().createTable(desc);
   }
 
@@ -169,7 +169,7 @@ public class TestFIFOCompactionPolicy {
   FIFOCompactionPolicy.class.getName())
 .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName())
-
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1)
+
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1)
 .setMinVersions(1).build())
 .build();
 TEST_UTIL.getAdmin().createTable(desc);
@@ -187,7 +187,7 @@ public class TestFIFOCompactionPolicy {
 .setValue(HConstants.HBASE_REGION_SPLIT_POLICY_KEY,
   DisabledRegionSplitPolicy.class.getName())
 .setValue(HStore.BLOCKING_STOREFILES_KEY, "10")
-
.addColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build())
+
.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(family).setTimeToLive(1).build())
 .build();
 TEST_UTIL.getAdmin().createTable(desc);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4f2133ee/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
index a7cdfa5..5319d30 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestCompactionWithThroughputController.java
@@ -203,7 +203,7 @@ public class TestCompactionWithThroughputController {
 try {
   TEST_UTIL.getAdmin()
   .createTable(TableDescriptorBuilder.newBuilder(tableName)
-  
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false)
+  
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false)
   .build());
   TEST_UTIL.waitTableAvailable(tableName);
   HRegionServer regionServer = 
TEST_UTIL.getRSForFirstRegionInTable(tableName);
@@ -260,7 +260,7 @@ public class TestCompactionWithThroughputController {
 try {
   TEST_UTIL.getAdmin()
   .createTable(TableDescriptorBuilder.newBuilder(tableName)
-  
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false)
+  
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).setCompactionEnabled(false)
   .build());
   TEST_UTIL.waitTableAvailable(tableName);
   HStore store = getStoreWithName(tableName);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4f2133ee/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/throttle/TestFlushWithThroughputController.ja

[3/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
HBASE-20119 Introduce a pojo class to carry coprocessor information in order to 
make TableDescriptorBuilder accept multiple cp at once

Signed-off-by: Ted Yu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4f2133ee
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4f2133ee
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4f2133ee

Branch: refs/heads/master
Commit: 4f2133ee328890e1200b8618b5eac2bb8322f5e7
Parents: 31da4d0
Author: Chia-Ping Tsai 
Authored: Thu Mar 15 09:49:02 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Mar 16 01:21:38 2018 +0800

--
 .../archetypes/exemplars/client/HelloHBase.java |   2 +-
 .../hbase/backup/impl/BackupSystemTable.java|   8 +-
 .../hadoop/hbase/backup/util/RestoreTool.java   |   2 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   |  25 +-
 .../hbase/client/CoprocessorDescriptor.java |  51 
 .../client/CoprocessorDescriptorBuilder.java| 118 +
 .../hadoop/hbase/client/TableDescriptor.java|   7 +-
 .../hbase/client/TableDescriptorBuilder.java| 244 ++-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../hbase/client/TestCoprocessorDescriptor.java | 100 
 .../client/TestTableDescriptorBuilder.java  |  82 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  19 +-
 .../hbase/coprocessor/TestClassLoading.java |   2 +-
 .../hbase/coprocessor/TestSecureExport.java |   6 +-
 .../client/example/ExportEndpointExample.java   |   4 +-
 .../example/TestScanModifyingObserver.java  |   4 +-
 .../example/TestValueReplacingCompaction.java   |   4 +-
 .../TestWriteHeavyIncrementObserver.java|   4 +-
 ...IncrementObserverWithMemStoreCompaction.java |   4 +-
 .../TestZooKeeperScanPolicyObserver.java|   4 +-
 .../hbase/IntegrationTestDDLMasterFailover.java |   2 +-
 .../hbase/chaos/actions/AddColumnAction.java|   2 +-
 .../actions/TestChangeSplitPolicyAction.java|   2 +-
 .../mapreduce/IntegrationTestBulkLoad.java  |   2 +-
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java  |   2 +-
 .../hbase/mapreduce/TestImportExport.java   |  14 +-
 .../replication/TestVerifyReplication.java  |   2 +-
 .../TableReplicationStorageBase.java|  12 +-
 .../hbase/rsgroup/TestRSGroupsWithACL.java  |   2 +-
 .../hadoop/hbase/coprocessor/package-info.java  |   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   2 +-
 .../regionserver/RegionCoprocessorHost.java |  71 +-
 .../hbase/security/access/AccessController.java |   2 +-
 .../CoprocessorWhitelistMasterObserver.java |  68 ++
 .../hbase/tool/LoadIncrementalHFiles.java   |   2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   |  61 ++---
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   2 +-
 .../hadoop/hbase/util/RegionSplitter.java   |   2 +-
 .../hadoop/hbase/AcidGuaranteesTestBase.java|   2 +-
 .../hadoop/hbase/AcidGuaranteesTestTool.java|   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   6 +-
 .../org/apache/hadoop/hbase/TestZooKeeper.java  |   6 +-
 .../client/AbstractTestCIOperationTimeout.java  |   4 +-
 .../hbase/client/AbstractTestCIRpcTimeout.java  |   4 +-
 .../hadoop/hbase/client/TestAsyncAdminBase.java |   2 +-
 .../hbase/client/TestAsyncClusterAdminApi.java  |   2 +-
 .../hbase/client/TestAsyncRegionAdminApi.java   |   5 +-
 ...estAsyncReplicationAdminApiWithClusters.java |   4 +-
 .../hbase/client/TestAsyncTableAdminApi.java|  10 +-
 .../hbase/client/TestAsyncTableAdminApi2.java   |  16 +-
 .../hbase/client/TestAsyncTableAdminApi3.java   |   2 +-
 .../hbase/client/TestAsyncTableBatch.java   |   2 +-
 .../apache/hadoop/hbase/client/TestCISleep.java |   9 +-
 .../hbase/client/TestDropTimeoutRequest.java|   4 +-
 .../hbase/client/TestFromClientSide3.java   |   4 +-
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hbase/client/TestReplicaWithCluster.java|   6 +-
 .../hbase/client/TestResultFromCoprocessor.java |   4 +-
 .../hbase/client/TestServerLoadDurability.java  |   2 +-
 .../coprocessor/TestCoreRegionCoprocessor.java  |   2 +-
 .../TestPassCustomCellViaRegionObserver.java|   4 +-
 .../hbase/coprocessor/TestWALObserver.java  |   8 +-
 .../master/TestAssignmentManagerMetrics.java|  10 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java |   2 +-
 .../master/assignment/MockMasterServices.java   |   2 +-
 .../assignment/TestRogueRSAssignment.java   |   2 +-
 .../TestFavoredStochasticBalancerPickers.java   |   2 +-
 .../master/cleaner/TestSnapshotFromMaster.java  |   3 +-
 .../MasterProcedureTestingUtility.java  |   2 +-
 .../procedure/TestMasterObserverPostCalls.java  |  12 +-
 .../master/procedure/TestProcedurePriority.java |   2 +-
 .../hbase/quotas/SpaceQuotaHelperForTests.java  |   4 +-
 .../quotas/TestFileArchiv

[3/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
HBASE-20119 Introduce a pojo class to carry coprocessor information in order to 
make TableDescriptorBuilder accept multiple cp at once

Signed-off-by: Ted Yu 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/cb42585a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/cb42585a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/cb42585a

Branch: refs/heads/branch-2.0
Commit: cb42585a1104e121d0faf9b324a07487807c0095
Parents: 907ec67
Author: Chia-Ping Tsai 
Authored: Fri Mar 16 01:16:16 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Mar 16 01:19:03 2018 +0800

--
 .../archetypes/exemplars/client/HelloHBase.java |   2 +-
 .../apache/hadoop/hbase/HTableDescriptor.java   |  25 +-
 .../hbase/client/CoprocessorDescriptor.java |  51 
 .../client/CoprocessorDescriptorBuilder.java| 118 +
 .../hadoop/hbase/client/TableDescriptor.java|   7 +-
 .../hbase/client/TableDescriptorBuilder.java| 244 ++-
 .../hbase/shaded/protobuf/ProtobufUtil.java |   2 +-
 .../hbase/client/TestCoprocessorDescriptor.java | 100 
 .../client/TestTableDescriptorBuilder.java  |  82 ++-
 .../org/apache/hadoop/hbase/HConstants.java |  19 +-
 .../hbase/coprocessor/TestClassLoading.java |   2 +-
 .../hbase/coprocessor/TestSecureExport.java |   6 +-
 .../client/example/ExportEndpointExample.java   |   4 +-
 .../example/TestScanModifyingObserver.java  |   4 +-
 .../example/TestValueReplacingCompaction.java   |   4 +-
 .../TestWriteHeavyIncrementObserver.java|   4 +-
 ...IncrementObserverWithMemStoreCompaction.java |   4 +-
 .../TestZooKeeperScanPolicyObserver.java|   4 +-
 .../hbase/IntegrationTestDDLMasterFailover.java |   2 +-
 .../hbase/chaos/actions/AddColumnAction.java|   2 +-
 .../actions/TestChangeSplitPolicyAction.java|   2 +-
 .../mapreduce/IntegrationTestBulkLoad.java  |   2 +-
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java  |   2 +-
 .../mapreduce/TestCellBasedImportExport2.java   |  14 +-
 .../hbase/mapreduce/TestImportExport.java   |  14 +-
 .../replication/TestVerifyReplication.java  |   2 +-
 .../hbase/rsgroup/TestRSGroupsWithACL.java  |   2 +-
 .../hadoop/hbase/coprocessor/package-info.java  |   2 +-
 .../org/apache/hadoop/hbase/master/HMaster.java |   2 +-
 .../regionserver/RegionCoprocessorHost.java |  71 +-
 .../hbase/security/access/AccessController.java |   2 +-
 .../CoprocessorWhitelistMasterObserver.java |  68 ++
 .../hbase/tool/LoadIncrementalHFiles.java   |   2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   |  46 ++--
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   2 +-
 .../hadoop/hbase/util/RegionSplitter.java   |   2 +-
 .../hadoop/hbase/AcidGuaranteesTestBase.java|   2 +-
 .../hadoop/hbase/AcidGuaranteesTestTool.java|   2 +-
 .../hadoop/hbase/HBaseTestingUtility.java   |   6 +-
 .../org/apache/hadoop/hbase/TestZooKeeper.java  |   6 +-
 .../client/AbstractTestCIOperationTimeout.java  |   4 +-
 .../hbase/client/AbstractTestCIRpcTimeout.java  |   4 +-
 .../hadoop/hbase/client/TestAsyncAdminBase.java |   2 +-
 .../hbase/client/TestAsyncClusterAdminApi.java  |   2 +-
 .../hbase/client/TestAsyncRegionAdminApi.java   |   5 +-
 ...estAsyncReplicationAdminApiWithClusters.java |   4 +-
 .../hbase/client/TestAsyncTableAdminApi.java|  10 +-
 .../hbase/client/TestAsyncTableAdminApi2.java   |  16 +-
 .../hbase/client/TestAsyncTableAdminApi3.java   |   2 +-
 .../hbase/client/TestAsyncTableBatch.java   |   2 +-
 .../apache/hadoop/hbase/client/TestCISleep.java |   9 +-
 .../hbase/client/TestDropTimeoutRequest.java|   4 +-
 .../hbase/client/TestFromClientSide3.java   |   4 +-
 .../client/TestMalformedCellFromClient.java |   2 +-
 .../hbase/client/TestReplicaWithCluster.java|   6 +-
 .../hbase/client/TestResultFromCoprocessor.java |   4 +-
 .../hbase/client/TestServerLoadDurability.java  |   2 +-
 .../coprocessor/TestCoreRegionCoprocessor.java  |   2 +-
 .../TestPassCustomCellViaRegionObserver.java|   4 +-
 .../hbase/coprocessor/TestWALObserver.java  |   8 +-
 .../master/TestAssignmentManagerMetrics.java|  10 +-
 .../hadoop/hbase/master/TestCatalogJanitor.java |   2 +-
 .../master/assignment/MockMasterServices.java   |   2 +-
 .../assignment/TestRogueRSAssignment.java   |   2 +-
 .../TestFavoredStochasticBalancerPickers.java   |   2 +-
 .../master/cleaner/TestSnapshotFromMaster.java  |   3 +-
 .../MasterProcedureTestingUtility.java  |   2 +-
 .../procedure/TestMasterObserverPostCalls.java  |  12 +-
 .../master/procedure/TestProcedurePriority.java |   2 +-
 .../regionserver/TestCacheOnWriteInSchema.java  |   2 +-
 .../TestCompactionArchiveConcurrentClose.java   |   2 +-
 .../TestCompactionArchiveIOException.java   |   2 +-
 .../TestCompactionLife

[1/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2.0 907ec6775 -> cb42585a1


http://git-wip-us.apache.org/repos/asf/hbase/blob/cb42585a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
index 1cdb6e5..5336963 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestFSWAL.java
@@ -252,9 +252,9 @@ public abstract class AbstractTestFSWAL {
 AbstractFSWAL wal = newWAL(FS, CommonFSUtils.getWALRootDir(conf1), 
DIR.toString(),
   HConstants.HREGION_OLDLOGDIR_NAME, conf1, null, true, null, null);
 TableDescriptor t1 = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 TableDescriptor t2 = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("t2"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 RegionInfo hri1 = RegionInfoBuilder.newBuilder(t1.getTableName()).build();
 RegionInfo hri2 = RegionInfoBuilder.newBuilder(t2.getTableName()).build();
 // add edits and roll the wal
@@ -361,7 +361,7 @@ public abstract class AbstractTestFSWAL {
 final RegionInfo hri = RegionInfoBuilder.newBuilder(tableName).build();
 final byte[] rowName = tableName.getName();
 final TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
 HRegion r = HBaseTestingUtility.createRegionAndWAL(hri, 
TEST_UTIL.getDefaultRootDirPath(),
   TEST_UTIL.getConfiguration(), htd);
 HBaseTestingUtility.closeRegionAndWAL(r);
@@ -449,7 +449,7 @@ public abstract class AbstractTestFSWAL {
   CONF, null, true, null, null);
 wal.close();
 TableDescriptor td = 
TableDescriptorBuilder.newBuilder(TableName.valueOf("table"))
-  .addColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
+  .setColumnFamily(ColumnFamilyDescriptorBuilder.of("row")).build();
 RegionInfo ri = RegionInfoBuilder.newBuilder(td.getTableName()).build();
 MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
 NavigableMap scopes = new 
TreeMap<>(Bytes.BYTES_COMPARATOR);

http://git-wip-us.apache.org/repos/asf/hbase/blob/cb42585a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
index c6059b1..610af61 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/AbstractTestLogRolling.java
@@ -305,7 +305,7 @@ public abstract class AbstractTestLogRolling  {
   protected Table createTestTable(String tableName) throws IOException {
 // Create the test table and open it
 TableDescriptor desc = 
TableDescriptorBuilder.newBuilder(TableName.valueOf(getName()))
-
.addColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build();
+
.setColumnFamily(ColumnFamilyDescriptorBuilder.of(HConstants.CATALOG_FAMILY)).build();
 admin.createTable(desc);
 return TEST_UTIL.getConnection().getTable(desc.getTableName());
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/cb42585a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
index f5fabbc..4effa6d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestDurability.java
@@ -291,7 +291,7 @@ public class TestDurability {
   private HRegion createHRegion(WALFactory wals, Durability durability) throws 
IOException {
 TableName tableName = 
TableName.valueOf(name.getMethodName().replaceAll("[^A-Za-z0-9-_]", "_"));
 TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)

[2/3] hbase git commit: HBASE-20119 Introduce a pojo class to carry coprocessor information in order to make TableDescriptorBuilder accept multiple cp at once

2018-03-15 Thread chia7712
http://git-wip-us.apache.org/repos/asf/hbase/blob/cb42585a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
index 44a4f57..44f736b 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/CoprocessorWhitelistMasterObserver.java
@@ -21,21 +21,17 @@ package org.apache.hadoop.hbase.security.access;
 import java.io.IOException;
 import java.util.Collection;
 import java.util.Optional;
-import java.util.regex.Matcher;
-
 import org.apache.commons.io.FilenameUtils;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
 import org.apache.hadoop.hbase.client.RegionInfo;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.MasterObserver;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -81,10 +77,8 @@ public class CoprocessorWhitelistMasterObserver implements 
MasterCoprocessor, Ma
* "file:///usr/hbase/coprocessors" or for all
* filesystems "/usr/hbase/coprocessors")
* @return if the path was found under the wlPath
-   * @throws IOException if a failure occurs in getting the path file system
*/
-  private static boolean validatePath(Path coprocPath, Path wlPath,
-  Configuration conf) throws IOException {
+  private static boolean validatePath(Path coprocPath, Path wlPath) {
 // verify if all are allowed
 if (wlPath.toString().equals("*")) {
   return(true);
@@ -143,58 +137,26 @@ public class CoprocessorWhitelistMasterObserver 
implements MasterCoprocessor, Ma
* @param  ctx as passed in from the coprocessor
* @param  htd as passed in from the coprocessor
*/
-  private void 
verifyCoprocessors(ObserverContext ctx,
+  private static void 
verifyCoprocessors(ObserverContext ctx,
   TableDescriptor htd) throws IOException {
-
-Configuration conf = ctx.getEnvironment().getConfiguration();
-
 Collection paths =
-conf.getStringCollection(
+  ctx.getEnvironment().getConfiguration().getStringCollection(
 CP_COPROCESSOR_WHITELIST_PATHS_KEY);
-
-Collection coprocs = htd.getCoprocessors();
-for (int i = 0; i < coprocs.size(); i++) {
-
-  String coprocSpec = Bytes.toString(htd.getValue(
-  Bytes.toBytes("coprocessor$" + (i + 1;
-  if (coprocSpec == null) {
-continue;
-  }
-
-  // File path is the 1st field of the coprocessor spec
-  Matcher matcher =
-  HConstants.CP_HTD_ATTR_VALUE_PATTERN.matcher(coprocSpec);
-  if (matcher == null || !matcher.matches()) {
-continue;
-  }
-
-  String coprocPathStr = matcher.group(1).trim();
-  // Check if coprocessor is being loaded via the classpath (i.e. no file 
path)
-  if (coprocPathStr.equals("")) {
-break;
-  }
-  Path coprocPath = new Path(coprocPathStr);
-  String coprocessorClass = matcher.group(2).trim();
-
-  boolean foundPathMatch = false;
-  for (String pathStr : paths) {
-Path wlPath = new Path(pathStr);
-try {
-  foundPathMatch = validatePath(coprocPath, wlPath, conf);
-  if (foundPathMatch == true) {
+for (CoprocessorDescriptor cp : htd.getCoprocessorDescriptors()) {
+  if (cp.getJarPath().isPresent()) {
+if (paths.stream().noneMatch(p -> {
+  Path wlPath = new Path(p);
+  if (validatePath(new Path(cp.getJarPath().get()), wlPath)) {
 LOG.debug(String.format("Coprocessor %s found in directory %s",
-coprocessorClass, pathStr));
-break;
+  cp.getClassName(), p));
+return true;
   }
-} catch (IOException e) {
-  LOG.warn(String.format("Failed to validate white list path %s for 
coprocessor path %s",
-  pathStr, coprocPathStr));
+  return false;
+})) {
+  throw new IOException(String.format("Loading %s DENIED in %s",
+cp.getClassName(), CP_COPROCESSOR_WHITE

hbase git commit: HBASE-20058 improper quoting in presplitting command docs

2018-03-12 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master e8ee5c060 -> ec8aaeded


HBASE-20058 improper quoting in presplitting command docs

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ec8aaede
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ec8aaede
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ec8aaede

Branch: refs/heads/master
Commit: ec8aaeded846eaa37753e8eab43f665b92bd1e52
Parents: e8ee5c0
Author: maoling 
Authored: Sat Feb 24 11:59:05 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 12 23:33:59 2018 +0800

--
 src/main/asciidoc/_chapters/shell.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ec8aaede/src/main/asciidoc/_chapters/shell.adoc
--
diff --git a/src/main/asciidoc/_chapters/shell.adoc 
b/src/main/asciidoc/_chapters/shell.adoc
index 1e51a20..522f482 100644
--- a/src/main/asciidoc/_chapters/shell.adoc
+++ b/src/main/asciidoc/_chapters/shell.adoc
@@ -377,7 +377,7 @@ This will continue for all split points up to the last. The 
last region will be
 
 [source]
 
-hbase>create 't1','f',SPLITS => ['10','20',30']
+hbase>create 't1','f',SPLITS => ['10','20','30']
 
 
 In the above example, the table 't1' will be created with column family 'f', 
pre-split to four regions. Note the first region will contain all keys from 
'\x00' up to '\x30' (as '\x31' is the ASCII code for '1').



hbase git commit: HBASE-20058 improper quoting in presplitting command docs

2018-03-12 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 fac3b8aad -> 011809517


HBASE-20058 improper quoting in presplitting command docs

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/01180951
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/01180951
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/01180951

Branch: refs/heads/branch-2
Commit: 011809517a554f0bcef2bad87c41a3f17b0147df
Parents: fac3b8a
Author: maoling 
Authored: Sat Feb 24 11:59:05 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 12 23:46:37 2018 +0800

--
 src/main/asciidoc/_chapters/shell.adoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/01180951/src/main/asciidoc/_chapters/shell.adoc
--
diff --git a/src/main/asciidoc/_chapters/shell.adoc 
b/src/main/asciidoc/_chapters/shell.adoc
index 1e51a20..522f482 100644
--- a/src/main/asciidoc/_chapters/shell.adoc
+++ b/src/main/asciidoc/_chapters/shell.adoc
@@ -377,7 +377,7 @@ This will continue for all split points up to the last. The 
last region will be
 
 [source]
 
-hbase>create 't1','f',SPLITS => ['10','20',30']
+hbase>create 't1','f',SPLITS => ['10','20','30']
 
 
 In the above example, the table 't1' will be created with column family 'f', 
pre-split to four regions. Note the first region will contain all keys from 
'\x00' up to '\x30' (as '\x31' is the ASCII code for '1').



hbase git commit: HBASE-20047 AuthenticationTokenIdentifier should provide a toString

2018-03-12 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 6060d3ba5 -> 62fc7fd3e


HBASE-20047 AuthenticationTokenIdentifier should provide a toString

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/62fc7fd3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/62fc7fd3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/62fc7fd3

Branch: refs/heads/master
Commit: 62fc7fd3ea080a16c4019e7c67c8629cc9088e19
Parents: 6060d3b
Author: maoling 
Authored: Mon Mar 12 22:01:16 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 12 22:06:08 2018 +0800

--
 .../hbase/security/token/AuthenticationTokenIdentifier.java   | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/62fc7fd3/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 13655d5..1e4a529 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -187,4 +187,11 @@ public class AuthenticationTokenIdentifier extends 
TokenIdentifier {
   public int hashCode() {
 return (int)sequenceNumber;
   }
+
+  @Override
+  public String toString() {
+return "(username=" + username + ", keyId="
++ keyId + ", issueDate=" + issueDate
++ ", expirationDate=" + expirationDate + ", sequenceNumber=" + 
sequenceNumber + ")";
+  }
 }



hbase git commit: HBASE-20047 AuthenticationTokenIdentifier should provide a toString

2018-03-12 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 842678c99 -> fac3b8aad


HBASE-20047 AuthenticationTokenIdentifier should provide a toString

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/fac3b8aa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/fac3b8aa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/fac3b8aa

Branch: refs/heads/branch-2
Commit: fac3b8aad6cfbaeb16621318d8bf50b59ef961b9
Parents: 842678c
Author: maoling 
Authored: Mon Mar 12 22:01:16 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Mar 12 22:16:22 2018 +0800

--
 .../hbase/security/token/AuthenticationTokenIdentifier.java   | 7 +++
 1 file changed, 7 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/fac3b8aa/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
index 13655d5..1e4a529 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
@@ -187,4 +187,11 @@ public class AuthenticationTokenIdentifier extends 
TokenIdentifier {
   public int hashCode() {
 return (int)sequenceNumber;
   }
+
+  @Override
+  public String toString() {
+return "(username=" + username + ", keyId="
++ keyId + ", issueDate=" + issueDate
++ ", expirationDate=" + expirationDate + ", sequenceNumber=" + 
sequenceNumber + ")";
+  }
 }



hbase git commit: HBASE-20120 Removed unused classes/ java files from hbase-server

2018-03-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 70240f973 -> 45bbee490


HBASE-20120 Removed unused classes/ java files from hbase-server

deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
Signed-off-by: Sean Busbey 
Signed-off-by: tedyu 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/45bbee49
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/45bbee49
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/45bbee49

Branch: refs/heads/master
Commit: 45bbee4905fb031259c935502444c729b32ddac8
Parents: 70240f9
Author: Umesh Agashe 
Authored: Tue Feb 6 16:09:43 2018 -0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 10 13:08:38 2018 +0800

--
 .../hbase/regionserver/NoOpHeapMemoryTuner.java |  48 -
 .../hbase/replication/BaseWALEntryFilter.java   |  29 --
 .../apache/hadoop/hbase/util/FSMapRUtils.java   |  46 
 .../org/apache/hadoop/hbase/util/ProtoUtil.java | 104 ---
 4 files changed, 227 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/45bbee49/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
deleted file mode 100644
index 24cf85c..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext;
-import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult;
-
-/**
- * An implementation of HeapMemoryTuner which is not doing any tuning activity 
but just allows to
- * continue with old style fixed proportions.
- */
-@InterfaceAudience.Private
-public class NoOpHeapMemoryTuner implements HeapMemoryTuner {
-  
-  private static final TunerResult NO_OP_TUNER_RESULT = new TunerResult(false);
-
-  @Override
-  public Configuration getConf() {
-return null;
-  }
-
-  @Override
-  public void setConf(Configuration conf) {
-
-  }
-
-  @Override
-  public TunerResult tune(TunerContext context) {
-return NO_OP_TUNER_RESULT;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/45bbee49/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
deleted file mode 100644
index cd062a3..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under th

hbase git commit: HBASE-20120 Removed unused classes/ java files from hbase-server

2018-03-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 1728b48c8 -> 842678c99


HBASE-20120 Removed unused classes/ java files from hbase-server

deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSMapRUtils.java
deleted:
hbase-server/src/main/java/org/apache/hadoop/hbase/util/ProtoUtil.java
Signed-off-by: Sean Busbey 
Signed-off-by: tedyu 
Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/842678c9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/842678c9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/842678c9

Branch: refs/heads/branch-2
Commit: 842678c9914dfe64fc762fee552c8216fd44f903
Parents: 1728b48
Author: Umesh Agashe 
Authored: Tue Feb 6 16:09:43 2018 -0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 10 12:59:48 2018 +0800

--
 .../hbase/regionserver/NoOpHeapMemoryTuner.java |  48 -
 .../hbase/replication/BaseWALEntryFilter.java   |  29 --
 .../apache/hadoop/hbase/util/FSMapRUtils.java   |  46 
 .../org/apache/hadoop/hbase/util/ProtoUtil.java | 104 ---
 4 files changed, 227 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/842678c9/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
deleted file mode 100644
index 24cf85c..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/NoOpHeapMemoryTuner.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.regionserver;
-
-import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerContext;
-import org.apache.hadoop.hbase.regionserver.HeapMemoryManager.TunerResult;
-
-/**
- * An implementation of HeapMemoryTuner which is not doing any tuning activity 
but just allows to
- * continue with old style fixed proportions.
- */
-@InterfaceAudience.Private
-public class NoOpHeapMemoryTuner implements HeapMemoryTuner {
-  
-  private static final TunerResult NO_OP_TUNER_RESULT = new TunerResult(false);
-
-  @Override
-  public Configuration getConf() {
-return null;
-  }
-
-  @Override
-  public void setConf(Configuration conf) {
-
-  }
-
-  @Override
-  public TunerResult tune(TunerContext context) {
-return NO_OP_TUNER_RESULT;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hbase/blob/842678c9/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
deleted file mode 100644
index cd062a3..000
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/replication/BaseWALEntryFilter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed unde

hbase git commit: HBASE-20132 Change the "KV" to "Cell" for web UI

2018-03-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 b87165cbf -> 1728b48c8


HBASE-20132 Change the "KV" to "Cell" for web UI

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1728b48c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1728b48c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1728b48c

Branch: refs/heads/branch-2
Commit: 1728b48c8238a4d0372fca01ad733ce703c65874
Parents: b87165c
Author: Guangxu Cheng 
Authored: Mon Mar 5 11:20:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 10 12:36:29 2018 +0800

--
 .../hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon  | 12 ++--
 .../hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon |  6 +++---
 .../hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon |  4 ++--
 3 files changed, 11 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/1728b48c/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
index 9a0e369..41fe487 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
@@ -310,9 +310,9 @@ if (master.getServerManager() != null) {
 
 
 RSGroup Name
-Num. Compacting KVs
-Num. Compacted KVs
-Remaining KVs
+Num. Compacting Cells
+Num. Compacted Cells
+Remaining Cells
 Compaction Progress
 
 <%java>
@@ -321,7 +321,7 @@ if (master.getServerManager() != null) {
   int numStores = 0;
   long totalCompactingCells = 0;
   long totalCompactedCells = 0;
-  long remainingKVs = 0;
+  long remainingCells = 0;
   long compactionProgress  = 0;
   for (Address server : rsGroupInfo.getServers()) {
 ServerMetrics sl = collectServers.get(server);
@@ -332,7 +332,7 @@ if (master.getServerManager() != null) {
   }
 }
   }
-  remainingKVs = totalCompactingCells - totalCompactedCells;
+  remainingCells = totalCompactingCells - totalCompactedCells;
   String percentDone = "";
   if  (totalCompactingCells > 0) {
percentDone = String.format("%.2f", 100 *
@@ -343,7 +343,7 @@ if (master.getServerManager() != null) {
 <& rsGroupLink; rsGroupName=rsGroupName; &>
 <% totalCompactingCells %>
 <% totalCompactedCells %>
-<% remainingKVs %>
+<% remainingCells %>
 <% percentDone %>
 
 <%java>

http://git-wip-us.apache.org/repos/asf/hbase/blob/1728b48c/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
index fb7dd54..f353d32 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
@@ -289,9 +289,9 @@ if (sl != null) {
 
 
 ServerName
-Num. Compacting KVs
-Num. Compacted KVs
-Remaining KVs
+Num. Compacting Cells
+Num. Compacted Cells
+Remaining Cells
 Compaction Progress
 
 <%java>

http://git-wip-us.apache.org/repos/asf/hbase/blob/1728b48c/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
index d16ce06..6a01533 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
@@ -196,8 +196,8 @@
 
 
 Region Name
-Num. Compacting KVs
-Num. Compacted KVs
+Num. Compacting Cells
+Num. Compacted Cells
 Compaction Progress
 Last Major Compaction
 



hbase git commit: HBASE-20132 Change the "KV" to "Cell" for web UI

2018-03-09 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master dd6f4525e -> 70240f973


HBASE-20132 Change the "KV" to "Cell" for web UI

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/70240f97
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/70240f97
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/70240f97

Branch: refs/heads/master
Commit: 70240f9732da11484b47eb720a39c1cc8bb1a4e4
Parents: dd6f452
Author: Guangxu Cheng 
Authored: Mon Mar 5 11:20:25 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Mar 10 12:27:59 2018 +0800

--
 .../hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon  | 12 ++--
 .../hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon |  6 +++---
 .../hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon |  4 ++--
 3 files changed, 11 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/70240f97/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
index 9a0e369..41fe487 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RSGroupListTmpl.jamon
@@ -310,9 +310,9 @@ if (master.getServerManager() != null) {
 
 
 RSGroup Name
-Num. Compacting KVs
-Num. Compacted KVs
-Remaining KVs
+Num. Compacting Cells
+Num. Compacted Cells
+Remaining Cells
 Compaction Progress
 
 <%java>
@@ -321,7 +321,7 @@ if (master.getServerManager() != null) {
   int numStores = 0;
   long totalCompactingCells = 0;
   long totalCompactedCells = 0;
-  long remainingKVs = 0;
+  long remainingCells = 0;
   long compactionProgress  = 0;
   for (Address server : rsGroupInfo.getServers()) {
 ServerMetrics sl = collectServers.get(server);
@@ -332,7 +332,7 @@ if (master.getServerManager() != null) {
   }
 }
   }
-  remainingKVs = totalCompactingCells - totalCompactedCells;
+  remainingCells = totalCompactingCells - totalCompactedCells;
   String percentDone = "";
   if  (totalCompactingCells > 0) {
percentDone = String.format("%.2f", 100 *
@@ -343,7 +343,7 @@ if (master.getServerManager() != null) {
 <& rsGroupLink; rsGroupName=rsGroupName; &>
 <% totalCompactingCells %>
 <% totalCompactedCells %>
-<% remainingKVs %>
+<% remainingCells %>
 <% percentDone %>
 
 <%java>

http://git-wip-us.apache.org/repos/asf/hbase/blob/70240f97/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
index fb7dd54..f353d32 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/RegionServerListTmpl.jamon
@@ -289,9 +289,9 @@ if (sl != null) {
 
 
 ServerName
-Num. Compacting KVs
-Num. Compacted KVs
-Remaining KVs
+Num. Compacting Cells
+Num. Compacted Cells
+Remaining Cells
 Compaction Progress
 
 <%java>

http://git-wip-us.apache.org/repos/asf/hbase/blob/70240f97/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
index d16ce06..6a01533 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/regionserver/RegionListTmpl.jamon
@@ -196,8 +196,8 @@
 
 
 Region Name
-Num. Compacting KVs
-Num. Compacted KVs
+Num. Compacting Cells
+Num. Compacted Cells
 Compaction Progress
 Last Major Compaction
 



hbase git commit: HBASE-19437 Batch operation can't handle the null result for Append/Increment

2018-03-02 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 2a65066b3 -> db131be39


HBASE-19437 Batch operation can't handle the null result for Append/Increment

Signed-off-by: anoopsamjohn 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/db131be3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/db131be3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/db131be3

Branch: refs/heads/master
Commit: db131be39a2e4821751be1d654dbaff81024d0ee
Parents: 2a65066
Author: Chia-Ping Tsai 
Authored: Fri Mar 2 23:22:01 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Mar 2 23:31:56 2018 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   4 +-
 .../TestIncrementAndAppendWithNullResult.java   | 162 +++
 2 files changed, 164 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/db131be3/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 1ff67e9..803d3e8 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -692,7 +692,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   region.getTableDescriptor().getTableName(),
 EnvironmentEdgeManager.currentTime() - before);
 }
-return r;
+return r == null ? Result.EMPTY_RESULT : r;
   }
 
   /**
@@ -744,7 +744,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   region.getTableDescriptor().getTableName(),
   EnvironmentEdgeManager.currentTime() - before);
 }
-return r;
+return r == null ? Result.EMPTY_RESULT : r;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/db131be3/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
new file mode 100644
index 000..239190e
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
@@ -0,0 +1,162 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.coprocessor;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({CoprocessorTests.class, MediumTests.class})
+public class TestIncrementAndAppendWithNullResult {
+
+  @ClassRule
+  public static final HBaseClassTest

hbase git commit: HBASE-19437 Batch operation can't handle the null result for Append/Increment

2018-03-02 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a2de29560 -> e06e90bdb


HBASE-19437 Batch operation can't handle the null result for Append/Increment

Signed-off-by: anoopsamjohn 
Signed-off-by: Michael Stack 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e06e90bd
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e06e90bd
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e06e90bd

Branch: refs/heads/branch-2
Commit: e06e90bdbc35faf294869d6c2f4ea81910b48c2d
Parents: a2de295
Author: Chia-Ping Tsai 
Authored: Fri Mar 2 23:22:01 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Fri Mar 2 23:42:41 2018 +0800

--
 .../hbase/regionserver/RSRpcServices.java   |   4 +-
 .../TestIncrementAndAppendWithNullResult.java   | 162 +++
 2 files changed, 164 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e06e90bd/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index d0a1315..6dbced2 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -691,7 +691,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   region.getTableDescriptor().getTableName(),
 EnvironmentEdgeManager.currentTime() - before);
 }
-return r;
+return r == null ? Result.EMPTY_RESULT : r;
   }
 
   /**
@@ -743,7 +743,7 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   region.getTableDescriptor().getTableName(),
   EnvironmentEdgeManager.currentTime() - before);
 }
-return r;
+return r == null ? Result.EMPTY_RESULT : r;
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hbase/blob/e06e90bd/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
new file mode 100644
index 000..239190e
--- /dev/null
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestIncrementAndAppendWithNullResult.java
@@ -0,0 +1,162 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.coprocessor;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Optional;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilderFactory;
+import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Append;
+import org.apache.hadoop.hbase.client.Increment;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Row;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+@Category({CoprocessorTests.class, MediumTests.class})
+public class TestIncrementAndAppendWithNullResult {
+
+  @ClassRule
+  public static final HBaseClass

hbase git commit: HBASE-20093 (addendum) remove unused import of ServerLoad

2018-03-01 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 03f9cb89a -> a2bf2ad71


HBASE-20093 (addendum) remove unused import of ServerLoad

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a2bf2ad7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a2bf2ad7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a2bf2ad7

Branch: refs/heads/branch-2
Commit: a2bf2ad71eb58bd6708f62c6defbe82d17fb1166
Parents: 03f9cb8
Author: Chia-Ping Tsai 
Authored: Thu Mar 1 23:43:40 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 1 23:44:29 2018 +0800

--
 .../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon| 1 -
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 3 +--
 2 files changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a2bf2ad7/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 12e0a69b..3091e18 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -37,7 +37,6 @@ org.apache.hadoop.hbase.HBaseConfiguration;
 org.apache.hadoop.hbase.HConstants;
 org.apache.hadoop.hbase.HTableDescriptor;
 org.apache.hadoop.hbase.NamespaceDescriptor;
-org.apache.hadoop.hbase.ServerLoad;
 org.apache.hadoop.hbase.ServerName;
 org.apache.hadoop.hbase.TableName;
 org.apache.hadoop.hbase.client.Admin;

http://git-wip-us.apache.org/repos/asf/hbase/blob/a2bf2ad7/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index 19b2101..b928e52 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -70,7 +70,6 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.PleaseHoldException;
-import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerMetricsBuilder;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
@@ -1038,7 +1037,7 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 for (ServerName sn: this.regionServerTracker.getOnlineServers()) {
   // The isServerOnline check is opportunistic, correctness is handled 
inside
   if (!this.serverManager.isServerOnline(sn) &&
-  serverManager.checkAndRecordNewServer(sn, new 
ServerLoad(ServerMetricsBuilder.of(sn {
+  serverManager.checkAndRecordNewServer(sn, 
ServerMetricsBuilder.of(sn))) {
 LOG.info("Registered server found up in zk but who has not yet 
reported in: " + sn);
   }
 }



hbase git commit: HBASE-20093 (addendum) remove unused import of ServerLoad

2018-03-01 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 0732ef5eb -> 776eb5d9c


HBASE-20093 (addendum) remove unused import of ServerLoad

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/776eb5d9
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/776eb5d9
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/776eb5d9

Branch: refs/heads/master
Commit: 776eb5d9cb1658ee512da21b62d94d9258cf8e50
Parents: 0732ef5
Author: Chia-Ping Tsai 
Authored: Thu Mar 1 20:44:27 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Mar 1 23:47:36 2018 +0800

--
 .../org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon| 1 -
 .../src/main/java/org/apache/hadoop/hbase/master/HMaster.java | 3 +--
 2 files changed, 1 insertion(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/776eb5d9/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
--
diff --git 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
index 12e0a69b..3091e18 100644
--- 
a/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
+++ 
b/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/master/MasterStatusTmpl.jamon
@@ -37,7 +37,6 @@ org.apache.hadoop.hbase.HBaseConfiguration;
 org.apache.hadoop.hbase.HConstants;
 org.apache.hadoop.hbase.HTableDescriptor;
 org.apache.hadoop.hbase.NamespaceDescriptor;
-org.apache.hadoop.hbase.ServerLoad;
 org.apache.hadoop.hbase.ServerName;
 org.apache.hadoop.hbase.TableName;
 org.apache.hadoop.hbase.client.Admin;

http://git-wip-us.apache.org/repos/asf/hbase/blob/776eb5d9/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index b0dd0b4..c33f555 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.MetaTableAccessor;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.PleaseHoldException;
 import org.apache.hadoop.hbase.ReplicationPeerNotFoundException;
-import org.apache.hadoop.hbase.ServerLoad;
 import org.apache.hadoop.hbase.ServerMetricsBuilder;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableDescriptors;
@@ -1041,7 +1040,7 @@ public class HMaster extends HRegionServer implements 
MasterServices {
 for (ServerName sn: this.regionServerTracker.getOnlineServers()) {
   // The isServerOnline check is opportunistic, correctness is handled 
inside
   if (!this.serverManager.isServerOnline(sn) &&
-  serverManager.checkAndRecordNewServer(sn, new 
ServerLoad(ServerMetricsBuilder.of(sn {
+  serverManager.checkAndRecordNewServer(sn, 
ServerMetricsBuilder.of(sn))) {
 LOG.info("Registered server found up in zk but who has not yet 
reported in: " + sn);
   }
 }



hbase git commit: HBASE-20097 Merge TableDescriptors#getAll and TableDescriptors#getAllDescriptors into one

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 e50e6f7ce -> f71c00c02


HBASE-20097 Merge TableDescriptors#getAll and 
TableDescriptors#getAllDescriptors into one

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/f71c00c0
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/f71c00c0
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/f71c00c0

Branch: refs/heads/branch-2
Commit: f71c00c02e7a27d9cd6ff1de8c320b818ebb6221
Parents: e50e6f7
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 16:34:14 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 15:21:31 2018 +0800

--
 .../org/apache/hadoop/hbase/TableDescriptors.java   |  9 -
 .../hadoop/hbase/master/TableStateManager.java  |  2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   | 16 +---
 .../hbase/master/assignment/MockMasterServices.java |  5 -
 4 files changed, 2 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/f71c00c0/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
index ecdfc82..5787f66 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
@@ -55,15 +55,6 @@ public interface TableDescriptors {
   throws IOException;
 
   /**
-   * Get Map of all TableDescriptors. Populates the descriptor cache as a
-   * side effect.
-   * @return Map of all descriptors.
-   * @throws IOException
-   */
-  Map getAllDescriptors()
-  throws IOException;
-
-  /**
* Add or update descriptor
* @param htd Descriptor to set into TableDescriptors
* @throws IOException

http://git-wip-us.apache.org/repos/asf/hbase/blob/f71c00c0/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
index 0d89eef..affb684 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
@@ -223,7 +223,7 @@ public class TableStateManager {
 
   private void fixTableStates(TableDescriptors tableDescriptors, Connection 
connection)
   throws IOException {
-final Map allDescriptors = 
tableDescriptors.getAllDescriptors();
+final Map allDescriptors = 
tableDescriptors.getAll();
 final Map states = new HashMap<>();
 // NOTE: Full hbase:meta table scan!
 MetaTableAccessor.fullScanTables(connection, new 
MetaTableAccessor.Visitor() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/f71c00c0/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index c72b9e0..b4b0be0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -248,7 +248,7 @@ public class FSTableDescriptors implements TableDescriptors 
{
* Returns a map from table name to table descriptor for all tables.
*/
   @Override
-  public Map getAllDescriptors()
+  public Map getAll()
   throws IOException {
 Map tds = new TreeMap<>();
 
@@ -282,20 +282,6 @@ public class FSTableDescriptors implements 
TableDescriptors {
   }
 
   /**
-   * Returns a map from table name to table descriptor for all tables.
-   */
-  @Override
-  public Map getAll() throws IOException {
-Map htds = new TreeMap<>();
-Map allDescriptors = getAllDescriptors();
-for (Map.Entry entry : allDescriptors
-.entrySet()) {
-  htds.put(entry.getKey(), entry.getValue());
-}
-return htds;
-  }
-
-  /**
 * Find descriptors by namespace.
 * @see #get(org.apache.hadoop.hbase.TableName)
 */

http://git-wip-us.apache.org/repos/asf/hbase/blob/f71c00c0/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 
b/hbase-serv

hbase git commit: HBASE-20097 Merge TableDescriptors#getAll and TableDescriptors#getAllDescriptors into one

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 197bd7907 -> 62ee7d950


HBASE-20097 Merge TableDescriptors#getAll and 
TableDescriptors#getAllDescriptors into one

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/62ee7d95
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/62ee7d95
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/62ee7d95

Branch: refs/heads/master
Commit: 62ee7d9502d599f704ea8c9cf16f9ac4c1b1d22a
Parents: 197bd79
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 16:34:14 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 15:21:21 2018 +0800

--
 .../org/apache/hadoop/hbase/TableDescriptors.java   |  9 -
 .../hadoop/hbase/master/TableStateManager.java  |  2 +-
 .../hadoop/hbase/util/FSTableDescriptors.java   | 16 +---
 .../hbase/master/assignment/MockMasterServices.java |  5 -
 4 files changed, 2 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/62ee7d95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
index ecdfc82..5787f66 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
@@ -55,15 +55,6 @@ public interface TableDescriptors {
   throws IOException;
 
   /**
-   * Get Map of all TableDescriptors. Populates the descriptor cache as a
-   * side effect.
-   * @return Map of all descriptors.
-   * @throws IOException
-   */
-  Map getAllDescriptors()
-  throws IOException;
-
-  /**
* Add or update descriptor
* @param htd Descriptor to set into TableDescriptors
* @throws IOException

http://git-wip-us.apache.org/repos/asf/hbase/blob/62ee7d95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
index 0d89eef..affb684 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableStateManager.java
@@ -223,7 +223,7 @@ public class TableStateManager {
 
   private void fixTableStates(TableDescriptors tableDescriptors, Connection 
connection)
   throws IOException {
-final Map allDescriptors = 
tableDescriptors.getAllDescriptors();
+final Map allDescriptors = 
tableDescriptors.getAll();
 final Map states = new HashMap<>();
 // NOTE: Full hbase:meta table scan!
 MetaTableAccessor.fullScanTables(connection, new 
MetaTableAccessor.Visitor() {

http://git-wip-us.apache.org/repos/asf/hbase/blob/62ee7d95/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
index c72b9e0..b4b0be0 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSTableDescriptors.java
@@ -248,7 +248,7 @@ public class FSTableDescriptors implements TableDescriptors 
{
* Returns a map from table name to table descriptor for all tables.
*/
   @Override
-  public Map getAllDescriptors()
+  public Map getAll()
   throws IOException {
 Map tds = new TreeMap<>();
 
@@ -282,20 +282,6 @@ public class FSTableDescriptors implements 
TableDescriptors {
   }
 
   /**
-   * Returns a map from table name to table descriptor for all tables.
-   */
-  @Override
-  public Map getAll() throws IOException {
-Map htds = new TreeMap<>();
-Map allDescriptors = getAllDescriptors();
-for (Map.Entry entry : allDescriptors
-.entrySet()) {
-  htds.put(entry.getKey(), entry.getValue());
-}
-return htds;
-  }
-
-  /**
 * Find descriptors by namespace.
 * @see #get(org.apache.hadoop.hbase.TableName)
 */

http://git-wip-us.apache.org/repos/asf/hbase/blob/62ee7d95/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/assignment/MockMasterServices.java
 
b/hbase-server/s

hbase git commit: HBASE-20084 Refactor the RSRpcServices#doBatchOp

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master 7f6e971c4 -> 197bd7907


HBASE-20084 Refactor the RSRpcServices#doBatchOp

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/197bd790
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/197bd790
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/197bd790

Branch: refs/heads/master
Commit: 197bd790701553bd5c7de8b6af47500e0e028920
Parents: 7f6e971
Author: Chia-Ping Tsai 
Authored: Mon Feb 26 20:49:05 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 15:15:34 2018 +0800

--
 .../hbase/regionserver/RSRpcServices.java   | 115 ++-
 1 file changed, 58 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/197bd790/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 7e01c9a..4dd826f 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -121,6 +121,7 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -763,7 +764,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 // Gather up CONTIGUOUS Puts and Deletes in this mutations List.  Idea is 
that rather than do
 // one at a time, we instead pass them in batch.  Be aware that the 
corresponding
 // ResultOrException instance that matches each Put or Delete is then 
added down in the
-// doBatchOp call.  We should be staying aligned though the Put and Delete 
are deferred/batched
+// doNonAtomicBatchOp call.  We should be staying aligned though the Put 
and Delete are
+// deferred/batched
 List mutations = null;
 long maxQuotaResultSize = Math.min(maxScannerResultSize, 
quota.getReadAvailable());
 IOException sizeIOE = null;
@@ -802,7 +804,6 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   // use it for the response.
   //
   // This will create a copy in the builder.
-  hasResultOrException = true;
   NameBytesPair pair = ResponseConverter.buildException(sizeIOE);
   resultOrExceptionBuilder.setException(pair);
   context.incrementResponseExceptionSize(pair.getSerializedSize());
@@ -829,29 +830,23 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
   }
 } else if (action.hasServiceCall()) {
   hasResultOrException = true;
-  try {
-com.google.protobuf.Message result =
-execServiceOnRegion(region, action.getServiceCall());
-ClientProtos.CoprocessorServiceResult.Builder serviceResultBuilder 
=
-ClientProtos.CoprocessorServiceResult.newBuilder();
-resultOrExceptionBuilder.setServiceResult(
-serviceResultBuilder.setValue(
-  serviceResultBuilder.getValueBuilder()
-.setName(result.getClass().getName())
-// TODO: Copy!!!
-
.setValue(UnsafeByteOperations.unsafeWrap(result.toByteArray();
-  } catch (IOException ioe) {
-rpcServer.getMetrics().exception(ioe);
-NameBytesPair pair = ResponseConverter.buildException(ioe);
-resultOrExceptionBuilder.setException(pair);
-context.incrementResponseExceptionSize(pair.getSerializedSize());
-  }
+  com.google.protobuf.Message result =
+execServiceOnRegion(region, action.getServiceCall());
+  ClientProtos.CoprocessorServiceResult.Builder serviceResultBuilder =
+ClientProtos.CoprocessorServiceResult.newBuilder();
+  resultOrExceptionBuilder.setServiceResult(
+serviceResultBuilder.setValue(
+  serviceResultBuilder.getValueBuilder()
+.setName(result.getClass().getName())
+// TODO: Copy!!!
+
.setValue(UnsafeByteOperations.unsafeWrap(result.toByteArray();
 } else if (action.hasMutation()) {
   MutationType type = action.getMutation().getMutateType();
   if (type != MutationType.PUT && 

hbase git commit: HBASE-20084 Refactor the RSRpcServices#doBatchOp

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 c459282fe -> e50e6f7ce


HBASE-20084 Refactor the RSRpcServices#doBatchOp

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/e50e6f7c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/e50e6f7c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/e50e6f7c

Branch: refs/heads/branch-2
Commit: e50e6f7ce92733472a757acee2c22d7fed329aef
Parents: c459282
Author: Chia-Ping Tsai 
Authored: Mon Feb 26 20:49:05 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 15:13:09 2018 +0800

--
 .../hbase/regionserver/RSRpcServices.java   | 115 ++-
 1 file changed, 58 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/e50e6f7c/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
--
diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 42284e9..d0a1315 100644
--- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -122,6 +122,7 @@ import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.access.AccessChecker;
 import org.apache.hadoop.hbase.security.access.Permission;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.CollectionUtils;
 import org.apache.hadoop.hbase.util.DNS;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.Pair;
@@ -762,7 +763,8 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
 // Gather up CONTIGUOUS Puts and Deletes in this mutations List.  Idea is 
that rather than do
 // one at a time, we instead pass them in batch.  Be aware that the 
corresponding
 // ResultOrException instance that matches each Put or Delete is then 
added down in the
-// doBatchOp call.  We should be staying aligned though the Put and Delete 
are deferred/batched
+// doNonAtomicBatchOp call.  We should be staying aligned though the Put 
and Delete are
+// deferred/batched
 List mutations = null;
 long maxQuotaResultSize = Math.min(maxScannerResultSize, 
quota.getReadAvailable());
 IOException sizeIOE = null;
@@ -801,7 +803,6 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   // use it for the response.
   //
   // This will create a copy in the builder.
-  hasResultOrException = true;
   NameBytesPair pair = ResponseConverter.buildException(sizeIOE);
   resultOrExceptionBuilder.setException(pair);
   context.incrementResponseExceptionSize(pair.getSerializedSize());
@@ -828,29 +829,23 @@ public class RSRpcServices implements 
HBaseRPCErrorHandler,
   }
 } else if (action.hasServiceCall()) {
   hasResultOrException = true;
-  try {
-com.google.protobuf.Message result =
-execServiceOnRegion(region, action.getServiceCall());
-ClientProtos.CoprocessorServiceResult.Builder serviceResultBuilder 
=
-ClientProtos.CoprocessorServiceResult.newBuilder();
-resultOrExceptionBuilder.setServiceResult(
-serviceResultBuilder.setValue(
-  serviceResultBuilder.getValueBuilder()
-.setName(result.getClass().getName())
-// TODO: Copy!!!
-
.setValue(UnsafeByteOperations.unsafeWrap(result.toByteArray();
-  } catch (IOException ioe) {
-rpcServer.getMetrics().exception(ioe);
-NameBytesPair pair = ResponseConverter.buildException(ioe);
-resultOrExceptionBuilder.setException(pair);
-context.incrementResponseExceptionSize(pair.getSerializedSize());
-  }
+  com.google.protobuf.Message result =
+execServiceOnRegion(region, action.getServiceCall());
+  ClientProtos.CoprocessorServiceResult.Builder serviceResultBuilder =
+ClientProtos.CoprocessorServiceResult.newBuilder();
+  resultOrExceptionBuilder.setServiceResult(
+serviceResultBuilder.setValue(
+  serviceResultBuilder.getValueBuilder()
+.setName(result.getClass().getName())
+// TODO: Copy!!!
+
.setValue(UnsafeByteOperations.unsafeWrap(result.toByteArray();
 } else if (action.hasMutation()) {
   MutationType type = action.getMutation().getMutateType();
   if (type != MutationType.PUT

hbase git commit: HBASE-20093 Replace ServerLoad by ServerMetrics for ServerManager

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master ba063abd2 -> 7f6e971c4


HBASE-20093 Replace ServerLoad by ServerMetrics for ServerManager

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/7f6e971c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/7f6e971c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/7f6e971c

Branch: refs/heads/master
Commit: 7f6e971c4cc2e3906f959c6304fc05faa7703054
Parents: ba063ab
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 23:20:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 14:57:50 2018 +0800

--
 .../hbase/coprocessor/TestClassLoading.java |  36 ++-
 .../hbase/rsgroup/TestRSGroupsOfflineMode.java  |   4 +-
 .../hbase/tmpl/master/RSGroupListTmpl.jamon |  83 ++---
 .../tmpl/master/RegionServerListTmpl.jamon  |  99 --
 .../hbase/favored/FavoredNodeLoadBalancer.java  |  16 +-
 .../hadoop/hbase/master/MasterDumpServlet.java  |  10 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |  12 +-
 .../hadoop/hbase/master/ServerManager.java  |  47 +--
 .../hbase/master/balancer/BaseLoadBalancer.java |  10 +-
 .../balancer/FavoredStochasticBalancer.java |  10 +-
 .../normalizer/SimpleRegionNormalizer.java  |  13 +-
 .../resources/hbase-webapps/master/rsgroup.jsp  |  89 --
 .../resources/hbase-webapps/master/table.jsp| 300 ---
 .../normalizer/TestSimpleRegionNormalizer.java  |  12 +-
 14 files changed, 381 insertions(+), 360 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/7f6e971c/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 922977c..bc75881 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -22,8 +22,14 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import java.io.*;
-import java.util.*;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,8 +40,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.RegionLoad;
-import org.apache.hadoop.hbase.ServerLoad;
+import org.apache.hadoop.hbase.RegionMetrics;
+import org.apache.hadoop.hbase.ServerMetrics;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -47,8 +53,10 @@ import 
org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.junit.*;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -482,13 +490,13 @@ public class TestClassLoading {
* @param tableName : given table.
* @return subset of all servers.
*/
-  Map serversForTable(String tableName) {
-Map serverLoadHashMap = new HashMap<>();
-for(Map.Entry server:
+  Map serversForTable(String tableName) {
+Map serverLoadHashMap = new HashMap<>();
+for(Map.Entry server:
 TEST_UTIL.getMiniHBaseCluster().getMaster().getServerManager().
 getOnlineServers().entrySet()) {
-  for( Map.Entry region:
-  server.getValue().getRegionsLoad().entrySet()) {
+  for(Map.Entry region:
+  server.getValue().getRegionMetrics().entrySet()) {
 if (region.getValue().getNameAsString().equals(tableName)) {
   // this server hosts a region of tableName: add this server..
   serverLoadHashMap.put(server.getKey(),server.getValue());
@@ -501,8 +509,7 @@ public class TestClassLoading {
   }
 
   void assertAllRegionServers(String tableName) throws InterruptedException {
-Map servers;
-String[] actualCoprocessors = null;
+Map servers;
 boolean success =

hbase git commit: HBASE-20093 Replace ServerLoad by ServerMetrics for ServerManager

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 5317ca92b -> c459282fe


HBASE-20093 Replace ServerLoad by ServerMetrics for ServerManager

Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c459282f
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c459282f
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c459282f

Branch: refs/heads/branch-2
Commit: c459282fe0c8988e9eab2719ec8fcd1ec0176bb1
Parents: 5317ca9
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 23:20:06 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Wed Feb 28 15:05:52 2018 +0800

--
 .../hbase/coprocessor/TestClassLoading.java |  36 ++-
 .../hbase/rsgroup/TestRSGroupsOfflineMode.java  |   4 +-
 .../hbase/tmpl/master/RSGroupListTmpl.jamon |  83 ++---
 .../tmpl/master/RegionServerListTmpl.jamon  |  99 --
 .../hbase/favored/FavoredNodeLoadBalancer.java  |  16 +-
 .../hadoop/hbase/master/MasterDumpServlet.java  |  10 +-
 .../hadoop/hbase/master/MasterRpcServices.java  |  12 +-
 .../hadoop/hbase/master/ServerManager.java  |  47 +--
 .../hbase/master/balancer/BaseLoadBalancer.java |  10 +-
 .../balancer/FavoredStochasticBalancer.java |  10 +-
 .../normalizer/SimpleRegionNormalizer.java  |  13 +-
 .../resources/hbase-webapps/master/rsgroup.jsp  |  89 --
 .../resources/hbase-webapps/master/table.jsp| 300 ---
 .../normalizer/TestSimpleRegionNormalizer.java  |  12 +-
 14 files changed, 381 insertions(+), 360 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/c459282f/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
--
diff --git 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
index 922977c..bc75881 100644
--- 
a/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
+++ 
b/hbase-endpoint/src/test/java/org/apache/hadoop/hbase/coprocessor/TestClassLoading.java
@@ -22,8 +22,14 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
-import java.io.*;
-import java.util.*;
+import java.io.File;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -34,8 +40,8 @@ import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
-import org.apache.hadoop.hbase.RegionLoad;
-import org.apache.hadoop.hbase.ServerLoad;
+import org.apache.hadoop.hbase.RegionMetrics;
+import org.apache.hadoop.hbase.ServerMetrics;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
@@ -47,8 +53,10 @@ import 
org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.ClassLoaderTestHelper;
 import org.apache.hadoop.hbase.util.CoprocessorClassLoader;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.junit.*;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -482,13 +490,13 @@ public class TestClassLoading {
* @param tableName : given table.
* @return subset of all servers.
*/
-  Map serversForTable(String tableName) {
-Map serverLoadHashMap = new HashMap<>();
-for(Map.Entry server:
+  Map serversForTable(String tableName) {
+Map serverLoadHashMap = new HashMap<>();
+for(Map.Entry server:
 TEST_UTIL.getMiniHBaseCluster().getMaster().getServerManager().
 getOnlineServers().entrySet()) {
-  for( Map.Entry region:
-  server.getValue().getRegionsLoad().entrySet()) {
+  for(Map.Entry region:
+  server.getValue().getRegionMetrics().entrySet()) {
 if (region.getValue().getNameAsString().equals(tableName)) {
   // this server hosts a region of tableName: add this server..
   serverLoadHashMap.put(server.getKey(),server.getValue());
@@ -501,8 +509,7 @@ public class TestClassLoading {
   }
 
   void assertAllRegionServers(String tableName) throws InterruptedException {
-Map servers;
-String[] actualCoprocessors = null;
+Map servers;
 boolean succe

hbase git commit: HBASE-20092 Fix TestRegionMetrics#testRegionMetrics

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master f06a89b53 -> dbd801305


HBASE-20092 Fix TestRegionMetrics#testRegionMetrics

Signed-off-by: Michael Stack 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dbd80130
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dbd80130
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dbd80130

Branch: refs/heads/master
Commit: dbd80130578ef2ed3ab1244090d36eab55ef32e6
Parents: f06a89b
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 16:48:17 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Feb 27 16:48:17 2018 +0800

--
 .../org/apache/hadoop/hbase/TestRegionLoad.java | 15 ++
 .../apache/hadoop/hbase/TestRegionMetrics.java  | 29 +---
 2 files changed, 34 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dbd80130/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
index d0484d6..a390aca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
@@ -27,6 +27,7 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.ClusterMetrics.Option;
 import org.apache.hadoop.hbase.client.Admin;
@@ -34,7 +35,6 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Threads;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -61,12 +61,13 @@ public class TestRegionLoad {
   private static final TableName TABLE_2 = TableName.valueOf("table_2");
   private static final TableName TABLE_3 = TableName.valueOf("table_3");
   private static final TableName[] tables = new TableName[]{TABLE_1, TABLE_2, 
TABLE_3};
+  private static final int MSG_INTERVAL = 500; // ms
 
   @BeforeClass
   public static void beforeClass() throws Exception {
 // Make servers report eagerly. This test is about looking at the cluster 
status reported.
 // Make it so we don't have to wait around too long to see change.
-UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 500);
+UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 
MSG_INTERVAL);
 UTIL.startMiniCluster(4);
 admin = UTIL.getAdmin();
 admin.setBalancerRunning(false, true);
@@ -117,11 +118,13 @@ public class TestRegionLoad {
   }
   checkRegionsAndRegionLoads(tableRegions, regionLoads);
 }
-int pause = 
UTIL.getConfiguration().getInt("hbase.regionserver.msginterval", 3000);
 
 // Just wait here. If this fixes the test, come back and do a better job.
 // Would have to redo the below so can wait on cluster status changing.
-Threads.sleep(2 * pause);
+// Admin#getClusterMetrics retrieves data from HMaster. 
Admin#getRegionMetrics, by contrast,
+// get the data from RS. Hence, it will fail if we do the assert check 
before RS has done
+// the report.
+TimeUnit.MILLISECONDS.sleep(3 * MSG_INTERVAL);
 
 // Check RegionLoad matches the regionLoad from ClusterStatus
 ClusterStatus clusterStatus
@@ -133,10 +136,10 @@ public class TestRegionLoad {
   (v1, v2) -> {
 throw new RuntimeException("impossible!!");
   }, () -> new TreeMap<>(Bytes.BYTES_COMPARATOR)));
-  LOG.info("serverName=" + serverName + ", getRegionLoads=" +
+  LOG.debug("serverName=" + serverName + ", getRegionLoads=" +
   serverLoad.getRegionsLoad().keySet().stream().map(r -> 
Bytes.toString(r)).
   collect(Collectors.toList()));
-  LOG.info("serverName=" + serverName + ", regionLoads=" +
+  LOG.debug("serverName=" + serverName + ", regionLoads=" +
   regionLoads.keySet().stream().map(r -> Bytes.toString(r)).
   collect(Collectors.toList()));
   compareRegionLoads(serverLoad.getRegionsLoad(), regionLoads);

http://git-wip-us.apache.org/repos/asf/hbase/blob/dbd80130/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java
index

hbase git commit: HBASE-20092 Fix TestRegionMetrics#testRegionMetrics

2018-02-27 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 8e8e50683 -> ae288a7b4


HBASE-20092 Fix TestRegionMetrics#testRegionMetrics

Signed-off-by: Michael Stack 
Signed-off-by: tedyu 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ae288a7b
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ae288a7b
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ae288a7b

Branch: refs/heads/branch-2
Commit: ae288a7b41b02b9c3bb4498ba4a41bbd7d18b03d
Parents: 8e8e506
Author: Chia-Ping Tsai 
Authored: Tue Feb 27 16:48:17 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Feb 27 16:48:48 2018 +0800

--
 .../org/apache/hadoop/hbase/TestRegionLoad.java | 15 ++
 .../apache/hadoop/hbase/TestRegionMetrics.java  | 29 +---
 2 files changed, 34 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/ae288a7b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
index d0484d6..a390aca 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionLoad.java
@@ -27,6 +27,7 @@ import java.util.EnumSet;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import org.apache.hadoop.hbase.ClusterMetrics.Option;
 import org.apache.hadoop.hbase.client.Admin;
@@ -34,7 +35,6 @@ import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Threads;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -61,12 +61,13 @@ public class TestRegionLoad {
   private static final TableName TABLE_2 = TableName.valueOf("table_2");
   private static final TableName TABLE_3 = TableName.valueOf("table_3");
   private static final TableName[] tables = new TableName[]{TABLE_1, TABLE_2, 
TABLE_3};
+  private static final int MSG_INTERVAL = 500; // ms
 
   @BeforeClass
   public static void beforeClass() throws Exception {
 // Make servers report eagerly. This test is about looking at the cluster 
status reported.
 // Make it so we don't have to wait around too long to see change.
-UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 500);
+UTIL.getConfiguration().setInt("hbase.regionserver.msginterval", 
MSG_INTERVAL);
 UTIL.startMiniCluster(4);
 admin = UTIL.getAdmin();
 admin.setBalancerRunning(false, true);
@@ -117,11 +118,13 @@ public class TestRegionLoad {
   }
   checkRegionsAndRegionLoads(tableRegions, regionLoads);
 }
-int pause = 
UTIL.getConfiguration().getInt("hbase.regionserver.msginterval", 3000);
 
 // Just wait here. If this fixes the test, come back and do a better job.
 // Would have to redo the below so can wait on cluster status changing.
-Threads.sleep(2 * pause);
+// Admin#getClusterMetrics retrieves data from the HMaster. Admin#getRegionMetrics,
+// by contrast, gets the data from the RS. Hence, the assert check will fail if we
+// run it before the RS has done its report.
+TimeUnit.MILLISECONDS.sleep(3 * MSG_INTERVAL);
 
 // Check RegionLoad matches the regionLoad from ClusterStatus
 ClusterStatus clusterStatus
@@ -133,10 +136,10 @@ public class TestRegionLoad {
   (v1, v2) -> {
 throw new RuntimeException("impossible!!");
   }, () -> new TreeMap<>(Bytes.BYTES_COMPARATOR)));
-  LOG.info("serverName=" + serverName + ", getRegionLoads=" +
+  LOG.debug("serverName=" + serverName + ", getRegionLoads=" +
   serverLoad.getRegionsLoad().keySet().stream().map(r -> 
Bytes.toString(r)).
   collect(Collectors.toList()));
-  LOG.info("serverName=" + serverName + ", regionLoads=" +
+  LOG.debug("serverName=" + serverName + ", regionLoads=" +
   regionLoads.keySet().stream().map(r -> Bytes.toString(r)).
   collect(Collectors.toList()));
   compareRegionLoads(serverLoad.getRegionsLoad(), regionLoads);

http://git-wip-us.apache.org/repos/asf/hbase/blob/ae288a7b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestRegionMetrics.java
i

hbase git commit: HBASE-20019 Document the ColumnValueFilter

2018-02-25 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master a8471bd98 -> a34f129af


HBASE-20019 Document the ColumnValueFilter

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a34f129a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a34f129a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a34f129a

Branch: refs/heads/master
Commit: a34f129a9b5e1098f1c9e86b1b8e7202bb97
Parents: a8471bd
Author: Reid Chan 
Authored: Mon Feb 26 11:31:08 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Feb 26 14:59:42 2018 +0800

--
 src/main/asciidoc/_chapters/architecture.adoc | 35 ++
 1 file changed, 35 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a34f129a/src/main/asciidoc/_chapters/architecture.adoc
--
diff --git a/src/main/asciidoc/_chapters/architecture.adoc 
b/src/main/asciidoc/_chapters/architecture.adoc
index 9091d5e..6fb5891 100644
--- a/src/main/asciidoc/_chapters/architecture.adoc
+++ b/src/main/asciidoc/_chapters/architecture.adoc
@@ -321,6 +321,41 @@ SingleColumnValueFilter filter = new 
SingleColumnValueFilter(
 scan.setFilter(filter);
 
 
+[[client.filter.cv.cvf]]
+ ColumnValueFilter
+
+Introduced in HBase 2.0.0 as a complement to SingleColumnValueFilter, ColumnValueFilter
+returns the matched cell only, while SingleColumnValueFilter returns the entire row
+(including other columns and values) to which the matched cell belongs. The constructor
+parameters of ColumnValueFilter are the same as those of SingleColumnValueFilter.
+[source,java]
+
+ColumnValueFilter filter = new ColumnValueFilter(
+  cf,
+  column,
+  CompareOperator.EQUAL,
+  Bytes.toBytes("my value")
+  );
+scan.setFilter(filter);
+
+
+Note: for a simple query such as "equals to a family:qualifier:value", we highly
+recommend using the following approach instead of SingleColumnValueFilter or
+ColumnValueFilter:
+[source,java]
+
+Scan scan = new Scan();
+scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("qualifier"));
+ValueFilter vf = new ValueFilter(CompareOperator.EQUAL,
+  new BinaryComparator(Bytes.toBytes("value")));
+scan.setFilter(vf);
+...
+
+This scan restricts itself to the specified column 'family:qualifier', avoiding
+scans of unrelated families and columns, which gives better performance; the
+`ValueFilter` is the condition used to do the value filtering.
+
+But if the query is much more complicated than this, then please make a good
+choice case by case.
+
 [[client.filter.cvp]]
 === Column Value Comparators
 



hbase git commit: HBASE-20019 Document the ColumnValueFilter

2018-02-25 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 10270e36c -> 309f3360b


HBASE-20019 Document the ColumnValueFilter

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/309f3360
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/309f3360
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/309f3360

Branch: refs/heads/branch-2
Commit: 309f3360bf3f45c7ea572a09eddada02cf6b93b3
Parents: 10270e3
Author: Reid Chan 
Authored: Mon Feb 26 11:31:08 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Mon Feb 26 15:11:01 2018 +0800

--
 src/main/asciidoc/_chapters/architecture.adoc | 35 ++
 1 file changed, 35 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/309f3360/src/main/asciidoc/_chapters/architecture.adoc
--
diff --git a/src/main/asciidoc/_chapters/architecture.adoc 
b/src/main/asciidoc/_chapters/architecture.adoc
index 5ba81c1..b29244c 100644
--- a/src/main/asciidoc/_chapters/architecture.adoc
+++ b/src/main/asciidoc/_chapters/architecture.adoc
@@ -321,6 +321,41 @@ SingleColumnValueFilter filter = new 
SingleColumnValueFilter(
 scan.setFilter(filter);
 
 
+[[client.filter.cv.cvf]]
+ ColumnValueFilter
+
+Introduced in HBase 2.0.0 as a complement to SingleColumnValueFilter, ColumnValueFilter
+returns the matched cell only, while SingleColumnValueFilter returns the entire row
+(including other columns and values) to which the matched cell belongs. The constructor
+parameters of ColumnValueFilter are the same as those of SingleColumnValueFilter.
+[source,java]
+
+ColumnValueFilter filter = new ColumnValueFilter(
+  cf,
+  column,
+  CompareOperator.EQUAL,
+  Bytes.toBytes("my value")
+  );
+scan.setFilter(filter);
+
+
+Note: for a simple query such as "equals to a family:qualifier:value", we highly
+recommend using the following approach instead of SingleColumnValueFilter or
+ColumnValueFilter:
+[source,java]
+
+Scan scan = new Scan();
+scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("qualifier"));
+ValueFilter vf = new ValueFilter(CompareOperator.EQUAL,
+  new BinaryComparator(Bytes.toBytes("value")));
+scan.setFilter(vf);
+...
+
+This scan restricts itself to the specified column 'family:qualifier', avoiding
+scans of unrelated families and columns, which gives better performance; the
+`ValueFilter` is the condition used to do the value filtering.
+
+But if the query is much more complicated than this, then please make a good
+choice case by case.
+
 [[client.filter.cvp]]
 === Column Value Comparators
 



hbase git commit: HBASE-20016 TestCatalogJanitorInMemoryStates#testInMemoryForReplicaParentCleanup is flaky

2018-02-21 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1 a189674b4 -> af1f7cf43


HBASE-20016 
TestCatalogJanitorInMemoryStates#testInMemoryForReplicaParentCleanup is flaky


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/af1f7cf4
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/af1f7cf4
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/af1f7cf4

Branch: refs/heads/branch-1
Commit: af1f7cf4314bbf433bf0117f3045a106aee39c80
Parents: a189674
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 21:22:58 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Feb 22 09:44:37 2018 +0800

--
 .../TestCatalogJanitorInMemoryStates.java   | 52 +---
 1 file changed, 22 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/af1f7cf4/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
index 5ec3d6a..301ff6b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
@@ -124,7 +124,7 @@ public class TestCatalogJanitorInMemoryStates {
* AM's serverHoldings and
*/
   @Test(timeout = 18)
-  public void testInMemoryForReplicaParentCleanup() throws IOException, 
InterruptedException {
+  public void testInMemoryForReplicaParentCleanup() throws Exception {
 final AssignmentManager am = 
TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
 final CatalogJanitor janitor = 
TEST_UTIL.getHBaseCluster().getMaster().catalogJanitorChore;
 
@@ -139,35 +139,27 @@ public class TestCatalogJanitorInMemoryStates {
 // There are two regions, one for primary, one for the replica.
 assertTrue(allRegionLocations.size() == 2);
 
-HRegionLocation replicaParentRegion, primaryParentRegion;
-if (RegionReplicaUtil.isDefaultReplica(
-allRegionLocations.get(0).getRegionInfo().getReplicaId())) {
-  primaryParentRegion = allRegionLocations.get(0);
-  replicaParentRegion = allRegionLocations.get(1);
-} else {
-  primaryParentRegion = allRegionLocations.get(1);
-  replicaParentRegion = allRegionLocations.get(0);
-}
-
-List primaryDaughters = 
splitRegion(primaryParentRegion.getRegionInfo(),
-Bytes.toBytes("a"));
-
-// Wait until the replica parent region is offline.
-while 
(am.getRegionStates().isRegionOnline(replicaParentRegion.getRegionInfo())) {
-  Thread.sleep(100);
-}
-
-assertNotNull("Should have found daughter regions for " + 
primaryDaughters, primaryDaughters);
-
-// check that primary parent region is not in AM's serverHoldings
-assertFalse("Primary Parent region should have been removed from 
RegionState's serverHoldings",
-
am.getRegionStates().existsInServerHoldings(primaryParentRegion.getServerName(),
-primaryParentRegion.getRegionInfo()));
-
-// check that primary parent region is not in AM's serverHoldings
-assertFalse("Primary Parent region should have been removed from 
RegionState's serverHoldings",
-
am.getRegionStates().existsInServerHoldings(replicaParentRegion.getServerName(),
-replicaParentRegion.getRegionInfo()));
+final HRegionLocation primaryParentRegion
+  = RegionReplicaUtil.isDefaultReplica(
+  allRegionLocations.get(0).getRegionInfo().getReplicaId()) ? 
allRegionLocations.get(0)
+: allRegionLocations.get(1);
+final HRegionLocation replicaParentRegion
+  = RegionReplicaUtil.isDefaultReplica(
+  allRegionLocations.get(0).getRegionInfo().getReplicaId()) ? 
allRegionLocations.get(1)
+  : allRegionLocations.get(0);
+
+assertNotNull("Should have found daughter regions for " + 
primaryParentRegion,
+  splitRegion(primaryParentRegion.getRegionInfo(), Bytes.toBytes("a")));
+
+TEST_UTIL.waitFor(60 * 1000, new Waiter.Predicate() {
+  @Override
+  public boolean evaluate() throws Exception {
+return 
!am.getRegionStates().existsInServerHoldings(primaryParentRegion.getServerName(),
+  primaryParentRegion.getRegionInfo()) &&
+  
!am.getRegionStates().existsInServerHoldings(replicaParentRegion.getServerName(),
+replicaParentRegion.getRegionInfo());
+  }
+});
   }
 
   /*



hbase git commit: HBASE-20016 TestCatalogJanitorInMemoryStates#testInMemoryForReplicaParentCleanup is flaky

2018-02-21 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 a01323b2c -> 08b993997


HBASE-20016 
TestCatalogJanitorInMemoryStates#testInMemoryForReplicaParentCleanup is flaky


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/08b99399
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/08b99399
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/08b99399

Branch: refs/heads/branch-1.4
Commit: 08b993997404aad7ec8f66b8c2b2083062c9ef1a
Parents: a01323b
Author: Chia-Ping Tsai 
Authored: Sun Feb 18 21:22:58 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Thu Feb 22 09:15:54 2018 +0800

--
 .../TestCatalogJanitorInMemoryStates.java   | 52 +---
 1 file changed, 22 insertions(+), 30 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/08b99399/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
--
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
index 5ec3d6a..301ff6b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitorInMemoryStates.java
@@ -124,7 +124,7 @@ public class TestCatalogJanitorInMemoryStates {
* AM's serverHoldings and
*/
   @Test(timeout = 18)
-  public void testInMemoryForReplicaParentCleanup() throws IOException, 
InterruptedException {
+  public void testInMemoryForReplicaParentCleanup() throws Exception {
 final AssignmentManager am = 
TEST_UTIL.getHBaseCluster().getMaster().getAssignmentManager();
 final CatalogJanitor janitor = 
TEST_UTIL.getHBaseCluster().getMaster().catalogJanitorChore;
 
@@ -139,35 +139,27 @@ public class TestCatalogJanitorInMemoryStates {
 // There are two regions, one for primary, one for the replica.
 assertTrue(allRegionLocations.size() == 2);
 
-HRegionLocation replicaParentRegion, primaryParentRegion;
-if (RegionReplicaUtil.isDefaultReplica(
-allRegionLocations.get(0).getRegionInfo().getReplicaId())) {
-  primaryParentRegion = allRegionLocations.get(0);
-  replicaParentRegion = allRegionLocations.get(1);
-} else {
-  primaryParentRegion = allRegionLocations.get(1);
-  replicaParentRegion = allRegionLocations.get(0);
-}
-
-List primaryDaughters = 
splitRegion(primaryParentRegion.getRegionInfo(),
-Bytes.toBytes("a"));
-
-// Wait until the replica parent region is offline.
-while 
(am.getRegionStates().isRegionOnline(replicaParentRegion.getRegionInfo())) {
-  Thread.sleep(100);
-}
-
-assertNotNull("Should have found daughter regions for " + 
primaryDaughters, primaryDaughters);
-
-// check that primary parent region is not in AM's serverHoldings
-assertFalse("Primary Parent region should have been removed from 
RegionState's serverHoldings",
-
am.getRegionStates().existsInServerHoldings(primaryParentRegion.getServerName(),
-primaryParentRegion.getRegionInfo()));
-
-// check that primary parent region is not in AM's serverHoldings
-assertFalse("Primary Parent region should have been removed from 
RegionState's serverHoldings",
-
am.getRegionStates().existsInServerHoldings(replicaParentRegion.getServerName(),
-replicaParentRegion.getRegionInfo()));
+final HRegionLocation primaryParentRegion
+  = RegionReplicaUtil.isDefaultReplica(
+  allRegionLocations.get(0).getRegionInfo().getReplicaId()) ? 
allRegionLocations.get(0)
+: allRegionLocations.get(1);
+final HRegionLocation replicaParentRegion
+  = RegionReplicaUtil.isDefaultReplica(
+  allRegionLocations.get(0).getRegionInfo().getReplicaId()) ? 
allRegionLocations.get(1)
+  : allRegionLocations.get(0);
+
+assertNotNull("Should have found daughter regions for " + 
primaryParentRegion,
+  splitRegion(primaryParentRegion.getRegionInfo(), Bytes.toBytes("a")));
+
+TEST_UTIL.waitFor(60 * 1000, new Waiter.Predicate() {
+  @Override
+  public boolean evaluate() throws Exception {
+return 
!am.getRegionStates().existsInServerHoldings(primaryParentRegion.getServerName(),
+  primaryParentRegion.getRegionInfo()) &&
+  
!am.getRegionStates().existsInServerHoldings(replicaParentRegion.getServerName(),
+replicaParentRegion.getRegionInfo());
+  }
+});
   }
 
   /*



hbase git commit: HBASE-19950 Introduce a ColumnValueFilter

2018-02-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master f3bb9b961 -> a9a6eed37


HBASE-19950 Introduce a ColumnValueFilter

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a9a6eed3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a9a6eed3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a9a6eed3

Branch: refs/heads/master
Commit: a9a6eed372fa11dd2851635ac2c4bb6e1ca9dba7
Parents: f3bb9b9
Author: Reid Chan 
Authored: Tue Feb 13 16:01:28 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Feb 20 04:56:13 2018 +0800

--
 .../hadoop/hbase/filter/ColumnValueFilter.java  | 241 +++
 .../hadoop/hbase/filter/CompareFilter.java  |  10 +-
 .../apache/hadoop/hbase/filter/ParseFilter.java |   4 +-
 .../src/main/protobuf/Filter.proto  |   7 +
 hbase-protocol/src/main/protobuf/Filter.proto   |   9 +-
 .../apache/hadoop/hbase/filter/TestFilter.java  | 155 
 .../hbase/filter/TestFilterSerialization.java   |   9 +
 .../hadoop/hbase/filter/TestParseFilter.java|  11 +
 8 files changed, 440 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/a9a6eed3/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
new file mode 100644
index 000..0795165
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
@@ -0,0 +1,241 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+
+/**
+ * Different from {@link SingleColumnValueFilter} which returns an 
entire row
+ * when the specified condition is matched, {@link ColumnValueFilter} returns the
matched cell only.
+ * 
+ * This filter is used to filter cells based on column and value.
+ * It takes a {@link org.apache.hadoop.hbase.CompareOperator} operator (<, <=,
=, !=, >, >=) and a {@link ByteArrayComparable} comparator.
+ */
+@InterfaceAudience.Public
+public class ColumnValueFilter extends FilterBase {
+  private final byte[] family;
+  private final byte[] qualifier;
+  private final CompareOperator op;
+  private final ByteArrayComparable comparator;
+
+  // This flag is used to speed up seeking cells when matched column is found, 
such that following
+  // columns in the same row can be skipped faster by NEXT_ROW instead of 
NEXT_COL.
+  private boolean columnFound = false;
+
+  public ColumnValueFilter(final byte[] family, final byte[] qualifier,
+   final CompareOperator op, final byte[] value) {
+this(family, qualifier, op, new BinaryComparator(value));
+  }
+
+  public ColumnValueFilter(final byte[] family, final byte[] qualifier,
+   final CompareOperator op,
+   final ByteArrayComparable comparator) {
+this.family = Preconditions.checkNotNull(family, "family should not be 
null.");
+this.qualifier

hbase git commit: HBASE-19950 Introduce a ColumnValueFilter

2018-02-19 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 a458d7c40 -> 4ef6319af


HBASE-19950 Introduce a ColumnValueFilter

Signed-off-by: Chia-Ping Tsai 


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4ef6319a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4ef6319a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4ef6319a

Branch: refs/heads/branch-2
Commit: 4ef6319af018c091f72e50383b432ebbcd1d0cbf
Parents: a458d7c
Author: Reid Chan 
Authored: Tue Feb 13 16:01:28 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Tue Feb 20 05:05:19 2018 +0800

--
 .../hadoop/hbase/filter/ColumnValueFilter.java  | 241 +++
 .../hadoop/hbase/filter/CompareFilter.java  |  10 +-
 .../apache/hadoop/hbase/filter/ParseFilter.java |   4 +-
 .../src/main/protobuf/Filter.proto  |   7 +
 hbase-protocol/src/main/protobuf/Filter.proto   |   9 +-
 .../apache/hadoop/hbase/filter/TestFilter.java  | 155 
 .../hbase/filter/TestFilterSerialization.java   |   9 +
 .../hadoop/hbase/filter/TestParseFilter.java|  11 +
 8 files changed, 440 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/4ef6319a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
new file mode 100644
index 000..0795165
--- /dev/null
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
@@ -0,0 +1,241 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.filter;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
+import org.apache.hadoop.hbase.CompareOperator;
+import org.apache.hadoop.hbase.PrivateCellUtil;
+import org.apache.hadoop.hbase.exceptions.DeserializationException;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
+
+import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+import 
org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
+
+/**
+ * Different from {@link SingleColumnValueFilter} which returns an 
entire row
+ * when the specified condition is matched, {@link ColumnValueFilter} returns the
matched cell only.
+ * 
+ * This filter is used to filter cells based on column and value.
+ * It takes a {@link org.apache.hadoop.hbase.CompareOperator} operator (<, <=,
=, !=, >, >=) and a {@link ByteArrayComparable} comparator.
+ */
+@InterfaceAudience.Public
+public class ColumnValueFilter extends FilterBase {
+  private final byte[] family;
+  private final byte[] qualifier;
+  private final CompareOperator op;
+  private final ByteArrayComparable comparator;
+
+  // This flag is used to speed up seeking cells when matched column is found, 
such that following
+  // columns in the same row can be skipped faster by NEXT_ROW instead of 
NEXT_COL.
+  private boolean columnFound = false;
+
+  public ColumnValueFilter(final byte[] family, final byte[] qualifier,
+   final CompareOperator op, final byte[] value) {
+this(family, qualifier, op, new BinaryComparator(value));
+  }
+
+  public ColumnValueFilter(final byte[] family, final byte[] qualifier,
+   final CompareOperator op,
+   final ByteArrayComparable comparator) {
+this.family = Preconditions.checkNotNull(family, "family should not be 
null.");
+this.quali

hbase git commit: HBASE-19680 BufferedMutatorImpl#mutate should wait the result from AP in order to throw the failed mutations

2018-02-16 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/master abf7de702 -> dad90f6cc


HBASE-19680 BufferedMutatorImpl#mutate should wait the result from AP in order 
to throw the failed mutations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/dad90f6c
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/dad90f6c
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/dad90f6c

Branch: refs/heads/master
Commit: dad90f6cce5bc51e43e3778068d5e45bcb1c9de0
Parents: abf7de7
Author: Chia-Ping Tsai 
Authored: Sat Feb 17 07:16:14 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Feb 17 07:33:58 2018 +0800

--
 .../hadoop/hbase/client/AsyncProcess.java   |  54 +
 .../hadoop/hbase/client/AsyncRequestFuture.java |   6 +-
 .../hbase/client/AsyncRequestFutureImpl.java|   3 +-
 .../hbase/client/BufferedMutatorImpl.java   | 241 +--
 .../hbase/client/ConnectionImplementation.java  |   2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |   7 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |   2 +-
 .../apache/hadoop/hbase/client/RowAccess.java   |   4 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   |  90 +++
 .../TestAsyncProcessWithRegionException.java|   2 +-
 .../hbase/client/HConnectionTestingUtility.java |   2 +-
 11 files changed, 171 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/dad90f6c/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 6c4118c..de7449b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -65,17 +65,12 @@ import org.apache.hadoop.hbase.util.Bytes;
  * The class manages internally the retries.
  * 
  * 
- * The class can be constructed in regular mode, or "global error" mode. In 
global error mode,
- * AP tracks errors across all calls (each "future" also has global view of 
all errors). That
- * mode is necessary for backward compat with HTable behavior, where multiple 
submissions are
- * made and the errors can propagate using any put/flush call, from previous 
calls.
- * In "regular" mode, the errors are tracked inside the Future object that is 
returned.
+ * The errors are tracked inside the Future object that is returned.
  * The results are always tracked inside the Future object and can be 
retrieved when the call
  * has finished. Partial results can also be retrieved if some part of 
multi-request failed.
  * 
  * 
- * This class is thread safe in regular mode; in global error code, submitting 
operations and
- * retrieving errors from different threads may be not thread safe.
+ * This class is thread safe.
  * Internally, the class is thread safe enough to manage simultaneously new 
submission and results
  * arising from older operations.
  * 
@@ -144,7 +139,6 @@ class AsyncProcess {
   final ClusterConnection connection;
   private final RpcRetryingCallerFactory rpcCallerFactory;
   final RpcControllerFactory rpcFactory;
-  final BatchErrors globalErrors;
 
   // Start configuration settings.
   final int startLogErrorsCnt;
@@ -168,14 +162,12 @@ class AsyncProcess {
   private static final int DEFAULT_LOG_DETAILS_PERIOD = 1;
   private final int periodToLog;
   AsyncProcess(ClusterConnection hc, Configuration conf,
-  RpcRetryingCallerFactory rpcCaller, boolean useGlobalErrors,
-  RpcControllerFactory rpcFactory) {
+  RpcRetryingCallerFactory rpcCaller, RpcControllerFactory rpcFactory) {
 if (hc == null) {
   throw new IllegalArgumentException("ClusterConnection cannot be null.");
 }
 
 this.connection = hc;
-this.globalErrors = useGlobalErrors ? new BatchErrors() : null;
 
 this.id = COUNTER.incrementAndGet();
 
@@ -445,10 +437,10 @@ class AsyncProcess {
 
   private Consumer getLogger(TableName tableName, long max) {
 return (currentInProgress) -> {
-  LOG.info("#" + id + (max < 0 ? ", waiting for any free slot"
-  : ", waiting for some tasks to finish. Expected max="
-  + max) + ", tasksInProgress=" + currentInProgress +
-  " hasError=" + hasError() + (tableName == null ? "" : ", tableName=" + 
tableName));
+  LOG.info("#" + id + (max < 0 ?
+  ", waiting for any free slot" :
+  ", waiting for some tasks to finish. Expected max=" + max) + ", 
tasksInProgress="
+  + currentInProgress + (tableName == null ? "" : ", tableName=" + 
tableName));
 };
   }
 
@@ -460,38 +452,6 @@ class AsyncProcess {
   void decTaskCount

hbase git commit: HBASE-19680 BufferedMutatorImpl#mutate should wait the result from AP in order to throw the failed mutations

2018-02-16 Thread chia7712
Repository: hbase
Updated Branches:
  refs/heads/branch-2 d59959d19 -> 34d5f2b70


HBASE-19680 BufferedMutatorImpl#mutate should wait the result from AP in order 
to throw the failed mutations


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/34d5f2b7
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/34d5f2b7
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/34d5f2b7

Branch: refs/heads/branch-2
Commit: 34d5f2b70eff408c884401ae79ea17eff9f53a7f
Parents: d59959d
Author: Chia-Ping Tsai 
Authored: Sat Feb 17 07:16:14 2018 +0800
Committer: Chia-Ping Tsai 
Committed: Sat Feb 17 07:27:23 2018 +0800

--
 .../hadoop/hbase/client/AsyncProcess.java   |  54 +
 .../hadoop/hbase/client/AsyncRequestFuture.java |   6 +-
 .../hbase/client/AsyncRequestFutureImpl.java|   3 +-
 .../hbase/client/BufferedMutatorImpl.java   | 241 +--
 .../hbase/client/ConnectionImplementation.java  |   2 +-
 .../org/apache/hadoop/hbase/client/HTable.java  |   7 +-
 .../hadoop/hbase/client/HTableMultiplexer.java  |   2 +-
 .../apache/hadoop/hbase/client/RowAccess.java   |   4 +-
 .../hadoop/hbase/client/TestAsyncProcess.java   |  90 +++
 .../TestAsyncProcessWithRegionException.java|   2 +-
 .../hbase/client/HConnectionTestingUtility.java |   2 +-
 11 files changed, 171 insertions(+), 242 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hbase/blob/34d5f2b7/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
--
diff --git 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
index 6c4118c..de7449b 100644
--- 
a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
+++ 
b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/AsyncProcess.java
@@ -65,17 +65,12 @@ import org.apache.hadoop.hbase.util.Bytes;
  * The class manages internally the retries.
  * 
  * 
- * The class can be constructed in regular mode, or "global error" mode. In 
global error mode,
- * AP tracks errors across all calls (each "future" also has global view of 
all errors). That
- * mode is necessary for backward compat with HTable behavior, where multiple 
submissions are
- * made and the errors can propagate using any put/flush call, from previous 
calls.
- * In "regular" mode, the errors are tracked inside the Future object that is 
returned.
+ * The errors are tracked inside the Future object that is returned.
  * The results are always tracked inside the Future object and can be 
retrieved when the call
  * has finished. Partial results can also be retrieved if some part of 
multi-request failed.
  * 
  * 
- * This class is thread safe in regular mode; in global error code, submitting 
operations and
- * retrieving errors from different threads may be not thread safe.
+ * This class is thread safe.
  * Internally, the class is thread safe enough to manage simultaneously new 
submission and results
  * arising from older operations.
  * 
@@ -144,7 +139,6 @@ class AsyncProcess {
   final ClusterConnection connection;
   private final RpcRetryingCallerFactory rpcCallerFactory;
   final RpcControllerFactory rpcFactory;
-  final BatchErrors globalErrors;
 
   // Start configuration settings.
   final int startLogErrorsCnt;
@@ -168,14 +162,12 @@ class AsyncProcess {
   private static final int DEFAULT_LOG_DETAILS_PERIOD = 1;
   private final int periodToLog;
   AsyncProcess(ClusterConnection hc, Configuration conf,
-  RpcRetryingCallerFactory rpcCaller, boolean useGlobalErrors,
-  RpcControllerFactory rpcFactory) {
+  RpcRetryingCallerFactory rpcCaller, RpcControllerFactory rpcFactory) {
 if (hc == null) {
   throw new IllegalArgumentException("ClusterConnection cannot be null.");
 }
 
 this.connection = hc;
-this.globalErrors = useGlobalErrors ? new BatchErrors() : null;
 
 this.id = COUNTER.incrementAndGet();
 
@@ -445,10 +437,10 @@ class AsyncProcess {
 
   private Consumer getLogger(TableName tableName, long max) {
 return (currentInProgress) -> {
-  LOG.info("#" + id + (max < 0 ? ", waiting for any free slot"
-  : ", waiting for some tasks to finish. Expected max="
-  + max) + ", tasksInProgress=" + currentInProgress +
-  " hasError=" + hasError() + (tableName == null ? "" : ", tableName=" + 
tableName));
+  LOG.info("#" + id + (max < 0 ?
+  ", waiting for any free slot" :
+  ", waiting for some tasks to finish. Expected max=" + max) + ", 
tasksInProgress="
+  + currentInProgress + (tableName == null ? "" : ", tableName=" + 
tableName));
 };
   }
 
@@ -460,38 +452,6 @@ class AsyncProcess {
   void decTaskC

  1   2   3   4   5   6   7   8   >