http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
index 9207f0c..0e47d39 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/constraint/TestConstraint.java
@@ -86,7 +86,8 @@ public class TestConstraint {
       // test that we don't fail on a valid put
       Put put = new Put(row1);
       byte[] value = Integer.toString(10).getBytes();
-      put.add(dummy, new byte[0], value);
+      byte[] qualifier = new byte[0];
+      put.addColumn(dummy, qualifier, value);
       table.put(put);
     } finally {
       table.close();
@@ -117,7 +118,8 @@ public class TestConstraint {
 
     // test that we do fail on violation
     Put put = new Put(row1);
-    put.add(dummy, new byte[0], "fail".getBytes());
+    byte[] qualifier = new byte[0];
+    put.addColumn(dummy, qualifier, "fail".getBytes());
     LOG.warn("Doing put in table");
     try {
       table.put(put);
@@ -160,7 +162,8 @@ public class TestConstraint {
     try {
       // test that we don't fail because its disabled
       Put put = new Put(row1);
-      put.add(dummy, new byte[0], "pass".getBytes());
+      byte[] qualifier = new byte[0];
+      put.addColumn(dummy, qualifier, "pass".getBytes());
       table.put(put);
     } finally {
       table.close();
@@ -192,7 +195,8 @@ public class TestConstraint {
     try {
       // test that we do fail on violation
       Put put = new Put(row1);
-      put.add(dummy, new byte[0], "pass".getBytes());
+      byte[] qualifier = new byte[0];
+      put.addColumn(dummy, qualifier, "pass".getBytes());
       LOG.warn("Doing put in table");
       table.put(put);
     } finally {
@@ -224,8 +228,9 @@ public class TestConstraint {
 
     // test that we do fail on violation
     Put put = new Put(row1);
-    put.add(dummy, new byte[0], "pass".getBytes());
-    
+    byte[] qualifier = new byte[0];
+    put.addColumn(dummy, qualifier, "pass".getBytes());
+
     try{
     table.put(put);
     fail("RuntimeFailConstraint wasn't triggered - this put shouldn't work!");

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
index f1513b2..4fe0d23 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestAggregateProtocol.java
@@ -93,12 +93,12 @@ public class TestAggregateProtocol {
       Put put = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
       Long l = new Long(i);
-      put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(l));
+      put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(l));
       table.put(put);
       Put p2 = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
-      p2.add(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(l)), Bytes
-          .toBytes(l * 10));
+      p2.addColumn(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(l)), Bytes
+              .toBytes(l * 10));
       table.put(p2);
     }
     table.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
index cad4205..d62e950 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestBatchCoprocessorEndpoint.java
@@ -94,7 +94,7 @@ public class TestBatchCoprocessorEndpoint {
     Table table = util.getConnection().getTable(TEST_TABLE);
     for (int i = 0; i < ROWSIZE; i++) {
       Put put = new Put(ROWS[i]);
-      put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
+      put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
       table.put(put);
     }
     table.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
index 6b54abb..d9fc881 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestDoubleColumnInterpreter.java
@@ -90,11 +90,12 @@ public class TestDoubleColumnInterpreter {
       Put put = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
       Double d = new Double(i);
-      put.add(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(d));
+      put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(d));
       table.put(put);
       Put p2 = new Put(ROWS[i]);
       put.setDurability(Durability.SKIP_WAL);
-      p2.add(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(d)), Bytes.toBytes(d * 0.10));
+      p2.addColumn(TEST_FAMILY, Bytes.add(TEST_MULTI_CQ, Bytes.toBytes(d)),
+              Bytes.toBytes(d * 0.10));
       table.put(p2);
     }
     table.close();

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
index c1d21fa..bd89744 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestHTableWrapper.java
@@ -108,15 +108,15 @@ public class TestHTableWrapper {
     table = util.createTable(TEST_TABLE, TEST_FAMILY);
 
     Put puta = new Put(ROW_A);
-    puta.add(TEST_FAMILY, qualifierCol1, bytes1);
+    puta.addColumn(TEST_FAMILY, qualifierCol1, bytes1);
     table.put(puta);
 
     Put putb = new Put(ROW_B);
-    putb.add(TEST_FAMILY, qualifierCol1, bytes2);
+    putb.addColumn(TEST_FAMILY, qualifierCol1, bytes2);
     table.put(putb);
 
     Put putc = new Put(ROW_C);
-    putc.add(TEST_FAMILY, qualifierCol1, bytes3);
+    putc.addColumn(TEST_FAMILY, qualifierCol1, bytes3);
     table.put(putc);
   }
 
@@ -204,7 +204,7 @@ public class TestHTableWrapper {
 
   private void checkPutsAndDeletes() throws IOException {
     // put:
-    Put putD = new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2);
+    Put putD = new Put(ROW_D).addColumn(TEST_FAMILY, qualifierCol1, bytes2);
     hTableInterface.put(putD);
     checkRowValue(ROW_D, bytes2);
 
@@ -214,8 +214,8 @@ public class TestHTableWrapper {
     checkRowValue(ROW_D, null);
 
     // multiple puts:
-    Put[] puts = new Put[] { new Put(ROW_D).add(TEST_FAMILY, qualifierCol1, bytes2),
-        new Put(ROW_E).add(TEST_FAMILY, qualifierCol1, bytes3) };
+    Put[] puts = new Put[] {new Put(ROW_D).addColumn(TEST_FAMILY, qualifierCol1, bytes2),
+            new Put(ROW_E).addColumn(TEST_FAMILY, qualifierCol1, bytes3)};
     hTableInterface.put(Arrays.asList(puts));
     checkRowsValues(new byte[][] { ROW_D, ROW_E }, new byte[][] { bytes2, bytes3 });
 
@@ -226,7 +226,7 @@ public class TestHTableWrapper {
   }
 
   private void checkCheckAndPut() throws IOException {
-    Put putC = new Put(ROW_C).add(TEST_FAMILY, qualifierCol1, bytes5);
+    Put putC = new Put(ROW_C).addColumn(TEST_FAMILY, qualifierCol1, bytes5);
     assertFalse(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes4,
       putC/* newValue */));
     assertTrue(hTableInterface.checkAndPut(ROW_C, TEST_FAMILY, qualifierCol1, /* expect */bytes3,
@@ -242,7 +242,7 @@ public class TestHTableWrapper {
   }
 
   private void checkIncrementColumnValue() throws IOException {
-    hTableInterface.put(new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
+    hTableInterface.put(new Put(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, Bytes.toBytes(1L)));
     checkRowValue(ROW_A, Bytes.toBytes(1L));
 
     final long newVal = hTableInterface
@@ -319,7 +319,7 @@ public class TestHTableWrapper {
   }
 
   private void checkMutateRow() throws IOException {
-    Put put = new Put(ROW_A).add(TEST_FAMILY, qualifierCol1, bytes1);
+    Put put = new Put(ROW_A).addColumn(TEST_FAMILY, qualifierCol1, bytes1);
     RowMutations rowMutations = new RowMutations(ROW_A);
     rowMutations.add(put);
     hTableInterface.mutateRow(rowMutations);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
index 3122b4c..53b34b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
@@ -102,7 +102,7 @@ public class TestOpenTableInCoprocessor {
         final WALEdit edit, final Durability durability) throws IOException {
       Table table = e.getEnvironment().getTable(otherTable, getPool());
       Put p = new Put(new byte[] { 'a' });
-      p.add(family, null, new byte[] { 'a' });
+      p.addColumn(family, null, new byte[]{'a'});
       try {
         table.batch(Collections.singletonList(put), null);
       } catch (InterruptedException e1) {
@@ -162,7 +162,7 @@ public class TestOpenTableInCoprocessor {
 
     Table table = UTIL.getConnection().getTable(TableName.valueOf("primary"));
     Put p = new Put(new byte[] { 'a' });
-    p.add(family, null, new byte[] { 'a' });
+    p.addColumn(family, null, new byte[]{'a'});
     table.put(p);
     table.close();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
index 73d7a96..3cbbe9d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverBypass.java
@@ -94,7 +94,7 @@ public class TestRegionObserverBypass {
   public void testSimple() throws Exception {
     Table t = util.getConnection().getTable(tableName);
     Put p = new Put(row1);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     // before HBASE-4331, this would throw an exception
     t.put(p);
     checkRowAndDelete(t,row1,0);
@@ -114,13 +114,13 @@ public class TestRegionObserverBypass {
     Table t = util.getConnection().getTable(tableName);
     List<Put> puts = new ArrayList<Put>();
     Put p = new Put(row1);
-    p.add(dummy,dummy,dummy);
+    p.addColumn(dummy, dummy, dummy);
     puts.add(p);
     p = new Put(row2);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row3);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     // before HBASE-4331, this would throw an exception
     t.put(puts);
@@ -130,13 +130,13 @@ public class TestRegionObserverBypass {
 
     puts.clear();
     p = new Put(row1);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row2);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row3);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     // before HBASE-4331, this would throw an exception
     t.put(puts);
@@ -146,13 +146,13 @@ public class TestRegionObserverBypass {
 
     puts.clear();
     p = new Put(row1);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row2);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row3);
-    p.add(dummy,dummy,dummy);
+    p.addColumn(dummy, dummy, dummy);
     puts.add(p);
     // this worked fine even before HBASE-4331
     t.put(puts);
@@ -162,13 +162,13 @@ public class TestRegionObserverBypass {
 
     puts.clear();
     p = new Put(row1);
-    p.add(dummy,dummy,dummy);
+    p.addColumn(dummy, dummy, dummy);
     puts.add(p);
     p = new Put(row2);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row3);
-    p.add(dummy,dummy,dummy);
+    p.addColumn(dummy, dummy, dummy);
     puts.add(p);
     // this worked fine even before HBASE-4331
     t.put(puts);
@@ -178,13 +178,13 @@ public class TestRegionObserverBypass {
 
     puts.clear();
     p = new Put(row1);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     p = new Put(row2);
-    p.add(dummy,dummy,dummy);
+    p.addColumn(dummy, dummy, dummy);
     puts.add(p);
     p = new Put(row3);
-    p.add(test,dummy,dummy);
+    p.addColumn(test, dummy, dummy);
     puts.add(p);
     // before HBASE-4331, this would throw an exception
     t.put(puts);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 3ddf601..45ba04b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -130,9 +130,9 @@ public class TestRegionObserverInterface {
        new Boolean[] { false, false, false, false, false, false, false, false });
 
       Put put = new Put(ROW);
-      put.add(A, A, A);
-      put.add(B, B, B);
-      put.add(C, C, C);
+      put.addColumn(A, A, A);
+      put.addColumn(B, B, B);
+      put.addColumn(C, C, C);
       table.put(put);
 
      verifyMethodResult(SimpleRegionObserver.class, new String[] { "hadPreGet", "hadPostGet",
@@ -189,9 +189,9 @@ public class TestRegionObserverInterface {
         tableName,
         new Boolean[] {false, false, false, false, false});
       Put put = new Put(ROW);
-      put.add(A, A, A);
-      put.add(B, B, B);
-      put.add(C, C, C);
+      put.addColumn(A, A, A);
+      put.addColumn(B, B, B);
+      put.addColumn(C, C, C);
 
       Delete delete = new Delete(ROW);
       delete.deleteColumn(A, A);
@@ -248,10 +248,10 @@ public class TestRegionObserverInterface {
        TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndPutHooks");
     try (Table table = util.createTable(tableName, new byte[][] {A, B, C})) {
       Put p = new Put(Bytes.toBytes(0));
-      p.add(A, A, A);
+      p.addColumn(A, A, A);
       table.put(p);
       p = new Put(Bytes.toBytes(0));
-      p.add(A, A, A);
+      p.addColumn(A, A, A);
       verifyMethodResult(SimpleRegionObserver.class,
           new String[] { "hadPreCheckAndPut", "hadPreCheckAndPutAfterRowLock",
               "hadPostCheckAndPut" }, tableName, new Boolean[] { false, false, false });
@@ -274,7 +274,7 @@ public class TestRegionObserverInterface {
     Table table = util.createTable(tableName, new byte[][] {A, B, C});
     try {
       Put p = new Put(Bytes.toBytes(0));
-      p.add(A, A, A);
+      p.addColumn(A, A, A);
       table.put(p);
       Delete d = new Delete(Bytes.toBytes(0));
       table.delete(d);
@@ -338,7 +338,7 @@ public class TestRegionObserverInterface {
 
     Table table = util.getConnection().getTable(tableName);
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     table.put(put);
 
     Get get = new Get(ROW);
@@ -412,7 +412,7 @@ public class TestRegionObserverInterface {
 
     Table table = util.getConnection().getTable(tableName);
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     table.put(put);
 
     Delete delete = new Delete(ROW);
@@ -522,7 +522,7 @@ public class TestRegionObserverInterface {
       byte[] iBytes = Bytes.toBytes(i);
       Put put = new Put(iBytes);
       put.setDurability(Durability.SKIP_WAL);
-      put.add(A, A, iBytes);
+      put.addColumn(A, A, iBytes);
       table.put(put);
     }
 
@@ -677,9 +677,9 @@ public class TestRegionObserverInterface {
         }
 
         Put put = new Put(ROW);
-        put.add(A, A, A);
-        put.add(B, B, B);
-        put.add(C, C, C);
+        put.addColumn(A, A, A);
+        put.addColumn(B, B, B);
+        put.addColumn(C, C, C);
         table.put(put);
 
         verifyMethodResult(SimpleRegionObserver.Legacy.class,
@@ -729,9 +729,9 @@ public class TestRegionObserverInterface {
       }
 
       Put put = new Put(ROW);
-      put.add(A, A, A);
-      put.add(B, B, B);
-      put.add(C, C, C);
+      put.addColumn(A, A, A);
+      put.addColumn(B, B, B);
+      put.addColumn(C, C, C);
       table.put(put);
 
       cluster.killRegionServer(rs1.getRegionServer().getServerName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
index 44e06bd..e20c4ad 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -178,7 +178,7 @@ public class TestRegionObserverScannerOpenHook {
     h.load(EmptyRegionObsever.class, Coprocessor.PRIORITY_USER, conf);
 
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     region.put(put);
 
     Get get = new Get(ROW);
@@ -204,7 +204,7 @@ public class TestRegionObserverScannerOpenHook {
 
     // put a row and flush it to disk
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     region.put(put);
     region.flush(true);
     Get get = new Get(ROW);
@@ -278,7 +278,7 @@ public class TestRegionObserverScannerOpenHook {
 
     // put a row and flush it to disk
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     table.put(put);
 
     HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getTableName());
@@ -291,7 +291,7 @@ public class TestRegionObserverScannerOpenHook {
 
     // put another row and flush that too
     put = new Put(Bytes.toBytes("anotherrow"));
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     table.put(put);
     admin.flushRegion(region.getRegionInfo().getRegionName());
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
index 0a4ca16..723edcb 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverStacking.java
@@ -127,7 +127,7 @@ public class TestRegionObserverStacking extends TestCase {
     h.load(ObserverC.class, Coprocessor.PRIORITY_LOWEST, conf);
 
     Put put = new Put(ROW);
-    put.add(A, A, A);
+    put.addColumn(A, A, A);
     region.put(put);
 
     Coprocessor c = h.findCoprocessor(ObserverA.class.getName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
index 537a415..d25948b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionServerCoprocessorExceptionWithAbort.java
@@ -107,7 +107,7 @@ public class TestRegionServerCoprocessorExceptionWithAbort {
       try {
         final byte[] ROW = Bytes.toBytes("aaa");
         Put put = new Put(ROW);
-        put.add(TEST_FAMILY, ROW, ROW);
+        put.addColumn(TEST_FAMILY, ROW, ROW);
         table.put(put);
       } catch (IOException e) {
         // The region server is going to be aborted.

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
index 5f2b7bd..b3d3890 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRowProcessorEndpoint.java
@@ -138,15 +138,15 @@ public class TestRowProcessorEndpoint {
     table = util.createTable(TABLE, FAM);
     {
       Put put = new Put(ROW);
-      put.add(FAM, A, Bytes.add(B, C));    // B, C are friends of A
-      put.add(FAM, B, Bytes.add(D, E, F)); // D, E, F are friends of B
-      put.add(FAM, C, G);                  // G is a friend of C
+      put.addColumn(FAM, A, Bytes.add(B, C));    // B, C are friends of A
+      put.addColumn(FAM, B, Bytes.add(D, E, F)); // D, E, F are friends of B
+      put.addColumn(FAM, C, G);                  // G is a friend of C
       table.put(put);
       rowSize = put.size();
     }
     Put put = new Put(ROW2);
-    put.add(FAM, D, E);
-    put.add(FAM, F, G);
+    put.addColumn(FAM, D, E);
+    put.addColumn(FAM, F, G);
     table.put(put);
     row2Size = put.size();
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
index 7772664..75fe7a2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestWALObserver.java
@@ -489,7 +489,7 @@ public class TestWALObserver {
   private Put creatPutWith2Families(byte[] row) throws IOException {
     Put p = new Put(row);
     for (int i = 0; i < TEST_FAMILY.length - 1; i++) {
-      p.add(TEST_FAMILY[i], TEST_QUALIFIER[i], TEST_VALUE[i]);
+      p.addColumn(TEST_FAMILY[i], TEST_QUALIFIER[i], TEST_VALUE[i]);
     }
     return p;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
index 6ccfc62..3a635b8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java
@@ -100,26 +100,26 @@ public class TestDependentColumnFilter {
   private void addData() throws IOException {
     Put put = new Put(ROWS[0]);
     // add in an entry for each stamp, with 2 as a "good" value
-    put.add(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
-    put.add(FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1]);
-    put.add(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
+    put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+    put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[1], BAD_VALS[1]);
+    put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
     // add in entries for stamps 0 and 2.
     // without a value check both will be "accepted"
     // with one 2 will be accepted(since the corresponding ts entry
     // has a matching value
-    put.add(FAMILIES[1], QUALIFIER, STAMPS[0], BAD_VALS[0]);
-    put.add(FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2]);
+    put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+    put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[2], BAD_VALS[2]);
 
     this.region.put(put);
 
     put = new Put(ROWS[1]);
-    put.add(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
+    put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[0], BAD_VALS[0]);
     // there is no corresponding timestamp for this so it should never pass
-    put.add(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
+    put.addColumn(FAMILIES[0], QUALIFIER, STAMPS[2], MATCH_VAL);
     // if we reverse the qualifiers this one should pass
-    put.add(FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL);
+    put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[0], MATCH_VAL);
     // should pass
-    put.add(FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2]);
+    put.addColumn(FAMILIES[1], QUALIFIER, STAMPS[1], BAD_VALS[2]);
 
     this.region.put(put);
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
index 92be81a..e0a486e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java
@@ -152,7 +152,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for(byte [] QUALIFIER : QUALIFIERS_ONE) {
-        p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+        p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
       }
       this.region.put(p);
     }
@@ -160,7 +160,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for(byte [] QUALIFIER : QUALIFIERS_TWO) {
-        p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+        p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
       }
       this.region.put(p);
     }
@@ -173,7 +173,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for(byte [] QUALIFIER : QUALIFIERS_ONE) {
-        p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+        p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
       }
       this.region.put(p);
     }
@@ -181,7 +181,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for(byte [] QUALIFIER : QUALIFIERS_TWO) {
-        p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+        p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
       }
       this.region.put(p);
     }
@@ -229,7 +229,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for (byte[] QUALIFIER : QUALIFIERS_THREE) {
-        p.add(FAMILIES[0], QUALIFIER, VALUES[0]);
+        p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
 
       }
       this.region.put(p);
@@ -238,7 +238,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
-        p.add(FAMILIES[1], QUALIFIER, VALUES[1]);
+        p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
       }
       this.region.put(p);
     }
@@ -250,7 +250,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for (byte[] QUALIFIER : QUALIFIERS_THREE) {
-        p.add(FAMILIES[1], QUALIFIER, VALUES[0]);
+        p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
       }
       this.region.put(p);
     }
@@ -258,7 +258,7 @@ public class TestFilter {
       Put p = new Put(ROW);
       p.setDurability(Durability.SKIP_WAL);
       for (byte[] QUALIFIER : QUALIFIERS_FOUR) {
-        p.add(FAMILIES[0], QUALIFIER, VALUES[1]);
+        p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
       }
       this.region.put(p);
     }
@@ -1458,7 +1458,7 @@ public class TestFilter {
     for(int i=0; i<5; i++) {
       Put p = new Put(Bytes.toBytes((char)('a'+i) + "row"));
       p.setDurability(Durability.SKIP_WAL);
-      p.add(family, qualifier, Bytes.toBytes(String.valueOf(111+i)));
+      p.addColumn(family, qualifier, Bytes.toBytes(String.valueOf(111 + i)));
       testRegion.put(p);
     }
     testRegion.flush(true);
@@ -1501,7 +1501,7 @@ public class TestFilter {
 
     // Need to change one of the group one columns to use group two value
     Put p = new Put(ROWS_ONE[2]);
-    p.add(FAMILIES[0], QUALIFIERS_ONE[2], VALUES[1]);
+    p.addColumn(FAMILIES[0], QUALIFIERS_ONE[2], VALUES[1]);
     this.region.put(p);
 
    // Now let's grab rows that have Q_ONE[0](VALUES[0]) and Q_ONE[2](VALUES[1])
@@ -1816,11 +1816,11 @@ public class TestFilter {
 
   @Test
   public void testColumnPaginationFilter() throws Exception {
-      // Test that the filter skips multiple column versions.
-      Put p = new Put(ROWS_ONE[0]);
-      p.setDurability(Durability.SKIP_WAL);
-      p.add(FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]);
-      this.region.put(p);
+    // Test that the filter skips multiple column versions.
+    Put p = new Put(ROWS_ONE[0]);
+    p.setDurability(Durability.SKIP_WAL);
+    p.addColumn(FAMILIES[0], QUALIFIERS_ONE[0], VALUES[0]);
+    this.region.put(p);
       this.region.flush(true);
 
       // Set of KVs (page: 1; pageSize: 1) - the first set of 1 column per row
@@ -2017,7 +2017,7 @@ public class TestFilter {
     for(int i=0; i<10; i++) {
       Put p = new Put(Bytes.toBytes("row" + i));
       p.setDurability(Durability.SKIP_WAL);
-      p.add(FAMILIES[0], columnStatus, Bytes.toBytes(i%2));
+      p.addColumn(FAMILIES[0], columnStatus, Bytes.toBytes(i % 2));
       testRegion.put(p);
     }
     testRegion.flush(true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
index 78a4d1f..0d2940c 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java
@@ -105,8 +105,7 @@ public class TestFilterWithScanLimits extends 
FilterTestingCluster {
       for (int i = 1; i < 4; i++) {
         Put put = new Put(Bytes.toBytes("row" + i));
         for (int j = 1; j < 6; j++) {
-          put.add(Bytes.toBytes("f1"), Bytes.toBytes("c" + j),
-              Bytes.toBytes(i + "_c" + j));
+          put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), 
Bytes.toBytes(i + "_c" + j));
         }
         puts.add(put);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
index a53dff1..efbddf8 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java
@@ -129,8 +129,8 @@ public class TestFilterWrapper {
           long timestamp = j;
           if (i != 1)
             timestamp = i;
-          put.add(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
-              Bytes.toBytes(i + "_c" + j));
+          put.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("c" + j), timestamp,
+                  Bytes.toBytes(i + "_c" + j));
         }
         puts.add(put);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
index 680ce5c..145e42f 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java
@@ -116,7 +116,7 @@ public class TestFuzzyRowAndColumnRangeFilter {
 
           Put p = new Put(rk);
           p.setDurability(Durability.SKIP_WAL);
-          p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+          p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
           ht.put(p);
           LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: "
                   + Bytes.toStringBinary(cq));

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
index 5c78dfe..ba1d2a1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java
@@ -139,7 +139,7 @@ public class TestFuzzyRowFilterEndToEnd {
 
             Put p = new Put(rk);
             p.setDurability(Durability.SKIP_WAL);
-            p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+            p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
             ht.put(p);
           }
         }
@@ -277,7 +277,7 @@ public class TestFuzzyRowFilterEndToEnd {
 
           Put p = new Put(rk);
           p.setDurability(Durability.SKIP_WAL);
-          p.add(cf.getBytes(), cq, Bytes.toBytes(c));
+          p.addColumn(cf.getBytes(), cq, Bytes.toBytes(c));
           ht.put(p);
           LOG.info("Inserting: rk: " + Bytes.toStringBinary(rk) + " cq: "
               + Bytes.toStringBinary(cq));

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
index a8651d8..8291e52 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java
@@ -75,8 +75,8 @@ public class TestInvocationRecordFilter {
     Put put = new Put(ROW_BYTES);
     for (int i = 0; i < 10; i += 2) {
       // puts 0, 2, 4, 6 and 8
-      put.add(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + i), i,
-          Bytes.toBytes(VALUE_PREFIX + i));
+      put.addColumn(FAMILY_NAME_BYTES, Bytes.toBytes(QUALIFIER_PREFIX + i), 
(long) i,
+              Bytes.toBytes(VALUE_PREFIX + i));
     }
     this.region.put(put);
     this.region.flush(true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
index 3be10ec..21a0df1 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java
@@ -75,7 +75,7 @@ public class TestScanRowPrefix extends FilterTestingCluster {
     for (byte[] rowId: rowIds) {
       Put p = new Put(rowId);
       // Use the rowId as the column qualifier
-      p.add("F".getBytes(), rowId, "Dummy value".getBytes());
+      p.addColumn("F".getBytes(), rowId, "Dummy value".getBytes());
       table.put(p);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
index 504350c..c529107 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/fs/TestBlockReorder.java
@@ -318,7 +318,7 @@ public class TestBlockReorder {
 
       // insert one put to ensure a minimal size
       Put p = new Put(sb);
-      p.add(sb, sb, sb);
+      p.addColumn(sb, sb, sb);
       h.put(p);
 
       DirectoryListing dl = dfs.getClient().listPaths(rootDir, 
HdfsFileStatus.EMPTY_NAME);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index 53deeb2..d7e555d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -133,8 +133,7 @@ public class TestChangingEncoding {
     for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
       Put put = new Put(getRowKey(batchId, i));
       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
-        put.add(CF_BYTES, getQualifier(j),
-            getValue(batchId, i, j));
+        put.addColumn(CF_BYTES, getQualifier(j), getValue(batchId, i, j));
       }
       put.setDurability(Durability.SKIP_WAL);
       puts.add(put);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index 5ccb206..ce66e82 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -152,7 +152,7 @@ public class TestEncodedSeekers {
           KeyValue kv = new KeyValue(key, CF_BYTES, col, 
HConstants.LATEST_TIMESTAMP, value, tag);
           put.add(kv);
         } else {
-          put.add(CF_BYTES, col, value);
+          put.addColumn(CF_BYTES, col, value);
         }
         if(VERBOSE){
           KeyValue kvPut = new KeyValue(key, CF_BYTES, col, value);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index cf2aca5..1635310 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -131,8 +131,8 @@ public class TestForceCacheImportantBlocks {
       Put put = new Put(Bytes.toBytes("row" + i));
       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
         for (long ts = 1; ts < NUM_TIMESTAMPS_PER_COL; ++ts) {
-          put.add(CF_BYTES, Bytes.toBytes("col" + j), ts,
-              Bytes.toBytes("value" + i + "_" + j + "_" + ts));
+          put.addColumn(CF_BYTES, Bytes.toBytes("col" + j), ts,
+                  Bytes.toBytes("value" + i + "_" + j + "_" + ts));
         }
       }
       region.put(put);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index 7584cf2..9c6bb38 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -106,8 +106,8 @@ public class TestScannerSelectionUsingKeyRange {
       for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
         Put put = new Put(Bytes.toBytes("row" + iRow));
         for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
-          put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
-              Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
+          put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
+                  Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
         }
         region.put(put);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index d5f4bcd..08b259d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -120,8 +120,8 @@ public class TestScannerSelectionUsingTTL {
       for (int iRow = 0; iRow < NUM_ROWS; ++iRow) {
         Put put = new Put(Bytes.toBytes("row" + iRow));
         for (int iCol = 0; iCol < NUM_COLS_PER_ROW; ++iCol) {
-          put.add(FAMILY_BYTES, Bytes.toBytes("col" + iCol),
-              ts + version, Bytes.toBytes("value" + iFile + "_" + iRow + "_" + 
iCol));
+          put.addColumn(FAMILY_BYTES, Bytes.toBytes("col" + iCol), ts + 
version,
+                  Bytes.toBytes("value" + iFile + "_" + iRow + "_" + iCol));
         }
         region.put(put);
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
index 1975c59..da322bc 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java
@@ -125,12 +125,12 @@ public class TestTableInputFormat {
     Table table = UTIL.createTable(TableName.valueOf(tableName), families);
     Put p = new Put("aaa".getBytes());
     for (byte[] family : families) {
-      p.add(family, null, "value aaa".getBytes());
+      p.addColumn(family, null, "value aaa".getBytes());
     }
     table.put(p);
     p = new Put("bbb".getBytes());
     for (byte[] family : families) {
-      p.add(family, null, "value bbb".getBytes());
+      p.addColumn(family, null, "value bbb".getBytes());
     }
     table.put(p);
     return table;

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
index daa6e71..fd0db6a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapred/TestTableMapReduceUtil.java
@@ -114,8 +114,7 @@ public class TestTableMapReduceUtil {
     for (String president : presidentsRowKeys) {
       if (presidentNames.hasNext()) {
         Put p = new Put(Bytes.toBytes(president));
-        p.add(COLUMN_FAMILY, COLUMN_QUALIFIER,
-            Bytes.toBytes(presidentNames.next()));
+        p.addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, 
Bytes.toBytes(presidentNames.next()));
         table.put(p);
       }
     }
@@ -123,7 +122,7 @@ public class TestTableMapReduceUtil {
     for (String actor : actorsRowKeys) {
       if (actorNames.hasNext()) {
         Put p = new Put(Bytes.toBytes(actor));
-        p.add(COLUMN_FAMILY, COLUMN_QUALIFIER, 
Bytes.toBytes(actorNames.next()));
+        p.addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, 
Bytes.toBytes(actorNames.next()));
         table.put(p);
       }
     }
@@ -265,8 +264,9 @@ public class TestTableMapReduceUtil {
 
       String name = Bytes.toString(result.getValue(COLUMN_FAMILY,
           COLUMN_QUALIFIER));
-      outCollector.collect(outKey, new Put(Bytes.toBytes("rowKey2")).add(
-          COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(name)));
+      outCollector.collect(outKey,
+              new Put(Bytes.toBytes("rowKey2"))
+              .addColumn(COLUMN_FAMILY, COLUMN_QUALIFIER, 
Bytes.toBytes(name)));
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
index 54a81b7..bd2f82a 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellCounter.java
@@ -83,14 +83,14 @@ public class TestCellCounter {
     Table t = UTIL.createTable(sourceTable, families);
     try{
       Put p = new Put(ROW1);
-      p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-      p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-      p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
       t.put(p);
       p = new Put(ROW2);
-      p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-      p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-      p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
       String[] args = { sourceTable.getNameAsString(), 
FQ_OUTPUT_DIR.toString(), ";", "^row1" };
       runCount(args);
@@ -120,31 +120,32 @@ public class TestCellCounter {
     byte[][] families = { FAMILY_A, FAMILY_B };
     Table t = UTIL.createTable(sourceTable, families);
     try{
-    Put p = new Put(ROW1);
-    p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-    p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-    p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
-    t.put(p);
-    p = new Put(ROW2);
-    p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-    p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-    p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
-    t.put(p);
-    String[] args = {
-      sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", "^row1", 
"--starttime=" + now,
-      "--endtime=" + now + 2 };
-    runCount(args);
-    FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
-        "part-r-00000");
-    String data = IOUtils.toString(inputStream);
-    inputStream.close();
-    assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total ROWS" + "\t" + "1"));
-    assertTrue(data.contains("b;q" + "\t" + "1"));
-    assertTrue(data.contains("a;q" + "\t" + "1"));
-    assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
-    assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
+      Put p = new Put(ROW1);
+      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      t.put(p);
+      p = new Put(ROW2);
+      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      t.put(p);
+      String[] args = {
+          sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", 
"^row1",
+          "--starttime=" + now,
+          "--endtime=" + now + 2 };
+      runCount(args);
+      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
+          "part-r-00000");
+      String data = IOUtils.toString(inputStream);
+      inputStream.close();
+      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
+      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + 
"2"));
+      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
+      assertTrue(data.contains("b;q" + "\t" + "1"));
+      assertTrue(data.contains("a;q" + "\t" + "1"));
+      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
+      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
     }finally{
       t.close();
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
@@ -160,38 +161,38 @@ public class TestCellCounter {
     byte[][] families = { FAMILY_A, FAMILY_B };
     Table t = UTIL.createTable(sourceTable, families);
     try{
-    Put p = new Put(ROW1);
-    p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-    p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-    p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
-    t.put(p);
-    p = new Put(ROW2);
-    p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-    p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-    p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
-    t.put(p);
-    String[] args = {
-      sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", "^row1",
-        "--endtime=" + now + 1 };
-    runCount(args);
-    FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
-        "part-r-00000");
-    String data = IOUtils.toString(inputStream);
-    inputStream.close();
-    assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + "2"));
-    assertTrue(data.contains("Total ROWS" + "\t" + "1"));
-    assertTrue(data.contains("b;q" + "\t" + "1"));
-    assertTrue(data.contains("a;q" + "\t" + "1"));
-    assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
-    assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
+      Put p = new Put(ROW1);
+      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      t.put(p);
+      p = new Put(ROW2);
+      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      t.put(p);
+      String[] args = {
+          sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", 
"^row1",
+          "--endtime=" + now + 1 };
+      runCount(args);
+      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
+          "part-r-00000");
+      String data = IOUtils.toString(inputStream);
+      inputStream.close();
+      assertTrue(data.contains("Total Families Across all Rows" + "\t" + "2"));
+      assertTrue(data.contains("Total Qualifiers across all Rows" + "\t" + 
"2"));
+      assertTrue(data.contains("Total ROWS" + "\t" + "1"));
+      assertTrue(data.contains("b;q" + "\t" + "1"));
+      assertTrue(data.contains("a;q" + "\t" + "1"));
+      assertTrue(data.contains("row1;a;q_Versions" + "\t" + "1"));
+      assertTrue(data.contains("row1;b;q_Versions" + "\t" + "1"));
     }finally{
       t.close();
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
     }
   }
 
-   /**
+  /**
    * Test CellCounter with time range all data should print to output
    */
   @Test (timeout=300000)
@@ -200,27 +201,27 @@ public class TestCellCounter {
     byte[][] families = { FAMILY_A, FAMILY_B };
     Table t = UTIL.createTable(sourceTable, families);
     try{
-    Put p = new Put(ROW1);
-    p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-    p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-    p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
-    t.put(p);
-    p = new Put(ROW2);
-    p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-    p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-    p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
-    t.put(p);
-    String[] args = {
+      Put p = new Put(ROW1);
+      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      t.put(p);
+      p = new Put(ROW2);
+      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      t.put(p);
+      String[] args = {
       sourceTable.getNameAsString(), FQ_OUTPUT_DIR.toString(),  ";", 
"--starttime=" + now + 1,
-      "--endtime=" + now + 2 };
+          "--endtime=" + now + 2 };
 
-    runCount(args);
-    FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
-        "part-r-00000");
-    String data = IOUtils.toString(inputStream);
+      runCount(args);
+      FileInputStream inputStream = new FileInputStream(OUTPUT_DIR + 
File.separator +
+          "part-r-00000");
+      String data = IOUtils.toString(inputStream);
     inputStream.close();
-    // nothing should hace been emitted to the reducer
-    assertTrue(data.isEmpty());
+      // nothing should have been emitted to the reducer
+      assertTrue(data.isEmpty());
     }finally{
       t.close();
       FileUtil.fullyDelete(new File(OUTPUT_DIR));
@@ -283,14 +284,14 @@ public class TestCellCounter {
     Table t = UTIL.createTable(sourceTable, families);
     try {
       Put p = new Put(ROW1);
-      p.add(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
-      p.add(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
-      p.add(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
+      p.addColumn(FAMILY_A, QUALIFIER, now, Bytes.toBytes("Data11"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 1, Bytes.toBytes("Data12"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 2, Bytes.toBytes("Data13"));
       t.put(p);
       p = new Put(ROW2);
-      p.add(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
-      p.add(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
-      p.add(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
+      p.addColumn(FAMILY_B, QUALIFIER, now, Bytes.toBytes("Dat21"));
+      p.addColumn(FAMILY_A, QUALIFIER, now + 1, Bytes.toBytes("Data22"));
+      p.addColumn(FAMILY_B, QUALIFIER, now + 2, Bytes.toBytes("Data23"));
       t.put(p);
       String[] args = { sourceTable.getNameAsString(), outputDir.toString(), 
";" };
       runCount(args);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
index 626383b..628ca08 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCopyTable.java
@@ -80,7 +80,7 @@ public class TestCopyTable {
       // put rows into the first table
       for (int i = 0; i < 10; i++) {
         Put p = new Put(Bytes.toBytes("row" + i));
-        p.add(FAMILY, COLUMN1, COLUMN1);
+        p.addColumn(FAMILY, COLUMN1, COLUMN1);
         t1.put(p);
       }
 
@@ -143,13 +143,13 @@ public class TestCopyTable {
 
     // put rows into the first table
     Put p = new Put(ROW0);
-    p.add(FAMILY, COLUMN1, COLUMN1);
+    p.addColumn(FAMILY, COLUMN1, COLUMN1);
     t1.put(p);
     p = new Put(ROW1);
-    p.add(FAMILY, COLUMN1, COLUMN1);
+    p.addColumn(FAMILY, COLUMN1, COLUMN1);
     t1.put(p);
     p = new Put(ROW2);
-    p.add(FAMILY, COLUMN1, COLUMN1);
+    p.addColumn(FAMILY, COLUMN1, COLUMN1);
     t1.put(p);
 
     CopyTable copy = new CopyTable();
@@ -193,14 +193,14 @@ public class TestCopyTable {
     Table t = TEST_UTIL.createTable(sourceTable, families);
     Table t2 = TEST_UTIL.createTable(targetTable, families);
     Put p = new Put(ROW1);
-    p.add(FAMILY_A, QUALIFIER,  Bytes.toBytes("Data11"));
-    p.add(FAMILY_B, QUALIFIER,  Bytes.toBytes("Data12"));
-    p.add(FAMILY_A, QUALIFIER,  Bytes.toBytes("Data13"));
+    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data11"));
+    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data12"));
+    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data13"));
     t.put(p);
     p = new Put(ROW2);
-    p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Dat21"));
-    p.add(FAMILY_A, QUALIFIER, Bytes.toBytes("Data22"));
-    p.add(FAMILY_B, QUALIFIER, Bytes.toBytes("Data23"));
+    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Dat21"));
+    p.addColumn(FAMILY_A, QUALIFIER, Bytes.toBytes("Data22"));
+    p.addColumn(FAMILY_B, QUALIFIER, Bytes.toBytes("Data23"));
     t.put(p);
 
     long currentTime = System.currentTimeMillis();

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index ecbde7a..05b2b8b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -1006,7 +1006,7 @@ public class TestHFileOutputFormat  {
 
       // put some data in it and flush to create a storefile
       Put p = new Put(Bytes.toBytes("test"));
-      p.add(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
+      p.addColumn(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
       table.put(p);
       admin.flush(TABLE_NAME);
       assertEquals(1, util.countRows(table));

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 6142cc5..9ff88f0 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -1047,7 +1047,7 @@ public class TestHFileOutputFormat2  {
 
       // put some data in it and flush to create a storefile
       Put p = new Put(Bytes.toBytes("test"));
-      p.add(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
+      p.addColumn(FAMILIES[0], Bytes.toBytes("1"), Bytes.toBytes("1"));
       table.put(p);
       admin.flush(TABLE_NAME);
       assertEquals(1, util.countRows(table));

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index b9ad6af..5cf2281 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -71,7 +71,6 @@ import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.VerySlowMapReduceTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.LauncherSecurityManager;
-import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.After;
@@ -172,14 +171,14 @@ public class TestImportExport {
     String EXPORT_TABLE = "exportSimpleCase";
     Table t = UTIL.createTable(TableName.valueOf(EXPORT_TABLE), FAMILYA, 3);
     Put p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    p.add(FAMILYA, QUAL, now+1, QUAL);
-    p.add(FAMILYA, QUAL, now+2, QUAL);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
     t.put(p);
     p = new Put(ROW2);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    p.add(FAMILYA, QUAL, now+1, QUAL);
-    p.add(FAMILYA, QUAL, now+2, QUAL);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
     t.put(p);
 
     String[] args = new String[] {
@@ -272,12 +271,12 @@ public class TestImportExport {
      Table t = UTIL.getConnection().getTable(desc.getTableName());
 
     Put p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    p.add(FAMILYA, QUAL, now+1, QUAL);
-    p.add(FAMILYA, QUAL, now+2, QUAL);
-    p.add(FAMILYA, QUAL, now+3, QUAL);
-    p.add(FAMILYA, QUAL, now+4, QUAL);
-    t.put(p);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
+    t.put(p);
 
     String[] args = new String[] {
         "-D" + Export.EXPORT_BATCHING + "=" + EXPORT_BATCH_SIZE,  // added 
scanner batching arg.
@@ -303,11 +302,11 @@ public class TestImportExport {
     Table t = UTIL.getConnection().getTable(desc.getTableName());
 
     Put p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    p.add(FAMILYA, QUAL, now+1, QUAL);
-    p.add(FAMILYA, QUAL, now+2, QUAL);
-    p.add(FAMILYA, QUAL, now+3, QUAL);
-    p.add(FAMILYA, QUAL, now+4, QUAL);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+    p.addColumn(FAMILYA, QUAL, now + 4, QUAL);
     t.put(p);
 
     Delete d = new Delete(ROW1, now+3);
@@ -371,7 +370,7 @@ public class TestImportExport {
 
     //Add first version of QUAL
     Put p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now, QUAL);
+    p.addColumn(FAMILYA, QUAL, now, QUAL);
     exportT.put(p);
 
     //Add Delete family marker
@@ -380,7 +379,7 @@ public class TestImportExport {
 
     //Add second version of QUAL
     p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now+5, "s".getBytes());
+    p.addColumn(FAMILYA, QUAL, now + 5, "s".getBytes());
     exportT.put(p);
 
     //Add second Delete family marker
@@ -447,15 +446,15 @@ public class TestImportExport {
     Table exportTable = UTIL.getConnection().getTable(desc.getTableName());
 
     Put p1 = new Put(ROW1);
-    p1.add(FAMILYA, QUAL, now, QUAL);
-    p1.add(FAMILYA, QUAL, now + 1, QUAL);
-    p1.add(FAMILYA, QUAL, now + 2, QUAL);
-    p1.add(FAMILYA, QUAL, now + 3, QUAL);
-    p1.add(FAMILYA, QUAL, now + 4, QUAL);
+    p1.addColumn(FAMILYA, QUAL, now, QUAL);
+    p1.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    p1.addColumn(FAMILYA, QUAL, now + 2, QUAL);
+    p1.addColumn(FAMILYA, QUAL, now + 3, QUAL);
+    p1.addColumn(FAMILYA, QUAL, now + 4, QUAL);
 
     // Having another row would actually test the filter.
     Put p2 = new Put(ROW2);
-    p2.add(FAMILYA, QUAL, now, QUAL);
+    p2.addColumn(FAMILYA, QUAL, now, QUAL);
 
     exportTable.put(Arrays.asList(p1, p2));
 
@@ -639,15 +638,15 @@ public class TestImportExport {
 
     // Insert some data
     Put put = new Put(ROW1);
-    put.add(FAMILYA, QUAL, now, QUAL);
-    put.add(FAMILYA, QUAL, now + 1, QUAL);
-    put.add(FAMILYA, QUAL, now + 2, QUAL);
+    put.addColumn(FAMILYA, QUAL, now, QUAL);
+    put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
     exportTable.put(put);
 
     put = new Put(ROW2);
-    put.add(FAMILYA, QUAL, now, QUAL);
-    put.add(FAMILYA, QUAL, now + 1, QUAL);
-    put.add(FAMILYA, QUAL, now + 2, QUAL);
+    put.addColumn(FAMILYA, QUAL, now, QUAL);
+    put.addColumn(FAMILYA, QUAL, now + 1, QUAL);
+    put.addColumn(FAMILYA, QUAL, now + 2, QUAL);
     exportTable.put(put);
 
     // Run the export

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
index 03052fb..5110ef7 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestMultithreadedTableMapper.java
@@ -34,7 +34,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
-import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -116,7 +115,7 @@ public class TestMultithreadedTableMapper {
       newValue.reverse();
       // Now set the value to be collected
       Put outval = new Put(key.get());
-      outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+      outval.addColumn(OUTPUT_FAMILY, null, 
Bytes.toBytes(newValue.toString()));
       context.write(key, outval);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
index 592feee..1bd2437 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestRowCounter.java
@@ -155,13 +155,13 @@ public class TestRowCounter {
     // clean up content of TABLE_NAME
     Table table = TEST_UTIL.deleteTableData(TableName.valueOf(TABLE_NAME));
     ts = System.currentTimeMillis();
-    put1.add(family, col1, ts, Bytes.toBytes("val1"));
+    put1.addColumn(family, col1, ts, Bytes.toBytes("val1"));
     table.put(put1);
     Thread.sleep(100);
 
     ts = System.currentTimeMillis();
-    put2.add(family, col1, ts, Bytes.toBytes("val2"));
-    put3.add(family, col1, ts, Bytes.toBytes("val3"));
+    put2.addColumn(family, col1, ts, Bytes.toBytes("val2"));
+    put3.addColumn(family, col1, ts, Bytes.toBytes("val3"));
     table.put(put2);
     table.put(put3);
     table.close();
@@ -227,9 +227,9 @@ public class TestRowCounter {
     for (; i < TOTAL_ROWS - ROWS_WITH_ONE_COL; i++) {
       byte[] row = Bytes.toBytes("row" + i);
       Put put = new Put(row);
-      put.add(family, col1, value);
-      put.add(family, col2, value);
-      put.add(family, col3, value);
+      put.addColumn(family, col1, value);
+      put.addColumn(family, col2, value);
+      put.addColumn(family, col3, value);
       rowsUpdate.add(put);
     }
 
@@ -237,7 +237,7 @@ public class TestRowCounter {
     for (; i < TOTAL_ROWS; i++) {
       byte[] row = Bytes.toBytes("row" + i);
       Put put = new Put(row);
-      put.add(family, col2, value);
+      put.addColumn(family, col2, value);
       rowsUpdate.add(put);
     }
     table.put(rowsUpdate);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
index b0a4243..4693519 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java
@@ -56,7 +56,6 @@ import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.InputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
-import org.apache.hadoop.mapreduce.Mapper.Context;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -120,12 +119,12 @@ public class TestTableInputFormat {
     Table table = UTIL.createTable(TableName.valueOf(tableName), families);
     Put p = new Put("aaa".getBytes());
     for (byte[] family : families) {
-      p.add(family, null, "value aaa".getBytes());
+      p.addColumn(family, null, "value aaa".getBytes());
     }
     table.put(p);
     p = new Put("bbb".getBytes());
     for (byte[] family : families) {
-      p.add(family, null, "value bbb".getBytes());
+      p.addColumn(family, null, "value bbb".getBytes());
     }
     table.put(p);
     return table;

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
index 5ece857..a52eea6 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduce.java
@@ -85,7 +85,7 @@ public class TestTableMapReduce extends 
TestTableMapReduceBase {
       newValue.reverse();
       // Now set the value to be collected
       Put outval = new Put(key.get());
-      outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+      outval.addColumn(OUTPUT_FAMILY, null, 
Bytes.toBytes(newValue.toString()));
       context.write(key, outval);
     }
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
index 729b085..398c248 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableMapReduceBase.java
@@ -130,7 +130,7 @@ public abstract class TestTableMapReduceBase {
     // Now set the value to be collected
 
     Put outval = new Put(key.get());
-    outval.add(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
+    outval.addColumn(OUTPUT_FAMILY, null, Bytes.toBytes(newValue.toString()));
     return outval;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 7e75830..30ae944 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -121,7 +121,7 @@ public class TestTimeRangeMapRed {
       for (Long ts : tsList) {
         Put put = new Put(key.get());
         put.setDurability(Durability.SKIP_WAL);
-        put.add(FAMILY_NAME, COLUMN_NAME, ts, Bytes.toBytes(true));
+        put.addColumn(FAMILY_NAME, COLUMN_NAME, ts, Bytes.toBytes(true));
         puts.add(put);
       }
       table.put(puts);
@@ -156,7 +156,7 @@ public class TestTimeRangeMapRed {
     for (Map.Entry<Long, Boolean> entry : TIMESTAMP.entrySet()) {
       Put put = new Put(KEY);
       put.setDurability(Durability.SKIP_WAL);
-      put.add(FAMILY_NAME, COLUMN_NAME, entry.getKey(), Bytes.toBytes(false));
+      put.addColumn(FAMILY_NAME, COLUMN_NAME, entry.getKey(), 
Bytes.toBytes(false));
       puts.add(put);
     }
     Table table = UTIL.getConnection().getTable(desc.getTableName());

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
index 60b050f..a9841db 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestWALPlayer.java
@@ -97,8 +97,8 @@ public class TestWALPlayer {
 
     // put a row into the first table
     Put p = new Put(ROW);
-    p.add(FAMILY, COLUMN1, COLUMN1);
-    p.add(FAMILY, COLUMN2, COLUMN2);
+    p.addColumn(FAMILY, COLUMN1, COLUMN1);
+    p.addColumn(FAMILY, COLUMN2, COLUMN2);
     t1.put(p);
     // delete one column
     Delete d = new Delete(ROW);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
index abeab3f..5b3abea 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestAssignmentListener.java
@@ -208,7 +208,7 @@ public class TestAssignmentListener {
         for (int i = 0; i < 10; ++i) {
           byte[] key = Bytes.toBytes("row-" + i);
           Put put = new Put(key);
-          put.add(FAMILY, null, key);
+          put.addColumn(FAMILY, null, key);
           table.put(put);
         }
       } finally {

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
index 579e28a..a9fa039 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestGetLastFlushedSequenceId.java
@@ -73,7 +73,8 @@ public class TestGetLastFlushedSequenceId {
     testUtil.getHBaseAdmin().createNamespace(
       NamespaceDescriptor.create(tableName.getNamespaceAsString()).build());
     Table table = testUtil.createTable(tableName, families);
-    table.put(new Put(Bytes.toBytes("k")).add(family, Bytes.toBytes("q"), 
Bytes.toBytes("v")));
+    table.put(new Put(Bytes.toBytes("k"))
+            .addColumn(family, Bytes.toBytes("q"), Bytes.toBytes("v")));
     MiniHBaseCluster cluster = testUtil.getMiniHBaseCluster();
     List<JVMClusterUtil.RegionServerThread> rsts = 
cluster.getRegionServerThreads();
     Region region = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
index 7cea0df..4e6460b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterTransitions.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Durability;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.RegionLocator;
 import org.apache.hadoop.hbase.client.Result;
@@ -504,7 +503,7 @@ public class TestMasterTransitions {
       byte [] row = getStartKey(hri);
       Put p = new Put(row);
       p.setDurability(Durability.SKIP_WAL);
-      p.add(getTestFamily(), getTestQualifier(), row);
+      p.addColumn(getTestFamily(), getTestQualifier(), row);
       t.put(p);
       rows++;
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
index ed5d919..53ee92b 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestWarmupRegion.java
@@ -93,7 +93,7 @@ public class TestWarmupRegion {
     for (int i = 0; i < numRows; i++) {
       long ts = System.currentTimeMillis() * 2;
       Put put = new Put(ROW, ts);
-      put.add(FAMILY, COLUMN, VALUE);
+      put.addColumn(FAMILY, COLUMN, VALUE);
       table.put(put);
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
index 2cf26c0..d24023d 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/normalizer/TestSimpleRegionNormalizerOnCluster.java
@@ -210,7 +210,7 @@ public class TestSimpleRegionNormalizerOnCluster {
         Put put = new Put(key);
         byte[] col = Bytes.toBytes(String.valueOf(j));
         byte[] value = dataGenerator.generateRandomSizeValue(key, col);
-        put.add(FAMILYNAME, col, value);
+        put.addColumn(FAMILYNAME, col, value);
         region.put(put);
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
index b3f29db..45b62cd 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/MasterProcedureTestingUtility.java
@@ -420,7 +420,7 @@ public class MasterProcedureTestingUtility {
     Put put = new Put(key);
     put.setDurability(Durability.SKIP_WAL);
     for (byte[] family: families) {
-      put.add(family, q, value);
+      put.addColumn(family, q, value);
     }
     return put;
   }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
----------------------------------------------------------------------
diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
index 1c2f161..0c06588 100644
--- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
+++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaThrottle.java
@@ -508,7 +508,7 @@ public class TestQuotaThrottle {
     try {
       while (count < maxOps) {
         Put put = new Put(Bytes.toBytes("row-" + count));
-        put.add(FAMILY, QUALIFIER, Bytes.toBytes("data-" + count));
+        put.addColumn(FAMILY, QUALIFIER, Bytes.toBytes("data-" + count));
         for (final Table table: tables) {
           table.put(put);
         }

Reply via email to