Repository: hbase
Updated Branches:
  refs/heads/master 452e38ff8 -> 094d65e6f


http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
index c61bd78..a467071 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedUpdater.java
@@ -208,7 +208,7 @@ public class MultiThreadedUpdater extends MultiThreadedWriterBase {
                   switch (mt) {
                   case PUT:
                     Put put = new Put(rowKey);
-                    put.add(cf, column, hashCodeBytes);
+                    put.addColumn(cf, column, hashCodeBytes);
                     mutate(table, put, rowKeyBase, rowKey, cf, column, checkedValue);
                     buf.append(MutationType.PUT.getNumber());
                     break;

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
index 4d1c286..83e207a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/MultiThreadedWriter.java
@@ -105,7 +105,7 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase {
             byte[][] columns = dataGenerator.generateColumnsForCf(rowKey, cf);
             for (byte[] column : columns) {
               byte[] value = dataGenerator.generateValue(rowKey, cf, column);
-              put.add(cf, column, value);
+              put.addColumn(cf, column, value);
               ++columnCount;
               if (!isMultiPut) {
                 insert(table, put, rowKeyBase);
@@ -114,8 +114,8 @@ public class MultiThreadedWriter extends MultiThreadedWriterBase {
               }
             }
             long rowKeyHash = Arrays.hashCode(rowKey);
-            put.add(cf, MUTATE_INFO, HConstants.EMPTY_BYTE_ARRAY);
-            put.add(cf, INCREMENT, Bytes.toBytes(rowKeyHash));
+            put.addColumn(cf, MUTATE_INFO, HConstants.EMPTY_BYTE_ARRAY);
+            put.addColumn(cf, INCREMENT, Bytes.toBytes(rowKeyHash));
             if (!isMultiPut) {
               insert(table, put, rowKeyBase);
               numCols.addAndGet(1);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
index 751130f..c988761 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorScanPolicy.java
@@ -110,17 +110,17 @@ public class TestCoprocessorScanPolicy {
     // set the version override to 2
     Put p = new Put(R);
     p.setAttribute("versions", new byte[]{});
-    p.add(F, tableName.getName(), Bytes.toBytes(2));
+    p.addColumn(F, tableName.getName(), Bytes.toBytes(2));
     t.put(p);
 
     long now = EnvironmentEdgeManager.currentTime();
 
     // insert 2 versions
     p = new Put(R);
-    p.add(F, Q, now, Q);
+    p.addColumn(F, Q, now, Q);
     t.put(p);
     p = new Put(R);
-    p.add(F, Q, now+1, Q);
+    p.addColumn(F, Q, now + 1, Q);
     t.put(p);
     Get g = new Get(R);
     g.setMaxVersions(10);
@@ -138,7 +138,7 @@ public class TestCoprocessorScanPolicy {
 
     // insert a 3rd version
     p = new Put(R);
-    p.add(F, Q, now+2, Q);
+    p.addColumn(F, Q, now + 2, Q);
     t.put(p);
     g = new Get(R);
     g.setMaxVersions(10);
@@ -172,14 +172,14 @@ public class TestCoprocessorScanPolicy {
     // Set the TTL override to 3s
     Put p = new Put(R);
     p.setAttribute("ttl", new byte[]{});
-    p.add(F, tableName.getName(), Bytes.toBytes(3000L));
+    p.addColumn(F, tableName.getName(), Bytes.toBytes(3000L));
     t.put(p);
 
     p = new Put(R);
-    p.add(F, Q, ts, Q);
+    p.addColumn(F, Q, ts, Q);
     t.put(p);
     p = new Put(R);
-    p.add(F, Q, ts+1, Q);
+    p.addColumn(F, Q, ts + 1, Q);
     t.put(p);
 
     // these two should be expired but for the override

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
index 5c08f1f..0ed8711 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckEncryption.java
@@ -109,8 +109,8 @@ public class TestHBaseFsckEncryption {
       for (int i = 0; i < values.length; i++) {
         for (int j = 0; j < values.length; j++) {
           Put put = new Put(new byte[] { values[i], values[j] });
-          put.add(Bytes.toBytes("cf"), new byte[] {}, new byte[] { values[i],
-            values[j] });
+          put.addColumn(Bytes.toBytes("cf"), new byte[]{}, new 
byte[]{values[i],
+                  values[j]});
           table.put(put);
         }
       }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
index df3c69c..c1c49e2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckOneRS.java
@@ -1363,10 +1363,12 @@ public class TestHBaseFsckOneRS extends BaseTestHBaseFsck {
       meta.delete(deletes);
 
       // Mess it up by creating a fake hbase:meta entry with no associated RegionInfo
-      meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66")).add(
-          HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER, Bytes.toBytes("node1:60020")));
-      meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66")).add(
-          HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER, Bytes.toBytes(1362150791183L)));
+      meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66"))
+          .addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+              Bytes.toBytes("node1:60020")));
+      meta.put(new Put(Bytes.toBytes(table + ",,1361911384013.810e28f59a57da91c66"))
+          .addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
+              Bytes.toBytes(1362150791183L)));
       meta.close();
 
       HBaseFsck hbck = doFsck(conf, false);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
index 4eb1dd8..b3bd355 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHBaseFsckTwoRS.java
@@ -363,7 +363,7 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
       assertNotNull(serverName);
       try (Table meta = connection.getTable(TableName.META_TABLE_NAME, tableExecutorService)) {
         Put put = new Put(regionName);
-        put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+        put.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
             Bytes.toBytes(serverName.getHostAndPort()));
         meta.put(put);
       }
@@ -432,9 +432,9 @@ public class TestHBaseFsckTwoRS extends BaseTestHBaseFsck {
           startCode != sn.getStartcode()) {
         Put put = new Put(res.getRow());
         put.setDurability(Durability.SKIP_WAL);
-        put.add(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
+        put.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER,
             Bytes.toBytes(sn.getHostAndPort()));
-        put.add(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
+        put.addColumn(HConstants.CATALOG_FAMILY, HConstants.STARTCODE_QUALIFIER,
             Bytes.toBytes(sn.getStartcode()));
         meta.put(put);
         hri = MetaTableAccessor.getHRegionInfo(res);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
index 480ae91..661af14 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTable.java
@@ -146,7 +146,7 @@ public class TestMergeTable {
     for(int i = firstRow; i < firstRow + nrows; i++) {
       Put put = new Put(Bytes.toBytes("row_" + String.format("%1$05d", i)));
       put.setDurability(Durability.SKIP_WAL);
-      put.add(COLUMN_NAME, null,  VALUE);
+      put.addColumn(COLUMN_NAME, null, VALUE);
       region.put(put);
       if (i % 10000 == 0) {
         LOG.info("Flushing write #" + i);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
index a1b2aa1..cb51fb2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestMergeTool.java
@@ -164,7 +164,7 @@ public class TestMergeTool extends HBaseTestCase {
         for (int j = 0; j < rows[i].length; j++) {
           byte [] row = rows[i][j];
           Put put = new Put(row);
-          put.add(FAMILY, QUALIFIER, row);
+          put.addColumn(FAMILY, QUALIFIER, row);
           regions[i].put(put);
         }
         HRegion.addRegionToMETA(meta, regions[i]);

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
index 973965c..ecda77f 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/hbck/OfflineMetaRebuildTestCore.java
@@ -150,8 +150,8 @@ public class OfflineMetaRebuildTestCore {
     for (int i = 0; i < values.length; i++) {
       for (int j = 0; j < values.length; j++) {
         Put put = new Put(new byte[] { values[i], values[j] });
-        put.add(Bytes.toBytes("fam"), new byte[] {}, new byte[] { values[i],
-            values[j] });
+        put.addColumn(Bytes.toBytes("fam"), new byte[]{}, new byte[]{values[i],
+                values[j]});
         puts.add(put);
       }
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
index 8833eda..bde3e49 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestWALFiltering.java
@@ -89,7 +89,7 @@ public class TestWALFiltering {
             final byte[] value = Bytes.toBytes("value_for_row_" + iRow +
                 "_cf_" + Bytes.toStringBinary(cf) + "_col_" + iCol + "_ts_" +
                 ts + "_random_" + rand.nextLong());
-            put.add(cf, qual, ts, value);
+            put.addColumn(cf, qual, ts, value);
           } else if (rand.nextDouble() < 0.8) {
             del.addColumn(cf, qual, ts);
           } else {

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
index 3af853b..7996c17 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/WALPerformanceEvaluation.java
@@ -545,7 +545,8 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
     for (int cf = 0; cf < numFamilies; ++cf) {
       for (int q = 0; q < numQualifiers; ++q) {
         rand.nextBytes(value);
-        put.add(Bytes.toBytes(FAMILY_PREFIX + cf), Bytes.toBytes(QUALIFIER_PREFIX + q), value);
+        put.addColumn(Bytes.toBytes(FAMILY_PREFIX + cf),
+            Bytes.toBytes(QUALIFIER_PREFIX + q), value);
       }
     }
     return put;

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-shell/src/main/ruby/hbase/admin.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/admin.rb b/hbase-shell/src/main/ruby/hbase/admin.rb
index 2f91ba7..e680e9c 100644
--- a/hbase-shell/src/main/ruby/hbase/admin.rb
+++ b/hbase-shell/src/main/ruby/hbase/admin.rb
@@ -853,7 +853,9 @@ module Hbase
 
       # Write it back
       put = org.apache.hadoop.hbase.client.Put.new(region_bytes)
-      put.add(org.apache.hadoop.hbase.HConstants::CATALOG_FAMILY, org.apache.hadoop.hbase.HConstants::REGIONINFO_QUALIFIER, org.apache.hadoop.hbase.util.Writables.getBytes(hri))
+      put.addColumn(org.apache.hadoop.hbase.HConstants::CATALOG_FAMILY,
+        org.apache.hadoop.hbase.HConstants::REGIONINFO_QUALIFIER,
+        org.apache.hadoop.hbase.util.Writables.getBytes(hri))
       meta.put(put)
     end
     # Apply user metadata to table/column descriptor

http://git-wip-us.apache.org/repos/asf/hbase/blob/de9555ce/hbase-shell/src/main/ruby/hbase/table.rb
----------------------------------------------------------------------
diff --git a/hbase-shell/src/main/ruby/hbase/table.rb b/hbase-shell/src/main/ruby/hbase/table.rb
index 9f8b673..153f07e 100644
--- a/hbase-shell/src/main/ruby/hbase/table.rb
+++ b/hbase-shell/src/main/ruby/hbase/table.rb
@@ -152,9 +152,9 @@ EOF
         timestamp = nil
       end
       if timestamp
-        p.add(family, qualifier, timestamp, value.to_s.to_java_bytes)
+        p.addColumn(family, qualifier, timestamp, value.to_s.to_java_bytes)
       else
-        p.add(family, qualifier, value.to_s.to_java_bytes)
+        p.addColumn(family, qualifier, value.to_s.to_java_bytes)
       end
       @table.put(p)
     end
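
For readers following the change: every hunk above makes the same substitution, replacing the deprecated Put.add(family, qualifier[, ts], value) overloads with the equivalent Put.addColumn(...) overloads. A minimal standalone sketch of the pattern is below; the table name, row key, family, and qualifier are illustrative placeholders and are not taken from the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class PutAddColumnSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Assumes a table "example_table" with family "cf" already exists; adjust as needed.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Table table = connection.getTable(TableName.valueOf("example_table"))) {
      Put put = new Put(Bytes.toBytes("row-1"));
      // Before this patch (deprecated):
      //   put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      // After this patch:
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("v"));
      // The timestamped overload is swapped the same way:
      put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"),
          System.currentTimeMillis(), Bytes.toBytes("v2"));
      table.put(put);
    }
  }
}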
