HBASE-12802 Remove unnecessary Table.flushCommits() (Solomon Duskis)

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/0311cc86
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/0311cc86
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/0311cc86

Branch: refs/heads/branch-1
Commit: 0311cc860c91a11b4ad63eca06c4feb3f939f874
Parents: 071c302
Author: tedyu <yuzhih...@gmail.com>
Authored: Mon Jan 5 15:55:10 2015 -0800
Committer: tedyu <yuzhih...@gmail.com>
Committed: Mon Jan 5 15:55:10 2015 -0800

----------------------------------------------------------------------
 .../hadoop/hbase/mttr/IntegrationTestMTTR.java  |   1 -
 .../test/IntegrationTestBigLinkedList.java      |   2 -
 .../hadoop/hbase/rest/TestScannerResource.java  |  35 +-
 .../hbase/rest/TestScannersWithLabels.java      |  15 +-
 .../org/apache/hadoop/hbase/util/HBaseFsck.java |   2 -
 .../hadoop/hbase/HBaseTestingUtility.java       |  20 +-
 .../hadoop/hbase/client/TestFastFail.java       |  29 +-
 .../hadoop/hbase/client/TestFromClientSide.java |   6 -
 .../hbase/client/TestFromClientSide3.java       |   1 -
 .../client/TestSnapshotCloneIndependence.java   |  18 +-
 .../coprocessor/TestOpenTableInCoprocessor.java |   2 -
 .../TestRegionObserverInterface.java            |   6 +-
 .../TestRegionObserverScannerOpenHook.java      |   2 -
 .../hbase/io/encoding/TestChangingEncoding.java |  13 +-
 .../hbase/io/encoding/TestPrefixTree.java       |   5 -
 .../hbase/mapreduce/TestImportExport.java       |  23 +-
 .../hbase/mapreduce/TestTimeRangeMapRed.java    |  21 +-
 .../hbase/regionserver/TestCompactionState.java |   1 -
 .../TestEndToEndSplitTransaction.java           |   7 +-
 .../regionserver/TestRegionServerMetrics.java   |  12 -
 .../replication/TestReplicationSmallTests.java  |  20 +-
 .../visibility/TestVisibilityLabels.java        |  25 +-
 .../TestVisibilityLabelsWithDeletes.java        | 783 ++++++++-----------
 .../apache/hadoop/hbase/util/TestHBaseFsck.java |   8 +-
 .../util/hbck/OfflineMetaRebuildTestCore.java   |   8 +-
 .../hadoop/hbase/wal/TestWALFiltering.java      |   5 +-
 26 files changed, 427 insertions(+), 643 deletions(-)
----------------------------------------------------------------------
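For context on the change repeated in each file below: the old client pattern wrote one Put at a time against an HTable and then called flushCommits() to drain the client-side write buffer; the patch collects the Puts in a List and issues a single Table.put(List<Put>) against a Table obtained from a Connection, so no explicit flush is needed and the narrower Table interface suffices. A minimal sketch of that pattern follows — it is illustrative only, not code from the patch, and the table name, family, and qualifier constants are hypothetical.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchedPutExample {
  // Hypothetical constants for illustration only.
  private static final TableName TABLE_NAME = TableName.valueOf("testtable");
  private static final byte[] FAMILY = Bytes.toBytes("f");
  private static final byte[] QUALIFIER = Bytes.toBytes("q");

  // Old pattern removed by this commit:
  //   HTable table = new HTable(conf, TABLE_NAME);
  //   for (...) { table.put(put); }
  //   table.flushCommits();
  //   table.close();
  static int writeRows(Configuration conf, int numRows) throws IOException {
    List<Put> puts = new ArrayList<>();
    for (int i = 0; i < numRows; i++) {
      Put put = new Put(Bytes.toBytes("row" + i));
      put.add(FAMILY, QUALIFIER, Bytes.toBytes(i)); // branch-1 era Put.add(byte[], byte[], byte[])
      puts.add(put);
    }
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Table table = conn.getTable(TABLE_NAME)) {
      table.put(puts); // one batched call; no flushCommits() required
    }
    return puts.size();
  }
}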


http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
index f64528b..63430a1 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/mttr/IntegrationTestMTTR.java
@@ -486,7 +486,6 @@ public class IntegrationTestMTTR {
       Put p = new Put(Bytes.toBytes(RandomStringUtils.randomAlphanumeric(5)));
      p.add(FAMILY, Bytes.toBytes("\0"), Bytes.toBytes(RandomStringUtils.randomAscii(5)));
       table.put(p);
-      table.flushCommits();
       return true;
     }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
----------------------------------------------------------------------
diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
index f116a66..0a62966 100644
--- a/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
+++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/test/IntegrationTestBigLinkedList.java
@@ -927,9 +927,7 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
         = new org.apache.hadoop.hbase.client.Delete(val);
 
       Table table = new HTable(getConf(), getTableName(getConf()));
-
       table.delete(delete);
-      table.flushCommits();
       table.close();
 
       System.out.println("Delete successful");

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index f5c83ab..d2975da 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -19,10 +19,17 @@
 
 package org.apache.hadoop.hbase.rest;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
+import java.util.ArrayList;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Random;
 
 import javax.xml.bind.JAXBContext;
@@ -32,11 +39,16 @@ import javax.xml.bind.Unmarshaller;
 
 import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
@@ -47,9 +59,6 @@ import org.apache.hadoop.hbase.rest.model.RowModel;
 import org.apache.hadoop.hbase.rest.model.ScannerModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import static org.junit.Assert.*;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -78,10 +87,9 @@ public class TestScannerResource {
   static int insertData(Configuration conf, TableName tableName, String column, double prob)
       throws IOException {
     Random rng = new Random();
-    int count = 0;
-    Table table = new HTable(conf, tableName);
     byte[] k = new byte[3];
     byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
+    List<Put> puts = new ArrayList<>();
     for (byte b1 = 'a'; b1 < 'z'; b1++) {
       for (byte b2 = 'a'; b2 < 'z'; b2++) {
         for (byte b3 = 'a'; b3 < 'z'; b3++) {
@@ -92,15 +100,16 @@ public class TestScannerResource {
             Put put = new Put(k);
             put.setDurability(Durability.SKIP_WAL);
             put.add(famAndQf[0], famAndQf[1], k);
-            table.put(put);
-            count++;
+            puts.add(put);
           }
         }
       }
     }
-    table.flushCommits();
-    table.close();
-    return count;
+    try (Connection conn = ConnectionFactory.createConnection(conf);
+        Table table = conn.getTable(tableName)) {
+      table.put(puts);
+    }
+    return puts.size();
   }
 
   static int countCellSet(CellSetModel model) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
index 3620efd..1be14f2 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannersWithLabels.java
@@ -23,7 +23,9 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.Iterator;
+import java.util.List;
 import java.util.Random;
 
 import javax.xml.bind.JAXBContext;
@@ -88,23 +90,22 @@ public class TestScannersWithLabels {
   private static Configuration conf;
 
   private static int insertData(TableName tableName, String column, double prob) throws IOException {
-    Random rng = new Random();
-    int count = 0;
-    Table table = new HTable(TEST_UTIL.getConfiguration(), tableName);
     byte[] k = new byte[3];
     byte[][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(column));
 
+    List<Put> puts = new ArrayList<>();
     for (int i = 0; i < 9; i++) {
       Put put = new Put(Bytes.toBytes("row" + i));
       put.setDurability(Durability.SKIP_WAL);
       put.add(famAndQf[0], famAndQf[1], k);
      put.setCellVisibility(new CellVisibility("(" + SECRET + "|" + CONFIDENTIAL + ")" + "&" + "!"
           + TOPSECRET));
-      table.put(put);
-      count++;
+      puts.add(put);
     }
-    table.flushCommits();
-    return count;
+    try (Table table = new HTable(TEST_UTIL.getConfiguration(), tableName)) {
+      table.put(puts);
+    }
+    return puts.size();
   }
 
   private static int countCellSet(CellSetModel model) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
index aad8956..1b22946 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
@@ -1730,7 +1730,6 @@ public class HBaseFsck extends Configured implements Closeable {
   private void deleteMetaRegion(byte[] metaKey) throws IOException {
     Delete d = new Delete(metaKey);
     meta.delete(d);
-    meta.flushCommits();
     LOG.info("Deleted " + Bytes.toString(metaKey) + " from META" );
   }
 
@@ -1751,7 +1750,6 @@ public class HBaseFsck extends Configured implements Closeable {
     mutations.add(p);
 
     meta.mutateRow(mutations);
-    meta.flushCommits();
     LOG.info("Reset split parent " + hi.metaEntry.getRegionNameAsString() + " 
in META" );
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
index 711d0d5..ee67629 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java
@@ -1816,7 +1816,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @return Count of rows loaded.
    * @throws IOException
    */
-  public int loadTable(final HTable t, final byte[] f) throws IOException {
+  public int loadTable(final Table t, final byte[] f) throws IOException {
     return loadTable(t, new byte[][] {f});
   }
 
@@ -1827,7 +1827,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @return Count of rows loaded.
    * @throws IOException
    */
-  public int loadTable(final HTable t, final byte[] f, boolean writeToWAL) throws IOException {
+  public int loadTable(final Table t, final byte[] f, boolean writeToWAL) throws IOException {
     return loadTable(t, new byte[][] {f}, null, writeToWAL);
   }
 
@@ -1838,7 +1838,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @return Count of rows loaded.
    * @throws IOException
    */
-  public int loadTable(final HTable t, final byte[][] f) throws IOException {
+  public int loadTable(final Table t, final byte[][] f) throws IOException {
     return loadTable(t, f, null);
   }
 
@@ -1850,7 +1850,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @return Count of rows loaded.
    * @throws IOException
    */
-  public int loadTable(final HTable t, final byte[][] f, byte[] value) throws IOException {
+  public int loadTable(final Table t, final byte[][] f, byte[] value) throws IOException {
     return loadTable(t, f, value, true);
   }
 
@@ -1862,20 +1862,18 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
    * @return Count of rows loaded.
    * @throws IOException
    */
-  public int loadTable(final HTable t, final byte[][] f, byte[] value, boolean writeToWAL) throws IOException {
-    t.setAutoFlushTo(false);
-    int rowCount = 0;
+  public int loadTable(final Table t, final byte[][] f, byte[] value, boolean writeToWAL) throws IOException {
+    List<Put> puts = new ArrayList<>();
     for (byte[] row : HBaseTestingUtility.ROWS) {
       Put put = new Put(row);
       put.setDurability(writeToWAL ? Durability.USE_DEFAULT : Durability.SKIP_WAL);
       for (int i = 0; i < f.length; i++) {
         put.add(f[i], null, value != null ? value : row);
       }
-      t.put(put);
-      rowCount++;
+      puts.add(put);
     }
-    t.flushCommits();
-    return rowCount;
+    t.put(puts);
+    return puts.size();
   }
 
   /** A tracker for tracking and validating table rows

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
index 2bc72b9..75334d3 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFastFail.java
@@ -17,7 +17,8 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -118,8 +119,17 @@ public class TestFastFail {
     /**
      * Write numRows worth of data, so that the workers can arbitrarily read.
      */
-    try (Table table = connection.getTable(TableName.valueOf(tableName));) {
-      writeData(table, numRows);
+    List<Put> puts = new ArrayList<>();
+    for (long i = 0; i < numRows; i++) {
+      byte[] rowKey = longToByteArrayKey(i);
+      Put put = new Put(rowKey);
+      byte[] value = rowKey; // value is the same as the row key
+      put.add(FAMILY, QUALIFIER, value);
+      puts.add(put);
+    }
+    try (Table table = connection.getTable(TableName.valueOf(tableName))) {
+      table.put(puts);
+      LOG.info("Written all puts.");
     }
 
     /**
@@ -296,17 +306,4 @@ public class TestFastFail {
   private byte[] longToByteArrayKey(long rowKey) {
     return LoadTestKVGenerator.md5PrefixedKey(rowKey).getBytes();
   }
-
-  public void writeData(Table table, long numRows) throws IOException,
-      InterruptedException {
-    table.flushCommits();
-    for (long i = 0; i < numRows; i++) {
-      byte[] rowKey = longToByteArrayKey(i);
-      Put put = new Put(rowKey);
-      byte[] value = rowKey; // value is the same as the row key
-      put.add(FAMILY, QUALIFIER, value);
-      table.put(put);
-    }
-    LOG.info("Written all puts.");
-  }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index 8511f88..bc805fe 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -5506,22 +5506,17 @@ public class TestFromClientSide {
     Put p = new Put(row);
     p.add(FAMILY, QUALIFIER, 10, VALUE);
     table.put(p);
-    table.flushCommits();
-
     p = new Put(row);
     p.add(FAMILY, QUALIFIER, 11, ArrayUtils.add(VALUE, (byte) 2));
     table.put(p);
-    table.flushCommits();
 
     p = new Put(row);
     p.add(FAMILY, QUALIFIER, 12, ArrayUtils.add(VALUE, (byte) 3));
     table.put(p);
-    table.flushCommits();
 
     p = new Put(row);
     p.add(FAMILY, QUALIFIER, 13, ArrayUtils.add(VALUE, (byte) 4));
     table.put(p);
-    table.flushCommits();
 
     int versions = 4;
     Scan s = new Scan(row);
@@ -5642,7 +5637,6 @@ public class TestFromClientSide {
     put = new Put(Bytes.toBytes("0-b22222-0000000000000000009"));
     put.add(FAMILY, QUALIFIER, VALUE);
     ht.put(put);
-    ht.flushCommits();
     Scan scan = new Scan(Bytes.toBytes("0-b11111-9223372036854775807"),
         Bytes.toBytes("0-b11111-0000000000000000000"));
     scan.setReversed(true);

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
index 75cfb3a..39379e4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide3.java
@@ -412,7 +412,6 @@ public class TestFromClientSide3 {
     Put put = new Put(ROW_BYTES);
     put.add(FAMILY, COL_QUAL, VAL_BYTES);
     table.put(put);
-    table.flushCommits();
 
     //Try getting the row with an empty row key
     Result res = null;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
index d57654f..9efe0b1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestSnapshotCloneIndependence.java
@@ -172,7 +172,7 @@ public class TestSnapshotCloneIndependence {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
       }
-      if (t.getRegionLocations().size() > originalCount) {
+      if (t.getAllRegionLocations().size() > originalCount) {
         return;
       }
     }
@@ -193,9 +193,7 @@ public class TestSnapshotCloneIndependence {
     final TableName localTableName =
         TableName.valueOf(STRING_TABLE_NAME + startTime);
 
-    HTable original = UTIL.createTable(localTableName, TEST_FAM);
-    try {
-
+    try (Table original = UTIL.createTable(localTableName, TEST_FAM)) {
       UTIL.loadTable(original, TEST_FAM);
       final int origTableRowCount = UTIL.countRows(original);
 
@@ -212,9 +210,7 @@ public class TestSnapshotCloneIndependence {
       TableName cloneTableName = TableName.valueOf("test-clone-" + localTableName);
       admin.cloneSnapshot(snapshotName, cloneTableName);
 
-      Table clonedTable = new HTable(UTIL.getConfiguration(), cloneTableName);
-
-      try {
+      try (Table clonedTable = new HTable(UTIL.getConfiguration(), cloneTableName)){
         final int clonedTableRowCount = UTIL.countRows(clonedTable);
 
         Assert.assertEquals(
@@ -227,7 +223,6 @@ public class TestSnapshotCloneIndependence {
         Put p = new Put(Bytes.toBytes(rowKey));
         p.add(TEST_FAM, Bytes.toBytes("someQualifier"), Bytes.toBytes("someString"));
         original.put(p);
-        original.flushCommits();
 
         // Verify that it is not present in the original table
         Assert.assertEquals("The row count of the original table was not 
modified by the put",
@@ -239,7 +234,6 @@ public class TestSnapshotCloneIndependence {
         p = new Put(Bytes.toBytes(rowKey));
         p.add(TEST_FAM, Bytes.toBytes("someQualifier"), Bytes.toBytes("someString"));
         clonedTable.put(p);
-        clonedTable.flushCommits();
 
         // Verify that the new family is not in the restored table's description
         Assert.assertEquals(
@@ -247,13 +241,7 @@ public class TestSnapshotCloneIndependence {
           origTableRowCount + 1, UTIL.countRows(original));
         Assert.assertEquals("The row count of the cloned table was not 
modified by the put",
           clonedTableRowCount + 1, UTIL.countRows(clonedTable));
-      } finally {
-
-        clonedTable.close();
       }
-    } finally {
-
-      original.close();
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
index fc60c80..17488a9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestOpenTableInCoprocessor.java
@@ -70,7 +70,6 @@ public class TestOpenTableInCoprocessor {
         final WALEdit edit, final Durability durability) throws IOException {
       Table table = e.getEnvironment().getTable(otherTable);
       table.put(put);
-      table.flushCommits();
       completed[0] = true;
       table.close();
     }
@@ -165,7 +164,6 @@ public class TestOpenTableInCoprocessor {
     Put p = new Put(new byte[] { 'a' });
     p.add(family, null, new byte[] { 'a' });
     table.put(p);
-    table.flushCommits();
     table.close();
 
     Table target = new HTable(UTIL.getConfiguration(), otherTable);

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
index 388a15f..af0f641 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverInterface.java
@@ -243,12 +243,10 @@ public class TestRegionObserverInterface {
   public void testCheckAndPutHooks() throws IOException {
     TableName tableName =
        TableName.valueOf(TEST_TABLE.getNameAsString() + ".testCheckAndPutHooks");
-    Table table = util.createTable(tableName, new byte[][] {A, B, C});
-    try {
+    try (Table table = util.createTable(tableName, new byte[][] {A, B, C})) {
       Put p = new Put(Bytes.toBytes(0));
       p.add(A, A, A);
       table.put(p);
-      table.flushCommits();
       p = new Put(Bytes.toBytes(0));
       p.add(A, A, A);
       verifyMethodResult(SimpleRegionObserver.class,
@@ -266,7 +264,6 @@ public class TestRegionObserverInterface {
           );
     } finally {
       util.deleteTable(tableName);
-      table.close();
     }
   }
 
@@ -279,7 +276,6 @@ public class TestRegionObserverInterface {
       Put p = new Put(Bytes.toBytes(0));
       p.add(A, A, A);
       table.put(p);
-      table.flushCommits();
       Delete d = new Delete(Bytes.toBytes(0));
       table.delete(d);
       verifyMethodResult(SimpleRegionObserver.class,

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
index ea1b660..e7860d9 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/coprocessor/TestRegionObserverScannerOpenHook.java
@@ -264,7 +264,6 @@ public class TestRegionObserverScannerOpenHook {
     Put put = new Put(ROW);
     put.add(A, A, A);
     table.put(put);
-    table.flushCommits();
 
     HRegionServer rs = UTIL.getRSForFirstRegionInTable(desc.getTableName());
     List<HRegion> regions = rs.getOnlineRegions(desc.getTableName());
@@ -278,7 +277,6 @@ public class TestRegionObserverScannerOpenHook {
     put = new Put(Bytes.toBytes("anotherrow"));
     put.add(A, A, A);
     table.put(put);
-    table.flushCommits();
     admin.flushRegion(region.getRegionName());
 
     // run a compaction, which normally would should get rid of the data

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
index 0e0c15d..621d3f8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestChangingEncoding.java
@@ -37,6 +37,8 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.testclassification.LargeTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -128,8 +130,7 @@ public class TestChangingEncoding {
   static void writeTestDataBatch(Configuration conf, TableName tableName,
       int batchId) throws Exception {
     LOG.debug("Writing test data batch " + batchId);
-    Table table = new HTable(conf, tableName);
-    table.setAutoFlushTo(false);
+    List<Put> puts = new ArrayList<>();
     for (int i = 0; i < NUM_ROWS_PER_BATCH; ++i) {
       Put put = new Put(getRowKey(batchId, i));
       for (int j = 0; j < NUM_COLS_PER_ROW; ++j) {
@@ -137,10 +138,12 @@ public class TestChangingEncoding {
             getValue(batchId, i, j));
       }
       put.setDurability(Durability.SKIP_WAL);
-      table.put(put);
+      puts.add(put);
+    }
+    try (Connection conn = ConnectionFactory.createConnection(conf);
+        Table table = conn.getTable(tableName)) {
+      table.put(puts);
     }
-    table.flushCommits();
-    table.close();
   }
 
   static void verifyTestDataBatch(Configuration conf, TableName tableName,

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
index aca9b78..5245cc1 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestPrefixTree.java
@@ -93,23 +93,18 @@ public class TestPrefixTree {
       Put put = new Put(Bytes.toBytes("a-b-0-0"));
       put.add(fam, qual1, Bytes.toBytes("c1-value"));
       table.put(put);
-      table.flushCommits();
       put = new Put(row1_bytes);
       put.add(fam, qual1, Bytes.toBytes("c1-value"));
       table.put(put);
-      table.flushCommits();
       put = new Put(row2_bytes);
       put.add(fam, qual2, Bytes.toBytes("c2-value"));
       table.put(put);
-      table.flushCommits();
       put = new Put(row3_bytes);
       put.add(fam, qual2, Bytes.toBytes("c2-value-2"));
       table.put(put);
-      table.flushCommits();
       put = new Put(row4_bytes);
       put.add(fam, qual2, Bytes.toBytes("c2-value-3"));
       table.put(put);
-      table.flushCommits();
       hBaseAdmin.flush(tableName);
       String[] rows = new String[3];
       rows[0] = row1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
index 9448d30..ea6f5e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java
@@ -31,6 +31,7 @@ import java.io.IOException;
 import java.io.PrintStream;
 import java.net.URL;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
@@ -428,20 +429,18 @@ public class TestImportExport {
     UTIL.getHBaseAdmin().createTable(desc);
     Table exportTable = new HTable(UTIL.getConfiguration(), desc.getTableName());
 
-    Put p = new Put(ROW1);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    p.add(FAMILYA, QUAL, now + 1, QUAL);
-    p.add(FAMILYA, QUAL, now + 2, QUAL);
-    p.add(FAMILYA, QUAL, now + 3, QUAL);
-    p.add(FAMILYA, QUAL, now + 4, QUAL);
-    exportTable.put(p);
+    Put p1 = new Put(ROW1);
+    p1.add(FAMILYA, QUAL, now, QUAL);
+    p1.add(FAMILYA, QUAL, now + 1, QUAL);
+    p1.add(FAMILYA, QUAL, now + 2, QUAL);
+    p1.add(FAMILYA, QUAL, now + 3, QUAL);
+    p1.add(FAMILYA, QUAL, now + 4, QUAL);
 
     // Having another row would actually test the filter.
-    p = new Put(ROW2);
-    p.add(FAMILYA, QUAL, now, QUAL);
-    exportTable.put(p);
-    // Flush the commits.
-    exportTable.flushCommits();
+    Put p2 = new Put(ROW2);
+    p2.add(FAMILYA, QUAL, now, QUAL);
+
+    exportTable.put(Arrays.asList(p1, p2));
 
     // Export the simple table
     String[] args = new String[] { EXPORT_TABLE, FQ_OUTPUT_DIR, "1000" };

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
index 9efc77e..cd85756 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTimeRangeMapRed.java
@@ -117,13 +117,14 @@ public class TestTimeRangeMapRed {
         tsList.add(kv.getTimestamp());
       }
 
+      List<Put> puts = new ArrayList<>();
       for (Long ts : tsList) {
         Put put = new Put(key.get());
         put.setDurability(Durability.SKIP_WAL);
         put.add(FAMILY_NAME, COLUMN_NAME, ts, Bytes.toBytes(true));
-        table.put(put);
+        puts.add(put);
       }
-      table.flushCommits();
+      table.put(puts);
     }
 
     @Override
@@ -150,20 +151,18 @@ public class TestTimeRangeMapRed {
     col.setMaxVersions(Integer.MAX_VALUE);
     desc.addFamily(col);
     admin.createTable(desc);
-    Table table = new HTable(UTIL.getConfiguration(), desc.getTableName());
-    prepareTest(table);
-    runTestOnTable();
-    verify(table);
-  }
-
-  private void prepareTest(final Table table) throws IOException {
+    List<Put> puts = new ArrayList<Put>();
     for (Map.Entry<Long, Boolean> entry : TIMESTAMP.entrySet()) {
       Put put = new Put(KEY);
       put.setDurability(Durability.SKIP_WAL);
       put.add(FAMILY_NAME, COLUMN_NAME, entry.getKey(), Bytes.toBytes(false));
-      table.put(put);
+      puts.add(put);
     }
-    table.flushCommits();
+    Table table = new HTable(UTIL.getConfiguration(), desc.getTableName());
+    table.put(puts);
+    runTestOnTable();
+    verify(table);
+    table.close();
   }
 
   private void runTestOnTable()

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
index 6d49864..e4bf0bf 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactionState.java
@@ -226,7 +226,6 @@ public class TestCompactionState {
         puts.add(p);
       }
       ht.put(puts);
-      ht.flushCommits();
       TEST_UTIL.flush();
       puts.clear();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
index f762dfc..6d2b172 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestEndToEndSplitTransaction.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableMap;
@@ -301,13 +302,13 @@ public class TestEndToEndSplitTransaction {
     }
 
     void addData(int start) throws IOException {
+      List<Put> puts = new ArrayList<>();
       for (int i=start; i< start + 100; i++) {
         Put put = new Put(Bytes.toBytes(i));
-
         put.add(family, family, Bytes.toBytes(i));
-        table.put(put);
+        puts.add(put);
       }
-      table.flushCommits();
+      table.put(puts);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
index a6e38f5..9ba224a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRegionServerMetrics.java
@@ -202,7 +202,6 @@ public class TestRegionServerMetrics {
     p.setDurability(Durability.SKIP_WAL);
 
     t.put(p);
-    t.flushCommits();
 
     metricsRegionServer.getRegionServerWrapper().forceRecompute();
     metricsHelper.assertGauge("mutationsWithoutWALCount", 1, serverSource);
@@ -231,7 +230,6 @@ public class TestRegionServerMetrics {
     Put p = new Put(row);
     p.add(cf, qualifier, val);
     t.put(p);
-    t.flushCommits();
     TEST_UTIL.getHBaseAdmin().flush(tableName);
 
     metricsRegionServer.getRegionServerWrapper().forceRecompute();
@@ -257,18 +255,14 @@ public class TestRegionServerMetrics {
     Put p = new Put(row);
     p.add(cf, qualifier, valOne);
     t.put(p);
-    t.flushCommits();
 
     Put pTwo = new Put(row);
     pTwo.add(cf, qualifier, valTwo);
     t.checkAndPut(row, cf, qualifier, valOne, pTwo);
-    t.flushCommits();
 
     Put pThree = new Put(row);
     pThree.add(cf, qualifier, valThree);
     t.checkAndPut(row, cf, qualifier, valOne, pThree);
-    t.flushCommits();
-
 
     metricsRegionServer.getRegionServerWrapper().forceRecompute();
     metricsHelper.assertCounter("checkMutateFailedCount", 1, serverSource);
@@ -293,7 +287,6 @@ public class TestRegionServerMetrics {
     Put p = new Put(row);
     p.add(cf, qualifier, val);
     t.put(p);
-    t.flushCommits();
 
     for(int count = 0; count< 13; count++) {
       Increment inc = new Increment(row);
@@ -301,8 +294,6 @@ public class TestRegionServerMetrics {
       t.increment(inc);
     }
 
-    t.flushCommits();
-
     metricsRegionServer.getRegionServerWrapper().forceRecompute();
     metricsHelper.assertCounter("incrementNumOps", 13, serverSource);
 
@@ -325,7 +316,6 @@ public class TestRegionServerMetrics {
     Put p = new Put(row);
     p.add(cf, qualifier, val);
     t.put(p);
-    t.flushCommits();
 
     for(int count = 0; count< 73; count++) {
       Append append = new Append(row);
@@ -333,8 +323,6 @@ public class TestRegionServerMetrics {
       t.append(append);
     }
 
-    t.flushCommits();
-
     metricsRegionServer.getRegionServerWrapper().forceRecompute();
     metricsHelper.assertCounter("appendNumOps", 73, serverSource);
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
index d1c42f2..7ecdaf7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/replication/TestReplicationSmallTests.java
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 
@@ -244,15 +245,14 @@ public class TestReplicationSmallTests extends TestReplicationBase {
   @Test(timeout=300000)
   public void testSmallBatch() throws Exception {
     LOG.info("testSmallBatch");
-    Put put;
     // normal Batch tests
-    htable1.setAutoFlushTo(false);
+    List<Put> puts = new ArrayList<>();
     for (int i = 0; i < NB_ROWS_IN_BATCH; i++) {
-      put = new Put(Bytes.toBytes(i));
+      Put put = new Put(Bytes.toBytes(i));
       put.add(famName, row, row);
-      htable1.put(put);
+      puts.add(put);
     }
-    htable1.flushCommits();
+    htable1.put(puts);
 
     Scan scan = new Scan();
 
@@ -385,14 +385,16 @@ public class TestReplicationSmallTests extends TestReplicationBase {
   @Test(timeout=300000)
   public void testLoading() throws Exception {
     LOG.info("Writing out rows to table1 in testLoading");
-    htable1.setWriteBufferSize(1024);
-    htable1.setAutoFlushTo(false);
+    List<Put> puts = new ArrayList<Put>();
     for (int i = 0; i < NB_ROWS_IN_BIG_BATCH; i++) {
       Put put = new Put(Bytes.toBytes(i));
       put.add(famName, row, row);
-      htable1.put(put);
+      puts.add(put);
     }
-    htable1.flushCommits();
+    htable1.setWriteBufferSize(1024);
+    // The puts will be iterated through and flushed only when the buffer
+    // size is reached.
+    htable1.put(puts);
 
     Scan scan = new Scan();
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/0311cc86/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
index 8684531..455d632 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/visibility/TestVisibilityLabels.java
@@ -895,24 +895,15 @@ public abstract class TestVisibilityLabels {
 
   static Table createTableAndWriteDataWithLabels(TableName tableName, String... labelExps)
       throws Exception {
-    Table table = null;
-    try {
-      table = TEST_UTIL.createTable(tableName, fam);
-      int i = 1;
-      List<Put> puts = new ArrayList<Put>();
-      for (String labelExp : labelExps) {
-        Put put = new Put(Bytes.toBytes("row" + i));
-        put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
-        put.setCellVisibility(new CellVisibility(labelExp));
-        puts.add(put);
-        i++;
-      }
-      table.put(puts);
-    } finally {
-      if (table != null) {
-        table.flushCommits();
-      }
+    List<Put> puts = new ArrayList<Put>();
+    for (int i = 0; i < labelExps.length; i++) {
+      Put put = new Put(Bytes.toBytes("row" + (i+1)));
+      put.add(fam, qual, HConstants.LATEST_TIMESTAMP, value);
+      put.setCellVisibility(new CellVisibility(labelExps[i]));
+      puts.add(put);
     }
+    Table table = TEST_UTIL.createTable(tableName, fam);
+    table.put(puts);
     return table;
   }
 

Reply via email to