Repository: hbase
Updated Branches:
  refs/heads/branch-1.4 976a24092 -> a8228eaad


HBASE-20146 Regions are stuck while opening when WAL is disabled

Signed-off-by: zhangduo <zhang...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/a8228eaa
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/a8228eaa
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/a8228eaa

Branch: refs/heads/branch-1.4
Commit: a8228eaadb00b4ea93df30d3add5a32a414a38f3
Parents: 976a240
Author: Ashish Singhi <ashishsin...@apache.org>
Authored: Thu Mar 8 18:47:16 2018 +0530
Committer: zhangduo <zhang...@apache.org>
Committed: Fri Mar 9 21:27:37 2018 +0800

----------------------------------------------------------------------
 .../hadoop/hbase/wal/DisabledWALProvider.java   |   3 +-
 .../hadoop/hbase/wal/TestDisabledWAL.java       | 103 +++++++++++++++++++
 2 files changed, 104 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/a8228eaa/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
index 0c0f6dd..30ea651 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/wal/DisabledWALProvider.java
@@ -23,8 +23,6 @@ import java.util.List;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -164,6 +162,7 @@ class DisabledWALProvider implements WALProvider {
     @Override
     public long append(HTableDescriptor htd, HRegionInfo info, WALKey key, WALEdit edits,
         boolean inMemstore) throws IOException {
+      key.setWriteEntry(key.getMvcc().begin());
       if (!this.listeners.isEmpty()) {
         final long start = System.nanoTime();
         long len = 0;
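
The one-line addition above is the whole fix: the disabled provider's no-op append() never assigned an MVCC write entry to the WALKey, so a caller that blocks on the key's write entry or sequence id (the region-open path does this before it can finish opening) waited forever once hbase.regionserver.hlog.enabled was set to false. Calling key.setWriteEntry(key.getMvcc().begin()) hands those callers an entry they can complete. For context, a minimal sketch of that caller-side pattern (not the actual HRegion code; getWriteEntry() and completeAndWait() are assumed from the branch-1 WALKey and MultiVersionConcurrencyControl APIs):

      // Sketch only: append to the WAL, then block on the write entry it assigned.
      long txid = wal.append(htd, regionInfo, walKey, walEdit, true);
      // Before this patch a disabled WAL never set the entry, so this wait
      // (or the equivalent sequence-id fetch) never returned.
      MultiVersionConcurrencyControl.WriteEntry we = walKey.getWriteEntry();
      try {
        // ... apply the edits to the memstore under this write entry ...
      } finally {
        // Completing the entry advances the MVCC read point past this write.
        walKey.getMvcc().completeAndWait(we);
      }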

http://git-wip-us.apache.org/repos/asf/hbase/blob/a8228eaa/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java
new file mode 100644
index 0000000..d87bdea
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/wal/TestDisabledWAL.java
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.wal;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.testclassification.MediumTests;
+import org.apache.hadoop.hbase.testclassification.RegionServerTests;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
+
+@Category({ RegionServerTests.class, MediumTests.class })
+public class TestDisabledWAL {
+
+  @Rule
+  public TestName name = new TestName();
+
+  private static final Log LOG = LogFactory.getLog(TestDisabledWAL.class);
+  static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  private Table table;
+  private TableName tableName;
+  private byte[] fam = Bytes.toBytes("f1");
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    Configuration conf = TEST_UTIL.getConfiguration();
+    conf.setBoolean("hbase.regionserver.hlog.enabled", false);
+    try {
+      TEST_UTIL.startMiniCluster();
+    } catch (RuntimeException | IOException e) {
+      LOG.error("Master failed to start.", e);
+      fail("Failed to start cluster. Reason being: " + e.getCause().getMessage());
+    }
+  }
+
+  @AfterClass
+  public static void afterClass() throws Exception {
+    TEST_UTIL.shutdownMiniCluster();
+  }
+
+  @Before
+  public void setup() throws Exception {
+    tableName = TableName.valueOf(name.getMethodName().replaceAll("[^a-zA-Z0-9]", "_"));
+    LOG.info("Creating table " + tableName);
+    table = TEST_UTIL.createTable(tableName, fam);
+  }
+
+  @After
+  public void cleanup() throws Exception {
+    LOG.info("Deleting table " + tableName);
+    TEST_UTIL.deleteTable(tableName);
+  }
+
+  @Test
+  public void testDisabledWAL() throws Exception {
+    LOG.info("Writing data to table " + tableName);
+    Put p = new Put(Bytes.toBytes("row"));
+    p.addColumn(fam, Bytes.toBytes("qual"), Bytes.toBytes("val"));
+    table.put(p);
+
+    LOG.info("Flushing table " + tableName);
+    TEST_UTIL.flush(tableName);
+
+    LOG.info("Getting data from table " + tableName);
+    Get get = new Get(Bytes.toBytes("row"));
+
+    Result result = table.get(get);
+    assertNotNull(result.getValue(fam, Bytes.toBytes("qual")));
+  }
+}
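
The new test starts a mini cluster with hbase.regionserver.hlog.enabled=false and verifies that a table can be created, written to, flushed, and read back, i.e. that region open and writes no longer hang without a WAL. It should be runnable on its own from the hbase-server module with the usual single-test invocation, for example mvn test -Dtest=TestDisabledWAL -pl hbase-server (exact flags depend on the local build setup).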
