Author: apurtell
Date: Sat Nov 28 22:59:09 2009
New Revision: 885162

URL: http://svn.apache.org/viewvc?rev=885162&view=rev
Log:
HBASE-1995 Add configurable max value size check

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/conf/hbase-default.xml
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=885162&r1=885161&r2=885162&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Sat Nov 28 22:59:09 2009
@@ -209,6 +209,8 @@
    HBASE-2012  [EC2] LZO support
    HBASE-2011  Add zktop like output to HBase's master UI (Lars George via
                Andrew Purtell)
+   HBASE-1995  Add configurable max value size check (Lars George via Andrew
+               Purtell)
 
   NEW FEATURES
    HBASE-1901  "General" partitioner for "hbase-48" bulk (behind the api, write

Modified: hadoop/hbase/trunk/conf/hbase-default.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-default.xml?rev=885162&r1=885161&r2=885162&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-default.xml (original)
+++ hadoop/hbase/trunk/conf/hbase-default.xml Sat Nov 28 22:59:09 2009
@@ -137,6 +137,17 @@
     </description>
   </property>
   <property>
+    <name>hbase.client.keyvalue.maxsize</name>
+    <value>-1</value>
+    <description>Specifies the combined maximum allowed size of a KeyValue
+    instance. This sets an upper boundary for a single entry saved in a
+    storage file. Since KeyValues cannot be split, this helps to avoid a
+    region that cannot be split any further because its data is too large.
+    It seems wise to set this to a fraction of the maximum region size.
+    Setting it to zero or less disables the check.
+    </description>
+  </property>
+  <property>
     <name>hbase.regionserver.lease.period</name>
     <value>60000</value>
     <description>HRegion server lease period in milliseconds. Default is

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=885162&r1=885161&r2=885162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/client/HTable.java Sat Nov 28 22:59:09 2009
@@ -36,6 +36,7 @@
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.NotServingRegionException;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
@@ -62,6 +63,7 @@
   private boolean autoFlush;
   private long currentWriteBufferSize;
   protected int scannerCaching;
+  private int maxKeyValueSize;
 
   /**
    * Creates an object to access a HBase table
@@ -121,6 +123,7 @@
     this.autoFlush = true;
     this.currentWriteBufferSize = 0;
     this.scannerCaching = conf.getInt("hbase.client.scanner.caching", 1);
+    this.maxKeyValueSize = conf.getInt("hbase.client.keyvalue.maxsize", -1);
   }
 
   /**
@@ -602,9 +605,18 @@
    * @throws IllegalArgumentException
    */
   private void validatePut(final Put put) throws IllegalArgumentException{
-    if(put.isEmpty()) {
+    if (put.isEmpty()) {
       throw new IllegalArgumentException("No columns to insert");
     }
+    if (maxKeyValueSize > 0) {
+      for (List<KeyValue> list : put.getFamilyMap().values()) {
+        for (KeyValue kv : list) {
+          if (kv.getLength() > maxKeyValueSize) {
+            throw new IllegalArgumentException("KeyValue size too large");
+          }
+        }
+      }
+    }
   }
 
   /**

Modified: hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java?rev=885162&r1=885161&r2=885162&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java (original)
+++ hadoop/hbase/trunk/src/test/org/apache/hadoop/hbase/client/TestFromClientSide.java Sat Nov 28 22:59:09 2009
@@ -32,6 +32,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
@@ -39,8 +40,6 @@
 import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.TableExistsException;
-import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.filter.BinaryComparator;
 import org.apache.hadoop.hbase.filter.CompareFilter;
 import org.apache.hadoop.hbase.filter.Filter;
@@ -401,6 +400,26 @@
   }
 
   @Test
+  public void testMaxKeyValueSize() throws Exception {
+    byte [] TABLE = Bytes.toBytes("testMaxKeyValueSize");
+    HBaseConfiguration conf = TEST_UTIL.getConfiguration();
+    String oldMaxSize = conf.get("hbase.client.keyvalue.maxsize");
+    HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);
+    byte[] value = new byte[4 * 1024 * 1024];
+    Put put = new Put(ROW);
+    put.add(FAMILY, QUALIFIER, value);
+    ht.put(put);
+    try {
+      conf.setInt("hbase.client.keyvalue.maxsize", 2 * 1024 * 1024);
+      put = new Put(ROW);
+      put.add(FAMILY, QUALIFIER, VALUE);
+      ht.put(put);
+      throw new IOException("Inserting a too large KeyValue worked, should throw exception");
+    } catch(Exception e) {}
+    conf.set("hbase.client.keyvalue.maxsize", oldMaxSize);
+  }
+
+  @Test
   public void testFilters() throws Exception {
     byte [] TABLE = Bytes.toBytes("testFilters");
     HTable ht = TEST_UTIL.createTable(TABLE, FAMILY);


Reply via email to