hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/master ba3d474f8 -> 940e5404d HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/940e5404 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/940e5404 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/940e5404 Branch: refs/heads/master Commit: 940e5404df5b79f81dbb7c04a615141b02fc2b22 Parents: ba3d474 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: stack Committed: Wed Oct 14 13:13:01 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 26 ++-- 2 files changed, 23 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/940e5404/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 056ecbc..734227c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.BoundedByteBufferPool; @@ -154,6 +156,8 @@ public class IPCUtil { // If no cells, don't mess around. 
Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/940e5404/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index 1b2ab5d..d91513e 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.ByteOrder; import java.nio.channels.Channels; @@ -37,6 +38,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -69,6 +74,9 @@ public class ByteBufferOutputStream extends OutputStream { } private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); } @@ -82,13 +90,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE)); - newSize = Math.max(newSize, buf.position() + extra); - ByteBuffer newBuf = allocate(newSize, buf.isDirect()); +long capacityNeeded = buf.position() + (long) extra; +if (capacityNeeded > buf.limit()) { + // guarantee it's possible to fit +
hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/branch-1 0818df79d -> 11066d045 HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/11066d04 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/11066d04 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/11066d04 Branch: refs/heads/branch-1 Commit: 11066d04593a85742124c3e22fef17b4d9ec6883 Parents: 0818df7 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: stack Committed: Wed Oct 14 13:13:35 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 26 ++-- 2 files changed, 23 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/11066d04/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 056ecbc..734227c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.BoundedByteBufferPool; @@ -154,6 +156,8 @@ public class IPCUtil { // If no cells, don't mess 
around. Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/11066d04/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index af12113..a6647f6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; @@ -35,6 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -66,6 +71,9 @@ public class ByteBufferOutputStream extends OutputStream { } private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); } @@ -79,13 +87,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE)); - newSize = Math.max(newSize, buf.position() + extra); - ByteBuffer newBuf = allocate(newSize, buf.isDirect()); +long capacityNeeded = buf.position() + (long) extra; +if (capacityNeeded > buf.limit()) { + // guarantee
hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/branch-1.2 6387a72a5 -> 34d7971d1 HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/34d7971d Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/34d7971d Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/34d7971d Branch: refs/heads/branch-1.2 Commit: 34d7971d1a7bed8f732cceb382e2cc52e4a6954c Parents: 6387a72 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: stack Committed: Wed Oct 14 13:14:06 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 26 ++-- 2 files changed, 23 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/34d7971d/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 056ecbc..734227c 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.BoundedByteBufferPool; @@ -154,6 +156,8 @@ public class IPCUtil { // If no cells, don't mess 
around. Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/34d7971d/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index af12113..a6647f6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; @@ -35,6 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -66,6 +71,9 @@ public class ByteBufferOutputStream extends OutputStream { } private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); } @@ -79,13 +87,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE)); - newSize = Math.max(newSize, buf.position() + extra); - ByteBuffer newBuf = allocate(newSize, buf.isDirect()); +long capacityNeeded = buf.position() + (long) extra; +if (capacityNeeded > buf.limit()) { + // guaran
hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/branch-1.1 8ad3bf046 -> 294ae7487 HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/294ae748 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/294ae748 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/294ae748 Branch: refs/heads/branch-1.1 Commit: 294ae74874e1f06c7df74ba205cd5168f4b58d30 Parents: 8ad3bf0 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: stack Committed: Wed Oct 14 13:14:25 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 26 ++-- 2 files changed, 23 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/294ae748/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 7c6c9ba..4af7262 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.codec.Codec; import org.apache.hadoop.hbase.io.BoundedByteBufferPool; @@ -153,6 +155,8 @@ public class IPCUtil { // If no cells, don't mess 
around. Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/294ae748/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index af12113..a6647f6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; @@ -35,6 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -66,6 +71,9 @@ public class ByteBufferOutputStream extends OutputStream { } private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); } @@ -79,13 +87,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE)); - newSize = Math.max(newSize, buf.position() + extra); - ByteBuffer newBuf = allocate(newSize, buf.isDirect()); +long capacityNeeded = buf.position() + (long) extra; +if (capacityNeeded > buf.limit()) { + // guaran
hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/0.98 73e7722b8 -> 5f3a43a2c HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5f3a43a2 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5f3a43a2 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5f3a43a2 Branch: refs/heads/0.98 Commit: 5f3a43a2c412ec8fb23f92b120c9783a8ad5f0e7 Parents: 73e7722 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: Andrew Purtell Committed: Fri Oct 23 16:44:35 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 26 ++-- 2 files changed, 23 insertions(+), 7 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/5f3a43a2/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index 2210cec..f143203 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -30,6 +31,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.hbase.codec.Codec; @@ -154,6 +156,8 @@ class IPCUtil { // If no cells, don't mess around. 
Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/5f3a43a2/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index af12113..a6647f6 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; @@ -35,6 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -66,6 +71,9 @@ public class ByteBufferOutputStream extends OutputStream { } private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); } @@ -79,13 +87,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE)); - newSize = Math.max(newSize, buf.position() + extra); - ByteBuffer newBuf = allocate(newSize, buf.isDirect()); +long capacityNeeded = buf.position() + (long) extra; +if (capacityNeeded > buf.limit()) { + // guarantee it's possible to f
hbase git commit: HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman)
Repository: hbase Updated Branches: refs/heads/branch-1.0 c39d066fc -> 1a7e4c073 HBASE-14598 ByteBufferOutputStream grows its HeapByteBuffer beyond JVM limitations (Ian Friedman) Conflicts: hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java Project: http://git-wip-us.apache.org/repos/asf/hbase/repo Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/1a7e4c07 Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/1a7e4c07 Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/1a7e4c07 Branch: refs/heads/branch-1.0 Commit: 1a7e4c07368f39c5f63894bbb25d03800aefe814 Parents: c39d066 Author: stack Authored: Wed Oct 14 13:13:01 2015 -0700 Committer: Andrew Purtell Committed: Fri Oct 23 16:47:27 2015 -0700 -- .../org/apache/hadoop/hbase/ipc/IPCUtil.java| 4 +++ .../hadoop/hbase/io/ByteBufferOutputStream.java | 33 +--- 2 files changed, 26 insertions(+), 11 deletions(-) -- http://git-wip-us.apache.org/repos/asf/hbase/blob/1a7e4c07/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java -- diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java index b7e7728..8b7be44 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/IPCUtil.java @@ -22,6 +22,7 @@ import java.io.DataInput; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import org.apache.commons.io.IOUtils; @@ -31,6 +32,7 @@ import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; +import org.apache.hadoop.hbase.DoNotRetryIOException; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.codec.Codec; import 
org.apache.hadoop.hbase.io.ByteBufferOutputStream; @@ -126,6 +128,8 @@ public class IPCUtil { // If no cells, don't mess around. Just return null (could be a bunch of existence checking // gets or something -- stuff that does not return a cell). if (count == 0) return null; +} catch (BufferOverflowException e) { + throw new DoNotRetryIOException(e); } finally { os.close(); if (poolCompressor != null) CodecPool.returnCompressor(poolCompressor); http://git-wip-us.apache.org/repos/asf/hbase/blob/1a7e4c07/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java -- diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java index 257b850..9eee6b2 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/ByteBufferOutputStream.java @@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.io; import java.io.IOException; import java.io.OutputStream; +import java.nio.BufferOverflowException; import java.nio.ByteBuffer; import java.nio.channels.Channels; import java.nio.channels.WritableByteChannel; @@ -35,6 +36,10 @@ import org.apache.hadoop.hbase.util.Bytes; @InterfaceAudience.Public @InterfaceStability.Evolving public class ByteBufferOutputStream extends OutputStream { + + // Borrowed from openJDK: + // http://grepcode.com/file/repository.grepcode.com/java/root/jdk/openjdk/8-b132/java/util/ArrayList.java#221 + private static final int MAX_ARRAY_SIZE = Integer.MAX_VALUE - 8; protected ByteBuffer buf; @@ -54,6 +59,13 @@ public class ByteBufferOutputStream extends OutputStream { return buf.position(); } + private static ByteBuffer allocate(final int capacity, final boolean useDirectByteBuffer) { +if (capacity > MAX_ARRAY_SIZE) { // avoid OutOfMemoryError + throw new BufferOverflowException(); +} +return useDirectByteBuffer? 
ByteBuffer.allocateDirect(capacity): ByteBuffer.allocate(capacity); + } + /** * This flips the underlying BB so be sure to use it _last_! * @return ByteBuffer */ @@ -64,18 +76,17 @@ public class ByteBufferOutputStream extends OutputStream { } private void checkSizeAndGrow(int extra) { -if ( (buf.position() + extra) > buf.limit()) { - // size calculation is complex, because we could overflow negative, - // and/or not allocate enough space. this fixes that. - int newSize = (int)Math.min((((long)buf.capacity()) * 2), - (long)(Integer.MAX_VALUE))