Author: suresh
Date: Wed Oct 17 19:01:30 2012
New Revision: 1399393

URL: http://svn.apache.org/viewvc?rev=1399393&view=rev
Log:
HADOOP-8900. BuiltInGzipDecompressor throws IOException - stored gzip size
doesn't match decompressed size. Merging change r1399377 from trunk.

Modified:
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1399393&r1=1399392&r2=1399393&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/CHANGES.txt Wed Oct 17 19:01:30 2012
@@ -89,10 +89,21 @@ Release 2.0.3-alpha - Unreleased 
 
     HADOOP-8881. FileBasedKeyStoresFactory initialization logging should be
     debug not info. (tucu)
 
+    HADOOP-8913. hadoop-metrics2.properties should give units in comment 
+    for sampling period. (Sandy Ryza via suresh)
+
+    HADOOP-8878. Uppercase namenode hostname causes hadoop dfs calls with 
+    webhdfs filesystem and fsck to fail when security is on.
+    (Arpit Gupta via suresh)
+
     HADOOP-8901. GZip and Snappy support may not work without unversioned
     libraries (Colin Patrick McCabe via todd)
 
-    HADOOP-8883. Anonymous fallback in KerberosAuthenticator is broken. (rkanter via tucu)
+    HADOOP-8883. Anonymous fallback in KerberosAuthenticator is broken.
+    (rkanter via tucu)
+
+    HADOOP-8900. BuiltInGzipDecompressor throws IOException - stored gzip size
+    doesn't match decompressed size. (Slavik Krassovsky via suresh)
 
 Release 2.0.2-alpha - 2012-09-07 
 

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java?rev=1399393&r1=1399392&r2=1399393&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java Wed Oct 17 19:01:30 2012
@@ -387,7 +387,7 @@ public class BuiltInGzipDecompressor imp
       copyBytesToLocal(n);       // modifies userBufLen, etc.
       if (localBufOff >= 4) {    // should be strictly ==
         long inputSize = readUIntLE(localBuf, 0);
-        if (inputSize != (inflater.getBytesWritten() & 0xffffffff)) {
+        if (inputSize != (inflater.getBytesWritten() & 0xffffffffL)) {
           throw new IOException(
             "stored gzip size doesn't match decompressed size");
         }
@@ -571,7 +571,7 @@ public class BuiltInGzipDecompressor imp
     return ((((long)(b[off+3] & 0xff) << 24) |
              ((long)(b[off+2] & 0xff) << 16) |
              ((long)(b[off+1] & 0xff) <<  8) |
-             ((long)(b[off]   & 0xff)      )) & 0xffffffff);
+             ((long)(b[off]   & 0xff)      )) & 0xffffffffL);
   }
 
 }
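
Why the one-character change above fixes the bug: in Java, 0xffffffff is an
int literal whose value is -1. Widening it to long for the & operation
sign-extends it to all ones, so the mask keeps all 64 bits and does nothing.
With the L suffix the literal is a long whose high 32 bits are zero, so the
mask keeps only the low 32 bits, which is what the comparison needs: gzip's
ISIZE trailer field stores the uncompressed size mod 2^32. The standalone
snippet below is not part of this patch (class name is illustrative); it only
demonstrates the pitfall:

public class MaskDemo {
  public static void main(String[] args) {
    long bytesWritten = 5L * 1024 * 1024 * 1024;  // 5 GiB, exceeds 2^32

    // 0xffffffff is an int literal equal to -1; widening it to long
    // sign-extends to all ones, so the mask is a no-op.
    long wrong = bytesWritten & 0xffffffff;       // 5368709120 (unchanged)

    // 0xffffffffL is a long literal; the mask keeps only the low 32 bits,
    // matching gzip's ISIZE field (uncompressed size mod 2^32).
    long right = bytesWritten & 0xffffffffL;      // 1073741824

    System.out.println(wrong + " vs " + right);
  }
}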

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1399393&r1=1399392&r2=1399393&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Wed Oct 17 19:01:30 2012
@@ -719,6 +719,55 @@ public class TestCodec {
     }
   }
 
+  @Test
+  public void testGzipLongOverflow() throws IOException {
+    LOG.info("testGzipLongOverflow");
+
+    // Don't use native libs for this test.
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    assertFalse("ZlibFactory is using native libs against request",
+        ZlibFactory.isNativeZlibLoaded(conf));
+
+    // Ensure that the CodecPool has a BuiltInZlibInflater in it.
+    Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
+    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
+    assertTrue("ZlibFactory returned unexpected inflator",
+        zlibDecompressor instanceof BuiltInZlibInflater);
+    CodecPool.returnDecompressor(zlibDecompressor);
+
+    // Now create a GZip text file.
+    String tmpDir = System.getProperty("test.build.data", "/tmp/");
+    Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
+    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
+      new GZIPOutputStream(new FileOutputStream(f.toString()))));
+
+    final int NBUF = 1024 * 4 + 1;
+    final char[] buf = new char[1024 * 1024];
+    for (int i = 0; i < buf.length; i++) buf[i] = '\0';
+    for (int i = 0; i < NBUF; i++) {
+      bw.write(buf);
+    }
+    bw.close();
+
+    // Now read it back, using the CodecPool to establish the
+    // decompressor to use.
+    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
+    CompressionCodec codec = ccf.getCodec(f);
+    Decompressor decompressor = CodecPool.getDecompressor(codec);
+    FileSystem fs = FileSystem.getLocal(conf);
+    InputStream is = fs.open(f);
+    is = codec.createInputStream(is, decompressor);
+    BufferedReader br = new BufferedReader(new InputStreamReader(is));
+    for (int j = 0; j < NBUF; j++) {
+      int n = br.read(buf);
+      assertEquals("got wrong read length!", n, buf.length);
+      for (int i = 0; i < buf.length; i++)
+        assertEquals("got wrong byte!", buf[i], '\0');
+    }
+    br.close();
+  }
+
   public void testGzipCodecWrite(boolean useNative) throws IOException {
     // Create a gzipped file using a compressor from the CodecPool,
     // and try to read it back via the regular GZIPInputStream.
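
The new test triggers the overflow deliberately: it writes NBUF = 4097
buffers of 1 MiB of NUL characters (each NUL encodes as a single byte in the
usual default charsets), just over 4 GiB in total, so
inflater.getBytesWritten() exceeds 2^32 while the gzip trailer's ISIZE field
holds only the size mod 2^32. A standalone sketch of the arithmetic, not part
of the patch:

public class SizeDemo {
  public static void main(String[] args) {
    final long NBUF = 4 * 1024 + 1;          // 4097 writes...
    final long BUFLEN = 1024 * 1024;         // ...of 1 MiB each
    long total = NBUF * BUFLEN;              // 4296015872 bytes, > 2^32
    System.out.println(total > (1L << 32));  // true: wraparound is exercised
    System.out.println(total & 0xffffffffL); // 1048576: what ISIZE records
  }
}

With the old int mask, the decompressor compared ISIZE (1048576) against the
full 64-bit count (4296015872) and threw "stored gzip size doesn't match
decompressed size"; with 0xffffffffL both sides agree mod 2^32.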

Modified: hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java?rev=1399393&r1=1399392&r2=1399393&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java (original)
+++ hadoop/common/branches/branch-2/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java Wed Oct 17 19:01:30 2012
@@ -141,7 +141,7 @@ public class TestVLong extends TestCase 
       int shift = rng.nextInt(Long.SIZE) + 1;
       long mask = (1L << shift) - 1;
       long a = ((long) rng.nextInt()) << 32;
-      long b = ((long) rng.nextInt()) & 0xffffffff;
+      long b = ((long) rng.nextInt()) & 0xffffffffL;
       data[i] = (a + b) & mask;
     }
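
The TestVLong change fixes the same literal pitfall in test data generation:
the low half of the random 64-bit value comes from rng.nextInt(), which is
negative half the time, and without the L suffix the sign bits survive the
mask and corrupt the high word when added to a. A standalone illustration,
not from the patch, with values chosen for clarity:

public class LowWordDemo {
  public static void main(String[] args) {
    long a = 0x12345678L << 32;  // stand-in for ((long) rng.nextInt()) << 32
    int lo = -2;                 // negative, as rng.nextInt() can return

    long wrongB = ((long) lo) & 0xffffffff;   // sign-extends: 0xfffffffffffffffe
    long rightB = ((long) lo) & 0xffffffffL;  // zero-extends: 0x00000000fffffffe

    // The unmasked sign bits decrement the high word of the sum.
    System.out.println(Long.toHexString(a + wrongB)); // 12345677fffffffe
    System.out.println(Long.toHexString(a + rightB)); // 12345678fffffffe
  }
}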
     

