[hadoop] branch trunk updated: HDFS-14535. The default 8KB buffer in requestFileDescriptors#BufferedOutputStream is causing lots of heap allocation in HBase when using short-circuit read

2019-06-04 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new ea3b0a1  HDFS-14535. The default 8KB buffer in 
requestFileDescriptors#BufferedOutputStream is causing lots of heap allocation 
in HBase when using short-circuit read
ea3b0a1 is described below

commit ea3b0a184405c3feca024a560807ea215b6858b9
Author: huzheng 
AuthorDate: Tue Jun 4 10:54:46 2019 +0800

HDFS-14535. The default 8KB buffer in 
requestFileDescriptors#BufferedOutputStream is causing lots of heap allocation 
in HBase when using short-circuit read
---
 .../java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java| 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
index 8e592f4..88b1686 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/impl/BlockReaderFactory.java
@@ -86,6 +86,7 @@ import org.slf4j.LoggerFactory;
 @InterfaceAudience.Private
 public class BlockReaderFactory implements ShortCircuitReplicaCreator {
   static final Logger LOG = LoggerFactory.getLogger(BlockReaderFactory.class);
+  private static final int SMALL_BUFFER_SIZE = 512;
 
   public static class FailureInjector {
 public void injectRequestFileDescriptorsFailure() throws IOException {
@@ -582,7 +583,7 @@ public class BlockReaderFactory implements 
ShortCircuitReplicaCreator {
   Slot slot) throws IOException {
 ShortCircuitCache cache = clientContext.getShortCircuitCache();
 final DataOutputStream out =
-new DataOutputStream(new BufferedOutputStream(peer.getOutputStream()));
+new DataOutputStream(new BufferedOutputStream(peer.getOutputStream(), 
SMALL_BUFFER_SIZE));
 SlotId slotId = slot == null ? null : slot.getSlotId();
 new Sender(out).requestShortCircuitFds(block, token, slotId, 1,
 failureInjector.getSupportsReceiptVerification());


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[hadoop] branch trunk updated: HDFS-14482: Crash when using libhdfs with bad classpath

2019-05-14 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new e2dfdae  HDFS-14482: Crash when using libhdfs with bad classpath
e2dfdae is described below

commit e2dfdaee7c23cf18bdd2a6dacc979c248407c23a
Author: Sahil Takiar 
AuthorDate: Mon May 13 13:29:52 2019 -0500

HDFS-14482: Crash when using libhdfs with bad classpath
---
 .../src/main/native/libhdfs/jni_helper.c  | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
index 405ee3f..837c7e0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/jni_helper.c
@@ -551,6 +551,10 @@ JNIEnv* getJNIEnv(void)
 state->env = getGlobalJNIEnv();
 mutexUnlock();
 
+if (!state->env) {
+goto fail;
+}
+
 jthrowable jthr = NULL;
 jthr = initCachedClasses(state->env);
 if (jthr) {
@@ -558,10 +562,6 @@ JNIEnv* getJNIEnv(void)
 "initCachedClasses failed");
   goto fail;
 }
-
-if (!state->env) {
-  goto fail;
-}
 return state->env;
 
 fail:


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[hadoop] branch trunk updated: HDFS-3246: pRead equivalent for direct read path (#597)

2019-04-30 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 4877f0a  HDFS-3246: pRead equivalent for direct read path (#597)
4877f0a is described below

commit 4877f0aa518832c37a06e6d3bd2c9552fc3141dc
Author: Sahil Takiar 
AuthorDate: Tue Apr 30 16:52:16 2019 -0500

HDFS-3246: pRead equivalent for direct read path (#597)

HDFS-3246: pRead equivalent for direct read path

Contributed by Sahil Takiar
---
 .../apache/hadoop/crypto/CryptoInputStream.java| 292 ++---
 .../hadoop/fs/ByteBufferPositionedReadable.java|  66 +
 .../org/apache/hadoop/fs/ByteBufferReadable.java   |  17 +-
 .../org/apache/hadoop/fs/FSDataInputStream.java|  15 +-
 .../org/apache/hadoop/fs/StreamCapabilities.java   |   6 +
 .../hadoop/crypto/CryptoStreamsTestBase.java   | 185 -
 .../apache/hadoop/crypto/TestCryptoStreams.java|  35 ++-
 .../hadoop/crypto/TestCryptoStreamsForLocalFS.java |  10 +
 .../hadoop/crypto/TestCryptoStreamsNormal.java |  10 +
 .../org/apache/hadoop/hdfs/DFSInputStream.java |  14 +-
 .../src/main/native/libhdfs-tests/hdfs_test.h  |  18 ++
 .../main/native/libhdfs-tests/test_libhdfs_ops.c   | 142 +-
 .../src/main/native/libhdfs/hdfs.c | 210 +--
 .../apache/hadoop/hdfs/TestByteBufferPread.java| 269 +++
 14 files changed, 1134 insertions(+), 155 deletions(-)

diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
index 67e8690..80364ce 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
@@ -33,6 +33,7 @@ import java.util.concurrent.ConcurrentLinkedQueue;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.ByteBufferPositionedReadable;
 import org.apache.hadoop.fs.ByteBufferReadable;
 import org.apache.hadoop.fs.CanSetDropBehind;
 import org.apache.hadoop.fs.CanSetReadahead;
@@ -64,7 +65,8 @@ import org.apache.hadoop.util.StringUtils;
 public class CryptoInputStream extends FilterInputStream implements 
 Seekable, PositionedReadable, ByteBufferReadable, HasFileDescriptor, 
 CanSetDropBehind, CanSetReadahead, HasEnhancedByteBufferAccess, 
-ReadableByteChannel, CanUnbuffer, StreamCapabilities {
+ReadableByteChannel, CanUnbuffer, StreamCapabilities,
+ByteBufferPositionedReadable {
   private final byte[] oneByteBuf = new byte[1];
   private final CryptoCodec codec;
   private final Decryptor decryptor;
@@ -327,19 +329,39 @@ public class CryptoInputStream extends FilterInputStream 
implements
   public int read(long position, byte[] buffer, int offset, int length)
   throws IOException {
 checkStream();
-try {
-  final int n = ((PositionedReadable) in).read(position, buffer, offset, 
-  length);
-  if (n > 0) {
-// This operation does not change the current offset of the file
-decrypt(position, buffer, offset, n);
-  }
-  
-  return n;
-} catch (ClassCastException e) {
+if (!(in instanceof PositionedReadable)) {
   throw new UnsupportedOperationException("This stream does not support " +
   "positioned read.");
 }
+final int n = ((PositionedReadable) in).read(position, buffer, offset,
+length);
+if (n > 0) {
+  // This operation does not change the current offset of the file
+  decrypt(position, buffer, offset, n);
+}
+
+return n;
+  }
+
+  /**
+   * Positioned read using {@link ByteBuffer}s. This method is thread-safe.
+   */
+  @Override
+  public int read(long position, final ByteBuffer buf)
+  throws IOException {
+checkStream();
+if (!(in instanceof ByteBufferPositionedReadable)) {
+  throw new UnsupportedOperationException("This stream does not support " +
+  "positioned reads with byte buffers.");
+}
+int bufPos = buf.position();
+final int n = ((ByteBufferPositionedReadable) in).read(position, buf);
+if (n > 0) {
+  // This operation does not change the current offset of the file
+  decrypt(position, buf, n, bufPos);
+}
+
+return n;
   }
   
   /**
@@ -348,49 +370,124 @@ public class CryptoInputStream extends FilterInputStream 
implements
*/
   private void decrypt(long position, byte[] buffer, int offset, int length) 
   throws IOException {
-ByteBuffer inBuffer = getBuffer();
-By

[hadoop] branch trunk updated: HADOOP-16179. hadoop-common pom should not depend on kerb-simplekdc

2019-04-10 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 65deb1a  HADOOP-16179. hadoop-common pom should not depend on 
kerb-simplekdc
65deb1a is described below

commit 65deb1ac42a9e2a19cfff0b394f00cb0d3b75d38
Author: Todd Lipcon 
AuthorDate: Wed Apr 10 08:47:37 2019 -0700

HADOOP-16179. hadoop-common pom should not depend on kerb-simplekdc

The hadoop-common pom currently has a dependency on kerb-simplekdc. In
fact, the only classes used from Kerby are in kerb-core and kerb-util
(which is a transitive dependency from kerb-core). Depending on
kerb-simplekdc pulls a bunch of other unnecessary classes into the
hadoop-common classpath.

This changes the hadoop-common pom to depend only on kerb-core.

hadoop-minikdc already had the appropriate dependency on kerb-simplekdc
so it continues to pull in what it needs.

Signed-off-by: Todd Lipcon 
---
 hadoop-common-project/hadoop-common/pom.xml | 2 +-
 hadoop-project/pom.xml  | 8 +++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/pom.xml 
b/hadoop-common-project/hadoop-common/pom.xml
index a62f7ae..19044a5 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -308,7 +308,7 @@
 
 
   org.apache.kerby
-  kerb-simplekdc
+  kerb-core
 
 
   com.fasterxml.jackson.core
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index aac0315..7380f15 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -102,6 +102,7 @@
 2.0.0-M21
 1.0.0-M33
 
+1.0.1
 1.0-alpha-1
 3.3.1
 2.4.12
@@ -1463,7 +1464,12 @@
 
   org.apache.kerby
   kerb-simplekdc
-  1.0.1
+  ${kerby.version}
+
+
+  org.apache.kerby
+  kerb-core
+  ${kerby.version}
 
 
   org.apache.geronimo.specs


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[hadoop] branch trunk updated: HDFS-14394: Add -std=c99 / -std=gnu99 to libhdfs compile flags

2019-04-03 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 3b0c501  HDFS-14394: Add -std=c99 / -std=gnu99 to libhdfs compile flags
3b0c501 is described below

commit 3b0c5016b2931e3b87153c1cf368314d229e79ff
Author: Sahil Takiar 
AuthorDate: Wed Apr 3 10:55:36 2019 -0700

HDFS-14394: Add -std=c99 / -std=gnu99 to libhdfs compile flags

Signed-off-by: Todd Lipcon 
---
 hadoop-common-project/hadoop-common/HadoopCommon.cmake | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/hadoop-common-project/hadoop-common/HadoopCommon.cmake 
b/hadoop-common-project/hadoop-common/HadoopCommon.cmake
index 63de1de..4de70ac 100644
--- a/hadoop-common-project/hadoop-common/HadoopCommon.cmake
+++ b/hadoop-common-project/hadoop-common/HadoopCommon.cmake
@@ -193,7 +193,6 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
 elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
 # Solaris flags. 64-bit compilation is mandatory, and is checked earlier.
 hadoop_add_compiler_flags("-m64 -D_POSIX_C_SOURCE=200112L -D__EXTENSIONS__ 
-D_POSIX_PTHREAD_SEMANTICS")
-set(CMAKE_C_FLAGS "-std=gnu99 ${CMAKE_C_FLAGS}")
 set(CMAKE_CXX_FLAGS "-std=gnu++98 ${CMAKE_CXX_FLAGS}")
 hadoop_add_linker_flags("-m64")
 
@@ -211,3 +210,6 @@ elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
 message(FATAL_ERROR "Unrecognised CMAKE_SYSTEM_PROCESSOR 
${CMAKE_SYSTEM_PROCESSOR}")
 endif()
 endif()
+
+# Set GNU99 as the C standard to use
+set(CMAKE_C_FLAGS "-std=gnu99 ${CMAKE_C_FLAGS}")
\ No newline at end of file


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[hadoop] branch trunk updated: HDFS-14304: High lock contention on hdfsHashMutex in libhdfs

2019-03-26 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 18c57cf  HDFS-14304: High lock contention on hdfsHashMutex in libhdfs
18c57cf is described below

commit 18c57cf0464f4d1fa95899d75b2f59cae33c7c33
Author: Sahil Takiar 
AuthorDate: Tue Mar 19 12:20:56 2019 -0500

HDFS-14304: High lock contention on hdfsHashMutex in libhdfs

This closes #595

Signed-off-by: Todd Lipcon 
---
 .../src/main/native/libhdfs-tests/CMakeLists.txt   |   5 +-
 .../main/native/libhdfs-tests/native_mini_dfs.c|  42 +-
 .../src/main/native/libhdfs-tests/test_htable.c| 100 -
 .../src/main/native/libhdfs/CMakeLists.txt |   2 +-
 .../src/main/native/libhdfs/common/htable.c| 287 -
 .../src/main/native/libhdfs/common/htable.h| 161 ---
 .../src/main/native/libhdfs/exception.c|   6 +-
 .../src/main/native/libhdfs/hdfs.c | 464 ++---
 .../src/main/native/libhdfs/jclasses.c | 136 ++
 .../src/main/native/libhdfs/jclasses.h | 112 +
 .../src/main/native/libhdfs/jni_helper.c   | 223 +-
 .../src/main/native/libhdfs/jni_helper.h   |  37 +-
 .../src/main/native/libhdfs/os/mutexes.h   |   6 +-
 .../src/main/native/libhdfs/os/posix/mutexes.c |   2 +-
 .../native/libhdfs/os/posix/thread_local_storage.c |  10 +-
 .../src/main/native/libhdfs/os/windows/mutexes.c   |   4 +-
 16 files changed, 656 insertions(+), 941 deletions(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/CMakeLists.txt
 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/CMakeLists.txt
index 08fc030..f16cc9e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/CMakeLists.txt
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/CMakeLists.txt
@@ -29,8 +29,8 @@ include_directories(
 
 add_library(native_mini_dfs
 native_mini_dfs.c
-../libhdfs/common/htable.c
 ../libhdfs/exception.c
+../libhdfs/jclasses.c
 ../libhdfs/jni_helper.c
 ${OS_DIR}/mutexes.c
 ${OS_DIR}/thread_local_storage.c
@@ -39,6 +39,3 @@ add_library(native_mini_dfs
 add_executable(test_native_mini_dfs test_native_mini_dfs.c)
 target_link_libraries(test_native_mini_dfs native_mini_dfs ${JAVA_JVM_LIBRARY})
 add_test(test_test_native_mini_dfs test_native_mini_dfs)
-
-add_executable(test_htable ../libhdfs/common/htable.c test_htable.c)
-target_link_libraries(test_htable ${OS_LINK_LIBRARIES})
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/native_mini_dfs.c
 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/native_mini_dfs.c
index 6938109..3af56f1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/native_mini_dfs.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs-tests/native_mini_dfs.c
@@ -17,6 +17,7 @@
  */
 
 #include "exception.h"
+#include "jclasses.h"
 #include "jni_helper.h"
 #include "native_mini_dfs.h"
 #include "platform.h"
@@ -36,9 +37,7 @@
 
 #define MINIDFS_CLUSTER_BUILDER "org/apache/hadoop/hdfs/MiniDFSCluster$Builder"
 #define MINIDFS_CLUSTER "org/apache/hadoop/hdfs/MiniDFSCluster"
-#define HADOOP_CONF "org/apache/hadoop/conf/Configuration"
 #define HADOOP_NAMENODE "org/apache/hadoop/hdfs/server/namenode/NameNode"
-#define JAVA_INETSOCKETADDRESS "java/net/InetSocketAddress"
 
 struct NativeMiniDfsCluster {
 /**
@@ -60,8 +59,7 @@ static int hdfsDisableDomainSocketSecurity(void)
   errno = EINTERNAL;
   return -1;
 }
-jthr = invokeMethod(env, NULL, STATIC, NULL,
-"org/apache/hadoop/net/unix/DomainSocket",
+jthr = invokeMethod(env, NULL, STATIC, NULL, JC_DOMAIN_SOCKET,
 "disableBindPathValidation", "()V");
 if (jthr) {
 errno = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
@@ -126,11 +124,6 @@ struct NativeMiniDfsCluster* nmdCreate(struct 
NativeMiniDfsConf *conf)
 "nmdCreate: new Configuration");
 goto error;
 }
-if (jthr) {
-printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
-  "nmdCreate: Configuration::setBoolean");
-goto error;
-}
 // Disable 'minimum block size' -- it's annoying in tests.
 (*env)->DeleteLocalRef(env, jconfStr);
 jconfStr = NULL;
@@ -140,8 +133,9 @@ struct NativeMiniDfsCluster* nmdCreate(struct 
NativeMiniDfsConf *conf)
   "nmdCreate: new String");
 goto error;
 }
-jthr = in

[hadoop] branch trunk updated: HDFS-14348: Fix JNI exception handling issues in libhdfs

2019-03-26 Thread todd
This is an automated email from the ASF dual-hosted git repository.

todd pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new fe29b39  HDFS-14348: Fix JNI exception handling issues in libhdfs
fe29b39 is described below

commit fe29b3901be1b06db92379c7b7fac4954253e6e2
Author: Sahil Takiar 
AuthorDate: Thu Mar 21 20:53:01 2019 -0700

HDFS-14348: Fix JNI exception handling issues in libhdfs

This closes #600

Signed-off-by: Todd Lipcon 
---
 .../src/main/native/libhdfs/hdfs.c | 55 +
 .../src/main/native/libhdfs/jni_helper.c   |  8 +-
 .../native/libhdfs/os/posix/thread_local_storage.c | 94 ++
 3 files changed, 108 insertions(+), 49 deletions(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
index 41caffd..ec0ad4b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/hdfs.c
@@ -2491,6 +2491,8 @@ int hadoopRzOptionsSetByteBufferPool(
 JNIEnv *env;
 jthrowable jthr;
 jobject byteBufferPool = NULL;
+jobject globalByteBufferPool = NULL;
+int ret;
 
 env = getJNIEnv();
 if (!env) {
@@ -2507,15 +2509,37 @@ int hadoopRzOptionsSetByteBufferPool(
   if (jthr) {
   printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
   "hadoopRzOptionsSetByteBufferPool(className=%s): ", className);
-  errno = EINVAL;
-  return -1;
+  ret = EINVAL;
+  goto done;
   }
-}
-if (opts->byteBufferPool) {
-// Delete any previous ByteBufferPool we had.
+  // Only set opts->byteBufferPool if creating a global reference is
+  // successful
+  globalByteBufferPool = (*env)->NewGlobalRef(env, byteBufferPool);
+  if (!globalByteBufferPool) {
+  printPendingExceptionAndFree(env, PRINT_EXC_ALL,
+  "hadoopRzOptionsSetByteBufferPool(className=%s): ",
+  className);
+  ret = EINVAL;
+  goto done;
+  }
+  // Delete any previous ByteBufferPool we had before setting a new one.
+  if (opts->byteBufferPool) {
+  (*env)->DeleteGlobalRef(env, opts->byteBufferPool);
+  }
+  opts->byteBufferPool = globalByteBufferPool;
+} else if (opts->byteBufferPool) {
+// If the specified className is NULL, delete any previous
+// ByteBufferPool we had.
 (*env)->DeleteGlobalRef(env, opts->byteBufferPool);
+opts->byteBufferPool = NULL;
+}
+ret = 0;
+done:
+destroyLocalReference(env, byteBufferPool);
+if (ret) {
+errno = ret;
+return -1;
 }
-opts->byteBufferPool = (*env)->NewGlobalRef(env, byteBufferPool);
 return 0;
 }
 
@@ -2570,8 +2594,7 @@ static jthrowable hadoopRzOptionsGetEnumSet(JNIEnv *env,
 } else {
 jclass clazz = (*env)->FindClass(env, READ_OPTION);
 if (!clazz) {
-jthr = newRuntimeError(env, "failed "
-"to find class for %s", READ_OPTION);
+jthr = getPendingExceptionAndClear(env);
 goto done;
 }
 jthr = invokeMethod(env, , STATIC, NULL,
@@ -2697,6 +2720,7 @@ static int translateZCRException(JNIEnv *env, jthrowable 
exc)
 }
 if (!strcmp(className, "java.lang.UnsupportedOperationException")) {
 ret = EPROTONOSUPPORT;
+destroyLocalReference(env, exc);
 goto done;
 }
 ret = printExceptionAndFree(env, exc, PRINT_EXC_ALL,
@@ -2896,8 +2920,9 @@ hdfsGetHosts(hdfsFS fs, const char *path, tOffset start, 
tOffset length)
 for (i = 0; i < jNumFileBlocks; ++i) {
 jFileBlock =
 (*env)->GetObjectArrayElement(env, jBlockLocations, i);
-if (!jFileBlock) {
-ret = printPendingExceptionAndFree(env, PRINT_EXC_ALL,
+jthr = (*env)->ExceptionOccurred(env);
+if (jthr || !jFileBlock) {
+ret = printExceptionAndFree(env, jthr, PRINT_EXC_ALL,
 "hdfsGetHosts(path=%s, start=%"PRId64", length=%"PRId64"):"
 "GetObjectArrayElement(%d)", path, start, length, i);
 goto done;
@@ -2930,8 +2955,9 @@ hdfsGetHosts(hdfsFS fs, const char *path, tOffset start, 
tOffset length)
 //Now parse each hostname
 for (j = 0; j < jNumBlockHosts; ++j) {
 jHost = (*env)->GetObjectArrayElement(env, jFileBlockHosts, j);
-if (!jHost) {
-ret = printPendingExceptionAndFree(env, PRINT_EXC_ALL,
+jthr = (*env)->ExceptionOccurred(env);
+if (jthr || !jHost)

hadoop git commit: HADOOP-15550. Avoid static initialization of ObjectMappers

2018-06-25 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/trunk c687a6617 -> 7a3c6e9c3


HADOOP-15550. Avoid static initialization of ObjectMappers


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7a3c6e9c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7a3c6e9c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7a3c6e9c

Branch: refs/heads/trunk
Commit: 7a3c6e9c3cd9ffdc71946fd12f5c3d59718c4939
Parents: c687a66
Author: Todd Lipcon 
Authored: Mon Jun 25 15:36:45 2018 -0700
Committer: Todd Lipcon 
Committed: Mon Jun 25 15:36:45 2018 -0700

--
 .../crypto/key/kms/KMSClientProvider.java   |  7 ++
 .../web/DelegationTokenAuthenticator.java   |  8 ++-
 .../apache/hadoop/util/HttpExceptionUtils.java  | 12 ++
 .../apache/hadoop/util/JsonSerialization.java   | 24 
 .../crypto/key/kms/server/KMSJSONWriter.java|  6 ++---
 .../hadoop/hdfs/web/WebHdfsFileSystem.java  |  7 ++
 ...onfRefreshTokenBasedAccessTokenProvider.java |  8 +++
 .../CredentialBasedAccessTokenProvider.java |  8 +++
 .../apache/hadoop/mapreduce/JobSubmitter.java   |  8 +++
 .../hadoop/fs/azure/security/JsonUtils.java |  4 ++--
 10 files changed, 45 insertions(+), 47 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a3c6e9c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index edbf897..7b46075 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.security.token.TokenRenewer;
 import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
 import 
org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
 import org.apache.hadoop.util.HttpExceptionUtils;
+import org.apache.hadoop.util.JsonSerialization;
 import org.apache.hadoop.util.KMSUtil;
 import org.apache.http.client.utils.URIBuilder;
 import org.slf4j.Logger;
@@ -79,7 +80,6 @@ import 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
 import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectWriter;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
@@ -132,9 +132,6 @@ public class KMSClientProvider extends KeyProvider 
implements CryptoExtension,
 
   private final ValueQueue encKeyVersionQueue;
 
-  private static final ObjectWriter WRITER =
-  new ObjectMapper().writerWithDefaultPrettyPrinter();
-
   private final Text dtService;
 
   // Allow fallback to default kms server port 9600 for certain tests that do
@@ -237,7 +234,7 @@ public class KMSClientProvider extends KeyProvider 
implements CryptoExtension,
   private static void writeJson(Object obj, OutputStream os)
   throws IOException {
 Writer writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
-WRITER.writeValue(writer, obj);
+JsonSerialization.writer().writeValue(writer, obj);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a3c6e9c/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
index 617773b..0ae2af3 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticator.java
@@ -17,8 +17,6 @@
  */
 package org.apache.hadoop.security.token.delegation.web;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 imp

hadoop git commit: HADOOP-15549. Upgrade to commons-configuration 2.1 regresses task CPU consumption

2018-06-21 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/trunk 9f15483c5 -> 59de96795


HADOOP-15549. Upgrade to commons-configuration 2.1 regresses task CPU 
consumption


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/59de9679
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/59de9679
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/59de9679

Branch: refs/heads/trunk
Commit: 59de9679540f6d0edfb34cf9f88e52b51d94b4f4
Parents: 9f15483
Author: Todd Lipcon 
Authored: Thu Jun 21 10:32:52 2018 -0700
Committer: Todd Lipcon 
Committed: Thu Jun 21 10:32:52 2018 -0700

--
 .../apache/hadoop/metrics2/impl/MetricsConfig.java   | 15 ++-
 1 file changed, 6 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/59de9679/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
index 027450c..976f16b 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/impl/MetricsConfig.java
@@ -37,10 +37,8 @@ import com.google.common.collect.Maps;
 import org.apache.commons.configuration2.Configuration;
 import org.apache.commons.configuration2.PropertiesConfiguration;
 import org.apache.commons.configuration2.SubsetConfiguration;
-import org.apache.commons.configuration2.builder.fluent.Configurations;
-import org.apache.commons.configuration2.builder.fluent.Parameters;
-import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
 import org.apache.commons.configuration2.ex.ConfigurationException;
+import org.apache.commons.configuration2.io.FileHandler;
 import org.apache.hadoop.metrics2.MetricsFilter;
 import org.apache.hadoop.metrics2.MetricsPlugin;
 import org.apache.hadoop.metrics2.filter.GlobFilter;
@@ -112,12 +110,11 @@ class MetricsConfig extends SubsetConfiguration {
   static MetricsConfig loadFirst(String prefix, String... fileNames) {
 for (String fname : fileNames) {
   try {
-Configuration cf = new Configurations().propertiesBuilder(fname)
-.configure(new Parameters().properties()
-.setFileName(fname)
-.setListDelimiterHandler(new DefaultListDelimiterHandler(',')))
-  .getConfiguration()
-  .interpolatedConfiguration();
+PropertiesConfiguration pcf = new PropertiesConfiguration();
+FileHandler fh = new FileHandler(pcf);
+fh.setFileName(fname);
+fh.load();
+Configuration cf = pcf.interpolatedConfiguration();
 LOG.info("Loaded properties from {}", fname);
 if (LOG.isDebugEnabled()) {
   LOG.debug("Properties: {}", toString(cf));


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



hadoop git commit: HADOOP-15551. Avoid use of Arrays.stream in Configuration.addTags

2018-06-20 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/trunk 32f867a6a -> 43541a189


HADOOP-15551. Avoid use of Arrays.stream in Configuration.addTags


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43541a18
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43541a18
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43541a18

Branch: refs/heads/trunk
Commit: 43541a18907d2303b708ae27a9a2cb5df891da4f
Parents: 32f867a
Author: Todd Lipcon 
Authored: Wed Jun 20 12:38:59 2018 -0700
Committer: Todd Lipcon 
Committed: Wed Jun 20 16:43:10 2018 -0700

--
 .../src/main/java/org/apache/hadoop/conf/Configuration.java  | 8 
 1 file changed, 4 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43541a18/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 19bd5da..b1125e5 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -3189,25 +3189,25 @@ public class Configuration implements 
Iterable>,
   if (prop.containsKey(CommonConfigurationKeys.HADOOP_TAGS_SYSTEM)) {
 String systemTags = prop.getProperty(CommonConfigurationKeys
 .HADOOP_TAGS_SYSTEM);
-Arrays.stream(systemTags.split(",")).forEach(tag -> TAGS.add(tag));
+TAGS.addAll(Arrays.asList(systemTags.split(",")));
   }
   // Get all custom tags
   if (prop.containsKey(CommonConfigurationKeys.HADOOP_TAGS_CUSTOM)) {
 String customTags = prop.getProperty(CommonConfigurationKeys
 .HADOOP_TAGS_CUSTOM);
-Arrays.stream(customTags.split(",")).forEach(tag -> TAGS.add(tag));
+TAGS.addAll(Arrays.asList(customTags.split(",")));
   }
 
   if (prop.containsKey(CommonConfigurationKeys.HADOOP_SYSTEM_TAGS)) {
 String systemTags = prop.getProperty(CommonConfigurationKeys
 .HADOOP_SYSTEM_TAGS);
-Arrays.stream(systemTags.split(",")).forEach(tag -> TAGS.add(tag));
+TAGS.addAll(Arrays.asList(systemTags.split(",")));
   }
   // Get all custom tags
   if (prop.containsKey(CommonConfigurationKeys.HADOOP_CUSTOM_TAGS)) {
 String customTags = prop.getProperty(CommonConfigurationKeys
 .HADOOP_CUSTOM_TAGS);
-Arrays.stream(customTags.split(",")).forEach(tag -> TAGS.add(tag));
+TAGS.addAll(Arrays.asList(customTags.split(",")));
   }
 
 } catch (Exception ex) {


-
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org



[10/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.cc
new file mode 100644
index 000..c2d64c9
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.cc
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include commons.h
+#include Checksum.h
+#include Streams.h
+
+namespace NativeTask {
+
+/
+
+void InputStream::seek(uint64_t position) {
+  THROW_EXCEPTION(UnsupportException, seek not support);
+}
+
+uint64_t InputStream::tell() {
+  THROW_EXCEPTION(UnsupportException, tell not support);
+}
+
+int32_t InputStream::readFully(void * buff, uint32_t length) {
+  int32_t ret = 0;
+  while (length  0) {
+int32_t rd = read(buff, length);
+if (rd = 0) {
+  return ret  0 ? ret : -1;
+}
+ret += rd;
+buff = ((char *)buff) + rd;
+length -= rd;
+  }
+  return ret;
+}
+
+void InputStream::readAllTo(OutputStream  out, uint32_t bufferHint) {
+  char * buffer = new char[bufferHint];
+  while (true) {
+int32_t rd = read(buffer, bufferHint);
+if (rd = 0) {
+  break;
+}
+out.write(buffer, rd);
+  }
+  delete buffer;
+}
+
+/
+
+uint64_t OutputStream::tell() {
+  THROW_EXCEPTION(UnsupportException, tell not support);
+}
+
+///
+
+ChecksumInputStream::ChecksumInputStream(InputStream * stream, ChecksumType 
type)
+: FilterInputStream(stream), _type(type), _limit(-1) {
+  resetChecksum();
+}
+
+void ChecksumInputStream::resetChecksum() {
+  _checksum = Checksum::init(_type);
+}
+
+uint32_t ChecksumInputStream::getChecksum() {
+  return Checksum::getValue(_type, _checksum);
+}
+
+int32_t ChecksumInputStream::read(void * buff, uint32_t length) {
+  if (_limit  0) {
+int32_t ret = _stream-read(buff, length);
+if (ret  0) {
+  Checksum::update(_type, _checksum, buff, ret);
+}
+return ret;
+  } else if (_limit == 0) {
+return -1;
+  } else {
+int64_t rd = _limit  length ? _limit : length;
+int32_t ret = _stream-read(buff, rd);
+if (ret  0) {
+  _limit -= ret;
+  Checksum::update(_type, _checksum, buff, ret);
+}
+return ret;
+  }
+}
+
+///
+
+ChecksumOutputStream::ChecksumOutputStream(OutputStream * stream, ChecksumType 
type)
+: FilterOutputStream(stream), _type(type) {
+  resetChecksum();
+}
+
+void ChecksumOutputStream::resetChecksum() {
+  _checksum = Checksum::init(_type);
+}
+
+uint32_t ChecksumOutputStream::getChecksum() {
+  return Checksum::getValue(_type, _checksum);
+}
+
+void ChecksumOutputStream::write(const void * buff, uint32_t length) {
+  Checksum::update(_type, _checksum, buff, length);
+  _stream-write(buff, length);
+}
+
+} // namespace NativeTask

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.h
new file mode 100644
index 000..199762b
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Streams.h
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * 

[05/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/testData/testGlibCBugSpill.out
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/testData/testGlibCBugSpill.out
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/testData/testGlibCBugSpill.out
new file mode 100644
index 000..168a65d
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/testData/testGlibCBugSpill.out
@@ -0,0 +1,2 @@
+��*�PKM��:\xBD\xC8\xCA\xCB\xAA\xB4c`\xD5|A1+\xCB\xDC\xAA\x7F\x1D\xA4\x1Bx\x1B\xD1k\xE1\x01\x0E\xEA\xC82\xB7\xF8
 
\xBD.z\xEFH\xFB*\xE8\xBD\xD4\x15o\x82\xA1\x0F}\x03\x00\x17\xC7\xF7Ce*T\x1D\xC4tk\xE7\x0CS\xBF[\xADW{\xB9\xFFLM\xB2\x87\x03\xFC\xC2\xE2\x9C%\xCB\xFF\x9A\x97]\x15_N\xA3\x8FDb\xA6\x172\x99\xF8+\xA4\xA5\xAC4n\x0D}\xA0/[\xBD\xFD\xC4\xA7\xBCj\xF8\x85\xC6V\x87B\xBA\x85\x8A\xCDZ\xFBU\x84\xD7\xBC\xAB\x84H}_v\xC4\xB2\x11\x19\xDB\x06\x93\xB5\xBE\x92E\xCE\xFA\x02RG\xE4\xA3HcI4\xAB.X\xA5\xB8a\x06\x1E.\xADS\xFB\x8AW\xDA\xCA2^\\x90\x87\x95l]r\x99\x88b\x1EU\xC8\xE0l\x1F#h\x08\xA8\x80I\x9B\xC0E\x19%\xDE\xE5\xA6?\xC2\x83\xA0\xD6O1\xFB_\x94\xF8\x8F\xD6\xD9\x16\xE2\x91\x1E\x8EL\x1F\xA2Y\x05\x9CoMN\x0C\x86\xE2\xE9\x9A\xB16o\xF6\xF3Q\xA2\xD8\xB8\xD0\xDB\x86\x93\xAFesc\xA0h\x12\xB9E\x1BU\x12\x02OXS\x8BXn\x0EQ\xABJ\xCE\xB2k\xB1\xD7Z'\xE4\xB7[`~4\x11\xB4\xCD`\xF5%\x1F\xF9\xEEW\x88\xC5\x14+O\x1B9B\x11\x95\xDE\xD5\xA4Co\x8E\xDB\xEAz\x9FGi\xAD\xFE\xF8\xB63\xED\x04\xA1\xE2v\x86G\
 
xAE\x09\x91\xE4\x03JS=\\xD1\x81}\xEE\xA3?\x87\xDB\xC1\x8B\xFF)\xE8L\xC3\xC7\x8B\x05\x1B'\xCC)\xD4\x0D\x90{\xB9\xB7C6\xA9\x16\xE2\xF9D%\xE9!Bt]N\xFB\xF3\xCE\x7F\xB7d8\xBCN\x191,Nm\xABb\x92\x00\xEA\xBC\xD0\xD5!\x956\xF9\x9D\x98\x86\x81QDnL\xB1\xEFxX\xA4\x1FZ\xE9uf\x7F\xF7\x8F\xCD\xC5P\x81\x17\x17\xB0\xCB\xAF\x9C\x05\x8E\xC6\xDB\x09\x1Ak\xA6\xF822\xF4\xA4t\xA1;2\xCC\xEA\xFD?0k]\xF8\xE4\x13\xD2'Z\x9F~5\x9C\xFC+k\x07\xE4k\xB9\xEDx\x9B\x17\xA2\xA8\x85R\x90\x1CV\xD3T\xB7b[\x81)8\xEA\xE6\x12\xE0\x88~\xB8\x87\xA9X\xB0\x88\x19\xA5,\x88cm@\xEE\xF9.\x8A?\xF8!\xD8oR\xAB\x05\x93h3\x13\x0A\x98_E\x11\x81\xD6\xB8;P\xD8u\x9DTv]\\xF1\x0C\xD5\xF4\x0E#\x87}\xE3\x89\xA2\xC2\xEA\x86\x9D\xE7\xAF\xA1\xC3;\xD2\xFF\xA6\xB2!\xAB\\x90i|n\xDE\xBB:\xC6\x08\x1D,Q\xC1;\x15\x9DUV\x8F\xD3;\xFA\x12\xD0U\xA9\xD7\xC6\xFDX\x8F\x10vX\xF1\xF9H7\x12Z\x1FoB\x8B\xD6\xF0\x04\xEB\xA2\xC1\xACH\xFE\xC9_R\xF5\x1D\xA8\x82\xBD\xC6\x11\x19#\x9A\xA8\x09\x95\x06\x98\xA9f\xD9\x12\xA0i\xFA\xD3\xB2\x01\xE9\x06\x14~.z\xDF\xF2YV
 
#z\xEB-\xFA$[U\xA6\xE2.\xD6\xD0kf\xA3\x83\xE0\x86\xD9\xC1\x7FO\xE9\xF4\xEF\x81\x06\xA3\xDE\xC8\xACt\x8B\xCAr\xE6\x82$8\x0F\xE1\x06\xF9\xBB\x0C\x9F\xAA8\x94\xBBU\x8DQ\xC3\xE9@'\xF9\xBF\xCF\x8B\xD4\x995\xEB\xE9t3Q\x10\xD7\x0D\x9D\x94\xCA\x84\xC2\xE2{\x0B\x89r\xA6\xC6\xAA\xE5C\xC6U\xA6\xD1tqa\xA0\xD7RO\x92\xC9\xBE\xF9\xD1\xDE\x93b\x06\xD3ae:\xB7\x8C\x99\xD6\xFFI\x86\x8CvtX@k\xE4m\x93wz\x8A\xC2U\xFBb\xA2\x9Ao\xAF\x8D\x19k\xA2pP\x83s\xFE\x0E\x0FY\xA0\xA7E'\xC0\x02\xF4\x98A5\xF2\x8A?\x04$\x89\xC7]\x0A\xFBX\x97*\xAEN\x13#\xB3a\xD2y\xD3#_B\xAC\x05:\xAC\x92\xEAO\x08H\x88N\x1A\xB9\xDC\xFA\x11ikJ\x12!\xE8u\xCD+\x88\x98\xE3c\xCB\xD91%\x98KDN\xC6\xF2\xB7\x86o6\x91P`\x9B\xA1\x0B\x82\xEB\x15H\xA0\xC4\x85\xDF\xAC\xA1b\xD9\xA3b\xB8E\xB59_\xF4\xCCWH\x095\xE6\xBE\xF2\x19FC\x0E\xAB\xEA6\x0C\xAD5\x90/$D\xB3\x9E\x81[9j\x8A\xC4\x85\xAAA\x7Fe\xDCG8\x00\xDA\xCFi\xBDp\x18?\xF5\xA8~@\xC1\x08\xDF\xE5\xAE,\xDF0t\xCB\x92W8V\x01F\x1A./\x8D\xAF\xD8\x87\xCE\x80w*\x18Is\x17\x15\x17DI\xB4a_\N\xB77\xA7n\x16\xDF
 
IE\xEF\x9E\x8Cd7\x1B\xF9\x97\xF9E\x86\x98\x9F\x1D\xB6\x9F\x94\xF7\x8AJ\x1A\xCD\x88\xD3\xD3\xDEw\x92Q\\xF5\xC6\xD6\x11c\x81\x00\xE8\xD9'\xE1\x9D5\xFC\x11},\xB8\xB2V\xE6\xC0\xB7a/\x18~=G\xAC\x9EGxR,\x9B\x91\xA0\xE9\x85\x14J\xB3\xB2O\xEA3\xB2F\xA7vo\x88\xFEm\x18*g%\xA4l\x9B\xF0\xA5`$\xEBo\xFC?\x13s\x0D\x91y\x92\xE0u\xFA\xD1p+)\xACpi\xE3\xB4L-\x0A\xF1#\xCF\x1A\x82\x8A\xE5\xEF80\xC9\x17z@\xD1\x9AoK\xCE\xE42\x92M\xEF\x85\xBE(z\x860\xC6\x03t\x02\xA1\xD2\x09\x1E\xB3\x80t\x86|\x8E~F\xFD1i|\x84^\x07\xC9Z\xBE\x91\xA7\x06\x9B\xC7\x8F\xFB\xD4\xB84\xED\xA2\x108/X\x89\xF4W\xF6\xE3\xEE\x94Q,H\xFFo3E\xA7q\xE5\x15\x86\xCF\x0F_\xF9\xE8N\xCD}\xEB\xFD\x0E\x03EZi\x83
 
\xA7D|{]\xEE\xBA\xE4\x00RR\x1C\xFBj\x81\xF0{w\x9F\xA6F\xBB\x00\x0Cw\x01\xE1\xFE\xC8\xE8\xAC\xD18\x19,\xE9\x9E\xFE\xF0\xA45ov[K\x86UT\x00\xC0*\xEF\x9De\xE7pN[\xA5~\xF3\xDA\xAD\xE0\x85\xB0Nb\x09I_\xA8B:b\x9A\x10\xC2\xCF\xE83|\xB1\xCD\x17\xE8\x95\xA6!\xD72DR\x03\xB7\xF4\xC2\x88\xF1Rl+t\xA7x\x04\x10\xC1@\\xC3\xE5}\xDD`\xA2\x91W\x7F%S\xB7\x1
 

[12/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.cc
new file mode 100644
index 000..b426fc3
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.cc
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include Combiner.h
+#include StringUtil.h
+
+namespace NativeTask {
+
+NativeCombineRunner::NativeCombineRunner(Config * config, ObjectCreatorFunc 
combinerCreator)
+: _config(config), _combinerCreator(combinerCreator), _keyGroupCount(0) {
+  if (NULL == _combinerCreator) {
+THROW_EXCEPTION_EX(UnsupportException, Create combiner failed);
+  }
+}
+
+KeyGroupIterator * NativeCombineRunner::createKeyGroupIterator(KVIterator * 
iter) {
+  return new KeyGroupIteratorImpl(iter);
+}
+
+void NativeCombineRunner::combine(CombineContext context, KVIterator * 
iterator,
+IFileWriter * writer) {
+  Configurable * combiner = (Configurable *)(_combinerCreator());
+  if (NULL != combiner) {
+combiner-configure(_config);
+  }
+
+  NativeObjectType type = combiner-type();
+  switch (type) {
+  case MapperType: {
+Mapper * mapper = (Mapper*)combiner;
+mapper-setCollector(writer);
+
+Buffer key;
+Buffer value;
+while (iterator-next(key, value)) {
+  mapper-map(key.data(), key.length(), value.data(), value.length());
+}
+mapper-close();
+delete mapper;
+  }
+break;
+  case ReducerType: {
+Reducer * reducer = (Reducer*)combiner;
+reducer-setCollector(writer);
+KeyGroupIterator * kg = createKeyGroupIterator(iterator);
+while (kg-nextKey()) {
+  _keyGroupCount++;
+  reducer-reduce(*kg);
+}
+reducer-close();
+delete reducer;
+  }
+break;
+  default:
+THROW_EXCEPTION(UnsupportException, Combiner type not support);
+  }
+}
+
+} /* namespace NativeTask */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.h
new file mode 100644
index 000..09f4eb0
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Combiner.h
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef COMBINER_H_
+#define COMBINER_H_
+#include commons.h
+#include IFile.h
+
+namespace NativeTask {
+
+class MemoryBufferKVIterator : public KVIterator {
+public:
+  virtual const char * getBase() = 0;
+  virtual std::vectoruint32_t * getKVOffsets() = 0;
+};
+
+enum CombineContextType {
+  UNKNOWN = 0,
+  CONTINUOUS_MEMORY_BUFFER = 1,
+};
+
+class CombineContext {
+
+private:

[09/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Checksum.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Checksum.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Checksum.cc
new file mode 100644
index 000..4c4f3cc
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/util/Checksum.cc
@@ -0,0 +1,749 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include assert.h
+#include Checksum.h
+
+namespace NativeTask {
+
+const uint32_t CRC32_T8_0[] = {0x, 0x77073096, 0xEE0E612C, 0x990951BA, 
0x076DC419,
+0x706AF48F, 0xE963A535, 0x9E6495A3, 0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 
0x97D2D988, 0x09B64C2B,
+0x7EB17CBD, 0xE7B82D07, 0x90BF1D91, 0x1DB71064, 0x6AB020F2, 0xF3B97148, 
0x84BE41DE, 0x1ADAD47D,
+0x6DDDE4EB, 0xF4D4B551, 0x83D385C7, 0x136C9856, 0x646BA8C0, 0xFD62F97A, 
0x8A65C9EC, 0x14015C4F,
+0x63066CD9, 0xFA0F3D63, 0x8D080DF5, 0x3B6E20C8, 0x4C69105E, 0xD56041E4, 
0xA2677172, 0x3C03E4D1,
+0x4B04D447, 0xD20D85FD, 0xA50AB56B, 0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 
0xACBCF940, 0x32D86CE3,
+0x45DF5C75, 0xDCD60DCF, 0xABD13D59, 0x26D930AC, 0x51DE003A, 0xC8D75180, 
0xBFD06116, 0x21B4F4B5,
+0x56B3C423, 0xCFBA9599, 0xB8BDA50F, 0x2802B89E, 0x5F058808, 0xC60CD9B2, 
0xB10BE924, 0x2F6F7C87,
+0x58684C11, 0xC1611DAB, 0xB6662D3D, 0x76DC4190, 0x01DB7106, 0x98D220BC, 
0xEFD5102A, 0x71B18589,
+0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433, 0x7807C9A2, 0x0F00F934, 0x9609A88E, 
0xE10E9818, 0x7F6A0DBB,
+0x086D3D2D, 0x91646C97, 0xE6635C01, 0x6B6B51F4, 0x1C6C6162, 0x856530D8, 
0xF262004E, 0x6C0695ED,
+0x1B01A57B, 0x8208F4C1, 0xF50FC457, 0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 
0xFCB9887C, 0x62DD1DDF,
+0x15DA2D49, 0x8CD37CF3, 0xFBD44C65, 0x4DB26158, 0x3AB551CE, 0xA3BC0074, 
0xD4BB30E2, 0x4ADFA541,
+0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB, 0x4369E96A, 0x346ED9FC, 0xAD678846, 
0xDA60B8D0, 0x44042D73,
+0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9, 0x5005713C, 0x270241AA, 0xBE0B1010, 
0xC90C2086, 0x5768B525,
+0x206F85B3, 0xB966D409, 0xCE61E49F, 0x5EDEF90E, 0x29D9C998, 0xB0D09822, 
0xC7D7A8B4, 0x59B33D17,
+0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD, 0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 
0x74B1D29A, 0xEAD54739,
+0x9DD277AF, 0x04DB2615, 0x73DC1683, 0xE3630B12, 0x94643B84, 0x0D6D6A3E, 
0x7A6A5AA8, 0xE40ECF0B,
+0x9309FF9D, 0x0A00AE27, 0x7D079EB1, 0xF00F9344, 0x8708A3D2, 0x1E01F268, 
0x6906C2FE, 0xF762575D,
+0x806567CB, 0x196C3671, 0x6E6B06E7, 0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 
0x67DD4ACC, 0xF9B9DF6F,
+0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5, 0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 
0x4FDFF252, 0xD1BB67F1,
+0xA6BC5767, 0x3FB506DD, 0x48B2364B, 0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 
0x41047A60, 0xDF60EFC3,
+0xA867DF55, 0x316E8EEF, 0x4669BE79, 0xCB61B38C, 0xBC66831A, 0x256FD2A0, 
0x5268E236, 0xCC0C7795,
+0xBB0B4703, 0x220216B9, 0x5505262F, 0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 
0x5CB36A04, 0xC2D7FFA7,
+0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D, 0x9B64C2B0, 0xEC63F226, 0x756AA39C, 
0x026D930A, 0x9C0906A9,
+0xEB0E363F, 0x72076785, 0x05005713, 0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 
0x0CB61B38, 0x92D28E9B,
+0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21, 0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 
0x1FDA836E, 0x81BE16CD,
+0xF6B9265B, 0x6FB077E1, 0x18B74777, 0x88085AE6, 0xFF0F6A70, 0x66063BCA, 
0x11010B5C, 0x8F659EFF,
+0xF862AE69, 0x616BFFD3, 0x166CCF45, 0xA00AE278, 0xD70DD2EE, 0x4E048354, 
0x3903B3C2, 0xA7672661,
+0xD06016F7, 0x4969474D, 0x3E6E77DB, 0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 
0x37D83BF0, 0xA9BCAE53,
+0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9, 0xBDBDF21C, 0xCABAC28A, 0x53B39330, 
0x24B4A3A6, 0xBAD03605,
+0xCDD70693, 0x54DE5729, 0x23D967BF, 0xB3667A2E, 0xC4614AB8, 0x5D681B02, 
0x2A6F2B94, 0xB40BBE37,
+0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D};
+const uint32_t CRC32_T8_1[] = {0x, 0x191B3141, 0x32366282, 0x2B2D53C3, 
0x646CC504,
+  

[01/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/trunk a0ad975ea - 3c9181722


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
new file mode 100644
index 000..b665971
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.nativetask.testutil;
+
+import java.io.IOException;
+import java.util.zip.CRC32;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+
+public class ResultVerifier {
+  /**
+   * verify the result
+   * 
+   * @param sample
+   *  :nativetask output
+   * @param source
+   *  :yuanwenjian
+   * @throws Exception
+   */
+  public static boolean verify(String sample, String source) throws Exception {
+FSDataInputStream sourcein = null;
+FSDataInputStream samplein = null;
+
+final Configuration conf = new Configuration();
+final FileSystem fs = FileSystem.get(conf);
+final Path hdfssource = new Path(source);
+final Path[] sourcepaths = FileUtil.stat2Paths(fs.listStatus(hdfssource));
+
+final Path hdfssample = new Path(sample);
+final Path[] samplepaths = FileUtil.stat2Paths(fs.listStatus(hdfssample));
+if (sourcepaths == null) {
+  throw new Exception(source file can not be found);
+}
+if (samplepaths == null) {
+  throw new Exception(sample file can not be found);
+}
+if (sourcepaths.length != samplepaths.length) {
+  return false;
+}
+for (int i = 0; i  sourcepaths.length; i++) {
+  final Path sourcepath = sourcepaths[i];
+  // op result file start with part-r like part-r-0
+
+  if (!sourcepath.getName().startsWith(part-r)) {
+continue;
+  }
+  Path samplepath = null;
+  for (int j = 0; j  samplepaths.length; j++) {
+if (samplepaths[i].getName().equals(sourcepath.getName())) {
+  samplepath = samplepaths[i];
+  break;
+}
+  }
+  if (samplepath == null) {
+throw new Exception(cound not found file  + 
samplepaths[0].getParent() + / + sourcepath.getName()
++  , as sourcepaths has such file);
+  }
+
+  // compare
+  try {
+if (fs.exists(sourcepath)  fs.exists(samplepath)) {
+  sourcein = fs.open(sourcepath);
+  samplein = fs.open(samplepath);
+} else {
+  System.err.println(result file not found: + sourcepath +  or  + 
samplepath);
+  return false;
+}
+
+CRC32 sourcecrc, samplecrc;
+samplecrc = new CRC32();
+sourcecrc = new CRC32();
+final byte[] bufin = new byte[1  16];
+int readnum = 0;
+int totalRead = 0;
+while (samplein.available()  0) {
+  readnum = samplein.read(bufin);
+  totalRead += readnum;
+  samplecrc.update(bufin, 0, readnum);
+}
+
+if (0 == totalRead) {
+  throw new Exception(source  + sample +  is empty file);
+}
+
+totalRead = 0;
+while (sourcein.available()  0) {
+  readnum = sourcein.read(bufin);
+  totalRead += readnum;
+  sourcecrc.update(bufin, 0, readnum);
+}
+if (0 == totalRead) {
+  throw new Exception(source  + sample +  is empty file);
+}
+
+if 

[02/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
new file mode 100644
index 000..8a4aa6f
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.nativetask.combinertest;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.Task;
+import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
+import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
+import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
+import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
+import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
+import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CombinerTest {
+  private FileSystem fs;
+  private String inputpath;
+  private String nativeoutputpath;
+  private String hadoopoutputpath;
+
+  @Test
+  public void testWordCountCombiner() {
+try {
+
+  final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
+  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
+  final Job nativejob = getJob(nativewordcount, nativeConf, inputpath, 
nativeoutputpath);
+
+  final Configuration commonConf = 
ScenarioConfiguration.getNormalConfiguration();
+  commonConf.addResource(TestConstants.COMBINER_CONF_PATH);
+
+  final Job normaljob = getJob(normalwordcount, commonConf, inputpath, 
hadoopoutputpath);
+
+  nativejob.waitForCompletion(true);
+
+  Counter nativeReduceGroups = 
nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
+  
+  normaljob.waitForCompletion(true);
+  Counter normalReduceGroups = 
normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
+   
+  assertEquals(true, ResultVerifier.verify(nativeoutputpath, 
hadoopoutputpath));
+  assertEquals(Native Reduce reduce group counter should equal orignal 
reduce group counter, 
+  nativeReduceGroups.getValue(), normalReduceGroups.getValue());
+  
+} catch (final Exception e) {
+  e.printStackTrace();
+  assertEquals(run exception, true, false);
+}
+  }
+
+  @Before
+  public void startUp() throws Exception {
+final ScenarioConfiguration conf = new ScenarioConfiguration();
+conf.addcombinerConf();
+
+this.fs = FileSystem.get(conf);
+
+this.inputpath = 
conf.get(TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_KEY,
+TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_DEFAULTV) + 
/wordcount;
+
+if (!fs.exists(new Path(inputpath))) {
+  new TestInputFile(
+  conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 
100),
+  Text.class.getName(),
+  Text.class.getName(), conf).createSequenceTestFile(inputpath, 1, 

[04/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/resources/META-INF/services/org.apache.hadoop.mapred.nativetask.Platform
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/resources/META-INF/services/org.apache.hadoop.mapred.nativetask.Platform
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/resources/META-INF/services/org.apache.hadoop.mapred.nativetask.Platform
new file mode 100644
index 000..eef215d
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/resources/META-INF/services/org.apache.hadoop.mapred.nativetask.Platform
@@ -0,0 +1,14 @@
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+org.apache.hadoop.mapred.nativetask.HadoopPlatform

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
new file mode 100644
index 000..a76b1b2
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/TestTaskContext.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.nativetask;
+
+import org.apache.hadoop.io.FloatWritable;
+import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+
+import junit.framework.TestCase;
+
+public class TestTaskContext extends TestCase {
+  
+  public void testTaskContext() {
+TaskContext context = new TaskContext(null, null, null, null, null, null, 
null);
+
+context.setInputKeyClass(IntWritable.class);
+assertEquals(IntWritable.class.getName(), 
context.getInputKeyClass().getName()); 
+ 
+context.setInputValueClass(Text.class);
+assertEquals(Text.class.getName(), 
context.getInputValueClass().getName()); 
+   
+context.setOutputKeyClass(LongWritable.class);
+assertEquals(LongWritable.class.getName(), 
context.getOuputKeyClass().getName()); 
+
+context.setOutputValueClass(FloatWritable.class);
+assertEquals(FloatWritable.class.getName(), 
context.getOutputValueClass().getName()); 
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestBufferPushPull.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestBufferPushPull.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/buffer/TestBufferPushPull.java
new file mode 100644
index 000..5dcac35
--- /dev/null
+++ 

[06/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/test_commons.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/test_commons.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/test_commons.cc
new file mode 100644
index 000..4ec1f48
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/test_commons.cc
@@ -0,0 +1,344 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include stdarg.h
+#include commons.h
+#include util/Random.h
+#include FileSystem.h
+#include test_commons.h
+
+Config TestConfig = Config();
+
+const char * GenerateSeed = generate.seed;
+const char * GenerateChoice = generate.choice;
+const char * GenerateLen = generate.len;
+const char * GenerateKeyLen = generate.key.len;
+const char * GenerateValueLen = generate.value.len;
+const char * GenerateRange = generate.range;
+const char * GenerateKeyRange = generate.key.range;
+const char * GenerateValueRange = generate.value.range;
+
+vectorstring  MakeStringArray(vectorstring  dest, ...) {
+  va_list al;
+  va_start(al, dest);
+  while (true) {
+const char * s = va_arg(al, const char *);
+if (s == NULL) {
+  break;
+}
+dest.push_back(s);
+  }
+  va_end(al);
+  return dest;
+}
+
+GenerateType GetGenerateType(const string  type) {
+  if (type == word) {
+return GenWord;
+  } else if (type == number) {
+return GenNumber;
+  } else if (type == bytes) {
+return GenBytes;
+  } else {
+THROW_EXCEPTION(UnsupportException, GenerateType not support);
+  }
+}
+
+string  GenerateOne(string  dest, Random  r, GenerateType gtype, int64_t 
choice, int64_t len,
+int64_t range) {
+  switch (gtype) {
+  case GenWord:
+r.nextWord(dest, choice);
+break;
+  case GenNumber:
+uint64_t v;
+if (choice  0) {
+  v = r.next_int32(choice);
+} else {
+  v = r.next_uint64();
+}
+if (len  0) {
+  dest = StringUtil::ToString(v, '0', len);
+} else {
+  dest = StringUtil::ToString(v);
+}
+break;
+  case GenBytes:
+if (range  2) {
+  if (len  0) {
+dest = r.nextBytes(len, ABCDEFGHIJKLMNOPQRSTUVWXYZ);
+  } else {
+dest = r.nextBytes(r.next_int32(32), ABCDEFGHIJKLMNOPQRSTUVWXYZ);
+  }
+} else {
+  if (len  0) {
+int64_t nlen = len - range / 2 + r.next_int32(range);
+if (nlen  0) {
+  dest = r.nextBytes(nlen, ABCDEFGHIJKLMNOPQRSTUVWXYZ);
+} else {
+  dest = ;
+}
+  } else {
+dest = r.nextBytes(r.next_int32(range), ABCDEFGHIJKLMNOPQRSTUVWXYZ);
+  }
+}
+break;
+  default:
+THROW_EXCEPTION(IOException, GenerateType not support);
+  }
+  return dest;
+}
+
+/**
+ * Generate random string sequences
+ * @param dest dest array
+ * @param size output array size
+ * @param type string type (word|number|bytes|tera)
+ */
+vectorstring  Generate(vectorstring  dest, uint64_t size, const string  
type) {
+  Random r;
+  if (TestConfig.get(GenerateSeed) != NULL) {
+r.setSeed(TestConfig.getInt(GenerateSeed, 0));
+  }
+  GenerateType gtype = GetGenerateType(type);
+  int64_t choice = TestConfig.getInt(GenerateChoice, -1);
+  int64_t len = TestConfig.getInt(GenerateLen, -1);
+  int64_t range = TestConfig.getInt(GenerateRange, 1);
+  string temp;
+  for (uint64_t i = 0; i  size; i++) {
+dest.push_back(GenerateOne(temp, r, gtype, choice, len, range));
+  }
+  return dest;
+}
+
+/**
+ * Generate random string pair sequences
+ * @param dest dest array
+ * @param size output array size
+ * @param type string type (word|number|bytes|tera)
+ */
+vectorpairstring, string   Generate(vectorpairstring, string   dest, 
uint64_t size,
+const string  type) {
+  Random r;
+  if (TestConfig.get(GenerateSeed) != NULL) {
+r.setSeed(TestConfig.getInt(GenerateSeed, 0));
+  }
+  GenerateType gtype = GetGenerateType(type);
+  int64_t choice = 

[22/64] [abbrv] git commit: Merge trunk into branch

2014-09-12 Thread todd
Merge trunk into branch


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1612742 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/341695e7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/341695e7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/341695e7

Branch: refs/heads/trunk
Commit: 341695e7317d156c8cd869f52978100d4b88a6ea
Parents: e788e8b e0f00e1
Author: Todd Lipcon t...@apache.org
Authored: Wed Jul 23 01:47:28 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Wed Jul 23 01:47:28 2014 +

--
 hadoop-common-project/hadoop-auth/pom.xml   |  11 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  81 +-
 .../hadoop-common/src/main/bin/hadoop-daemon.sh |   1 +
 .../hadoop/crypto/key/CachingKeyProvider.java   | 174 +
 .../crypto/key/KeyProviderCryptoExtension.java  | 235 --
 .../hadoop/crypto/key/KeyProviderFactory.java   |  10 +
 .../org/apache/hadoop/crypto/key/KeyShell.java  |  41 +-
 .../crypto/key/kms/KMSClientProvider.java   | 161 +++-
 .../hadoop/crypto/key/kms/KMSRESTConstants.java |   8 +
 .../hadoop/crypto/key/kms/ValueQueue.java   | 317 
 .../fs/CommonConfigurationKeysPublic.java   |  33 +
 .../main/java/org/apache/hadoop/fs/Stat.java|  13 +-
 .../hadoop/fs/s3native/NativeS3FileSystem.java  |   9 +-
 .../org/apache/hadoop/fs/viewfs/ChRootedFs.java |  34 +
 .../apache/hadoop/fs/viewfs/ViewFileSystem.java |  42 +
 .../org/apache/hadoop/fs/viewfs/ViewFs.java |  89 +++
 .../main/java/org/apache/hadoop/io/Text.java|  19 +-
 .../apache/hadoop/io/compress/BZip2Codec.java   |   6 +-
 .../hadoop/io/compress/CompressionCodec.java|  55 ++
 .../io/compress/CompressionInputStream.java |  10 +
 .../io/compress/CompressionOutputStream.java|  18 +-
 .../apache/hadoop/io/compress/DefaultCodec.java |  14 +-
 .../apache/hadoop/io/compress/GzipCodec.java|  14 +-
 .../org/apache/hadoop/io/compress/Lz4Codec.java |   6 +-
 .../apache/hadoop/io/compress/SnappyCodec.java  |   6 +-
 .../apache/hadoop/ipc/ProtobufRpcEngine.java|  29 +-
 .../main/java/org/apache/hadoop/ipc/Server.java |  12 +-
 .../apache/hadoop/ipc/WritableRpcEngine.java|  41 +-
 .../apache/hadoop/metrics2/MetricsSystem.java   |   6 +
 .../hadoop/metrics2/impl/MetricsConfig.java |   2 +-
 .../hadoop/metrics2/impl/MetricsSystemImpl.java |  11 +
 .../org/apache/hadoop/net/NetworkTopology.java  |   9 +-
 .../net/NetworkTopologyWithNodeGroup.java   |   7 +-
 .../java/org/apache/hadoop/security/Groups.java |  52 +-
 .../hadoop/security/LdapGroupsMapping.java  |   6 +-
 .../security/ShellBasedUnixGroupsMapping.java   |   3 +-
 .../hadoop/security/alias/CredentialShell.java  |   4 +-
 .../security/alias/JavaKeyStoreProvider.java|   3 +
 .../authorize/DefaultImpersonationProvider.java |  65 +-
 .../authorize/ImpersonationProvider.java|  15 +
 .../hadoop/security/authorize/ProxyUsers.java   |  29 +-
 .../main/java/org/apache/hadoop/util/Timer.java |  51 ++
 .../main/java/org/apache/hadoop/util/Tool.java  |   2 +-
 .../src/main/resources/core-default.xml |  47 ++
 .../src/site/apt/CommandsManual.apt.vm  |   4 +-
 .../src/site/apt/NativeLibraries.apt.vm |   2 +
 .../crypto/key/TestCachingKeyProvider.java  | 152 
 .../key/TestKeyProviderCryptoExtension.java | 124 ++-
 .../apache/hadoop/crypto/key/TestKeyShell.java  |  16 +-
 .../hadoop/crypto/key/TestValueQueue.java   | 190 +
 .../java/org/apache/hadoop/fs/TestStat.java |  28 +-
 .../fs/viewfs/ViewFileSystemBaseTest.java   |  55 +-
 .../apache/hadoop/fs/viewfs/ViewFsBaseTest.java |  52 ++
 .../java/org/apache/hadoop/io/TestText.java |  22 +
 .../org/apache/hadoop/ipc/MiniRPCBenchmark.java |   8 +-
 .../java/org/apache/hadoop/ipc/TestRPC.java |   2 +
 .../metrics2/impl/TestGangliaMetrics.java   |  12 +-
 .../metrics2/impl/TestMetricsSystemImpl.java|  50 +-
 .../net/TestNetworkTopologyWithNodeGroup.java   |   8 +-
 .../hadoop/security/TestDoAsEffectiveUser.java  |  23 +-
 .../hadoop/security/TestGroupsCaching.java  |  56 ++
 .../hadoop/security/alias/TestCredShell.java|  20 +-
 .../security/authorize/TestProxyUsers.java  | 126 ++-
 .../java/org/apache/hadoop/util/FakeTimer.java  |  52 ++
 .../hadoop-kms/src/main/conf/kms-acls.xml   |  15 +
 ...rKeyGeneratorKeyProviderCryptoExtension.java | 149 
 .../hadoop/crypto/key/kms/server/KMS.java   |  97 ++-
 .../hadoop/crypto/key/kms/server/KMSACLs.java   |  36 +-
 .../key/kms/server/KMSCacheKeyProvider.java | 177 -
 .../crypto/key/kms/server/KMSConfiguration.java |  14 +-
 .../key/kms/server/KMSServerJSONUtils.java  |  21 +-
 .../hadoop/crypto/key/kms/server/KMSWebApp.java |  50 +-
 .../hadoop-kms/src/site/apt/index.apt.vm| 110

[27/64] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
new file mode 100644
index 000..b27f00f
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/MockValueClass.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred.nativetask.testutil;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.util.Random;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.mapred.nativetask.util.BytesUtil;
+
+public class MockValueClass implements Writable {
+  private final static int DEFAULT_ARRAY_LENGTH = 16;
+  private int a = 0;
+  private byte[] array;
+  private final LongWritable longWritable;
+  private final Text txt;
+  private final Random rand = new Random();
+
+  public MockValueClass() {
+a = rand.nextInt();
+array = new byte[DEFAULT_ARRAY_LENGTH];
+rand.nextBytes(array);
+longWritable = new LongWritable(rand.nextLong());
+txt = new Text(BytesUtil.toStringBinary(array));
+  }
+
+  public MockValueClass(byte[] seed) {
+a = seed.length;
+array = new byte[seed.length];
+System.arraycopy(seed, 0, array, 0, seed.length);
+longWritable = new LongWritable(a);
+txt = new Text(BytesUtil.toStringBinary(array));
+  }
+
+  @Override
+  public void write(DataOutput out) throws IOException {
+out.writeInt(a);
+out.writeInt(array.length);
+out.write(array);
+longWritable.write(out);
+txt.write(out);
+  }
+
+  @Override
+  public void readFields(DataInput in) throws IOException {
+a = in.readInt();
+final int length = in.readInt();
+array = new byte[length];
+in.readFully(array);
+longWritable.readFields(in);
+txt.readFields(in);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
new file mode 100644
index 000..b665971
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
@@ -0,0 +1,141 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */

[44/64] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
new file mode 100644
index 000..b467822
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/include/gtest/gtest.h
@@ -0,0 +1,19541 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: w...@google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the public API for Google Test.  It should be
+// included by any test program that uses Google Test.
+//
+// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
+// leave some internal implementation details in this header file.
+// They are clearly marked by comments like this:
+//
+//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+//
+// Such code is NOT meant to be used by a user directly, and is subject
+// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
+// program!
+//
+// Acknowledgment: Google Test borrowed the idea of automatic test
+// registration from Barthelemy Dagenais' (barthel...@prologique.com)
+// easyUnit framework.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_H_
+
+#include limits
+#include vector
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: w...@google.com (Zhanyong Wan), eef...@gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// 

[15/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
new file mode 100644
index 000..b467822
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
@@ -0,0 +1,19541 @@
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: w...@google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file defines the public API for Google Test.  It should be
+// included by any test program that uses Google Test.
+//
+// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
+// leave some internal implementation details in this header file.
+// They are clearly marked by comments like this:
+//
+//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
+//
+// Such code is NOT meant to be used by a user directly, and is subject
+// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
+// program!
+//
+// Acknowledgment: Google Test borrowed the idea of automatic test
+// registration from Barthelemy Dagenais' (barthel...@prologique.com)
+// easyUnit framework.
+
+#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
+#define GTEST_INCLUDE_GTEST_GTEST_H_
+
+#include limits
+#include vector
+
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Authors: w...@google.com (Zhanyong Wan), eef...@gmail.com (Sean Mcafee)
+//
+// The Google C++ Testing Framework (Google Test)
+//
+// This header file declares functions and macros used 

[11/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Merge.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Merge.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Merge.cc
new file mode 100644
index 000..a8d7e1b
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/lib/Merge.cc
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include commons.h
+#include util/Timer.h
+#include util/StringUtil.h
+#include Merge.h
+#include lib/FileSystem.h
+
+namespace NativeTask {
+
+IFileMergeEntry * IFileMergeEntry::create(SingleSpillInfo * spill) {
+  InputStream * fileOut = FileSystem::getLocal().open(spill-path);
+  IFileReader * reader = new IFileReader(fileOut, spill, true);
+  return new IFileMergeEntry(reader);
+}
+
+Merger::Merger(IFileWriter * writer, Config * config, ComparatorPtr comparator,
+ICombineRunner * combineRunner)
+: _writer(writer), _config(config), _combineRunner(combineRunner), 
_first(true),
+_comparator(comparator) {
+
+}
+
+Merger::~Merger() {
+  _heap.clear();
+  for (size_t i = 0; i  _entries.size(); i++) {
+delete _entries[i];
+  }
+  _entries.clear();
+}
+
+void Merger::addMergeEntry(MergeEntryPtr pme) {
+  _entries.push_back(pme);
+}
+
+/**
+ * 0 if success, have next partition
+ * 1 if failed, no more
+ */
+bool Merger::startPartition() {
+  bool firstPartitionState = false;
+  for (size_t i = 0; i  _entries.size(); i++) {
+bool partitionState = _entries[i]-nextPartition();
+if (i == 0) {
+  firstPartitionState = partitionState;
+}
+if (firstPartitionState != partitionState) {
+  THROW_EXCEPTION(IOException, MergeEntry partition number not equal);
+}
+  }
+  if (firstPartitionState) { // do have new partition
+_writer-startPartition();
+  }
+  return firstPartitionState;
+}
+
+/**
+ * finish one partition
+ */
+void Merger::endPartition() {
+  _writer-endPartition();
+}
+
+void Merger::initHeap() {
+  _heap.clear();
+  for (size_t i = 0; i  _entries.size(); i++) {
+MergeEntryPtr pme = _entries[i];
+if (pme-next()) {
+  _heap.push_back(pme);
+}
+  }
+  makeHeap((_heap[0]), (_heap[0]) + _heap.size(), _comparator);
+}
+
+bool Merger::next() {
+  size_t cur_heap_size = _heap.size();
+  if (cur_heap_size  0) {
+if (!_first) {
+  if (_heap[0]-next()) { // have more, adjust heap
+if (cur_heap_size == 1) {
+  return true;
+} else if (cur_heap_size == 2) {
+  MergeEntryPtr * base = (_heap[0]);
+
+  if (_comparator(base[1], base[0])) {
+std::swap(base[0], base[1]);
+  }
+} else {
+  MergeEntryPtr * base = (_heap[0]);
+  heapify(base, 1, cur_heap_size, _comparator);
+}
+  } else { // no more, pop heap
+MergeEntryPtr * base = (_heap[0]);
+popHeap(base, base + cur_heap_size, _comparator);
+_heap.pop_back();
+  }
+} else {
+  _first = false;
+}
+return _heap.size()  0;
+  }
+  return false;
+}
+
+bool Merger::next(Buffer  key, Buffer  value) {
+  bool result = next();
+  if (result) {
+MergeEntryPtr * base = (_heap[0]);
+key.reset(base[0]-getKey(), base[0]-getKeyLength());
+value.reset(base[0]-getValue(), base[0]-getValueLength());
+return true;
+  } else {
+return false;
+  }
+}
+
+void Merger::merge() {
+  Timer timer;
+  uint64_t total_record = 0;
+  _heap.reserve(_entries.size());
+  MergeEntryPtr * base = (_heap[0]);
+  while (startPartition()) {
+initHeap();
+if (_heap.size() == 0) {
+  endPartition();
+  continue;
+}
+_first = true;
+if (_combineRunner == NULL) {
+  while (next()) {
+_writer-write(base[0]-getKey(), base[0]-getKeyLength(), 
base[0]-getValue(),
+base[0]-getValueLength());
+total_record++;
+  }
+} else {
+  

[19/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataChannel.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataChannel.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataChannel.java
new file mode 100644
index 000..e8132bd
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataChannel.java
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.nativetask;
+
+public enum DataChannel {
+  /**
+   * We will only read data from this channel.
+   */
+  IN,
+  /**
+   * We will only write data to this channel.
+   */
+  OUT,
+  /**
+   * We will do both read and write for this channel.
+   */
+  INOUT,
+  /**
+   * There is no data exchange on this channel.
+   */
+  NONE
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataReceiver.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataReceiver.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataReceiver.java
new file mode 100644
index 000..c47cdac
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/DataReceiver.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.nativetask;
+
+import java.io.IOException;
+
+/**
+ * a DataReceiver pulls in arriving data, an example
+ * is {@link org.apache.hadoop.mapred.nativetask.handlers.BufferPuller}
+ */
+public interface DataReceiver {
+
+  /**
+   * Send a signal to the receiver that the data arrives.
+   * The data is transferred in another band.
+   *
+   * @return whether the arrived data was handled; exact semantics are
+   *         defined by implementations (e.g. BufferPuller) — confirm there
+   * @throws IOException if pulling or processing the arrived data fails
+   */
+  public boolean receiveData() throws IOException;
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
new file mode 100644
index 000..1c4ede5
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
@@ -0,0 +1,81 @@
+/**
+ * 

[36/64] [abbrv] git commit: MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common. Contributed by Binglin Chang

2014-09-12 Thread todd
MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common. Contributed by 
Binglin Chang

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616105 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7ecaa81d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7ecaa81d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7ecaa81d

Branch: refs/heads/trunk
Commit: 7ecaa81d2746a805b9937fcd134ceaa8607ed7b5
Parents: 83a3967
Author: Binglin Chang bch...@apache.org
Authored: Wed Aug 6 06:01:12 2014 +
Committer: Binglin Chang bch...@apache.org
Committed: Wed Aug 6 06:01:12 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   6 +
 .../src/CMakeLists.txt  |   4 +-
 .../src/main/native/COPYING |  12 +-
 .../src/main/native/lz4/lz4.c   | 740 ---
 .../src/main/native/lz4/lz4.h   |  96 ---
 .../src/main/native/src/codec/Lz4Codec.cc   |  22 +-
 .../src/main/native/test/TestCompressions.cc|   6 +-
 .../src/main/native/test/TestIFile.cc   |   2 +-
 9 files changed, 21 insertions(+), 868 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index c70df12..9dddcd5 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -9,3 +9,4 @@ MAPREDUCE-6000. native-task: Simplify 
ByteBufferDataReader/Writer (todd)
 MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
+MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 2cb483e..9727800 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -163,6 +163,12 @@
 /goals
 configuration
   target
+copy 
file=${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.h
+  todir=${project.build.directory}/native/ /
+copy 
file=${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4_encoder.h
+  todir=${project.build.directory}/native/ /
+copy 
file=${basedir}/../../../hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/lz4.c
+  todir=${project.build.directory}/native/ /
 copy 
todir=${project.build.directory}/native/test/testData
   overwrite=true
   fileset dir=${basedir}/src/main/native/testData /

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ecaa81d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 3094162..36dbd9c 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -155,7 +155,7 @@ include_directories(
 ${D}/test
 ${CMAKE_CURRENT_SOURCE_DIR}
 #${CMAKE_CURRENT_SOURCE_DIR}/src
-#${CMAKE_BINARY_DIR}
+${CMAKE_BINARY_DIR}
 ${JNI_INCLUDE_DIRS}
 ${SNAPPY_INCLUDE_DIR}
 )
@@ -174,7 +174,7 @@ else (${CMAKE_SYSTEM_NAME} MATCHES Darwin)
 endif (${CMAKE_SYSTEM_NAME} MATCHES Darwin

[33/64] [abbrv] git commit: Add entry to branch's CHANGES.txt for previous commit

2014-09-12 Thread todd
Add entry to branch's CHANGES.txt for previous commit


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613827 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/43917e56
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/43917e56
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/43917e56

Branch: refs/heads/trunk
Commit: 43917e564d266b67c323f4657d11a8b70ae1d706
Parents: ea9e5b7
Author: Todd Lipcon t...@apache.org
Authored: Sun Jul 27 18:54:02 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Sun Jul 27 18:54:02 2014 +

--
 hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/43917e56/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index aa695cf..92c94a8 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -6,3 +6,4 @@ MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
 MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
+MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)



[45/64] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
deleted file mode 100644
index b467822..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest.h
+++ /dev/null
@@ -1,19541 +0,0 @@
-// Copyright 2005, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// Author: w...@google.com (Zhanyong Wan)
-//
-// The Google C++ Testing Framework (Google Test)
-//
-// This header file defines the public API for Google Test.  It should be
-// included by any test program that uses Google Test.
-//
-// IMPORTANT NOTE: Due to limitation of the C++ language, we have to
-// leave some internal implementation details in this header file.
-// They are clearly marked by comments like this:
-//
-//   // INTERNAL IMPLEMENTATION - DO NOT USE IN A USER PROGRAM.
-//
-// Such code is NOT meant to be used by a user directly, and is subject
-// to CHANGE WITHOUT NOTICE.  Therefore DO NOT DEPEND ON IT in a user
-// program!
-//
-// Acknowledgment: Google Test borrowed the idea of automatic test
-// registration from Barthelemy Dagenais' (barthel...@prologique.com)
-// easyUnit framework.
-
-#ifndef GTEST_INCLUDE_GTEST_GTEST_H_
-#define GTEST_INCLUDE_GTEST_GTEST_H_
-
-#include limits
-#include vector
-
-// Copyright 2005, Google Inc.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-//
-// Authors: w...@google.com (Zhanyong Wan), eef...@gmail.com (Sean Mcafee)
-//
-// The Google C++ Testing Framework (Google Test)
-//
-// This header file declares functions and macros used 

[16/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest-all.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest-all.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest-all.cc
new file mode 100644
index 000..d17f17a
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest-all.cc
@@ -0,0 +1,9120 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: mhe...@google.com (Markus Heule)
+//
+// Google C++ Testing Framework (Google Test)
+//
+// Sometimes it's desirable to build Google Test by compiling a single file.
+// This file serves this purpose.
+
+// This line ensures that gtest.h can be compiled on its own, even
+// when it's fused.
+#include gtest/gtest.h
+
+// The following lines pull in the real gtest *.cc files.
+// Copyright 2005, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// Author: w...@google.com (Zhanyong Wan)
+//
+// The Google C++ Testing Framework (Google Test)
+
+// Copyright 2007, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. 

[25/64] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
deleted file mode 100644
index 8a4aa6f..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred.nativetask.combinertest;
-
-import static org.junit.Assert.assertEquals;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.Task;
-import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.IntSumReducer;
-import 
org.apache.hadoop.mapred.nativetask.combinertest.WordCount.TokenizerMapper;
-import org.apache.hadoop.mapred.nativetask.kvtest.TestInputFile;
-import org.apache.hadoop.mapred.nativetask.testutil.ResultVerifier;
-import org.apache.hadoop.mapred.nativetask.testutil.ScenarioConfiguration;
-import org.apache.hadoop.mapred.nativetask.testutil.TestConstants;
-import org.apache.hadoop.mapreduce.Counter;
-import org.apache.hadoop.mapreduce.Job;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.Before;
-import org.junit.Test;
-
-public class CombinerTest {
-  private FileSystem fs;
-  private String inputpath;
-  private String nativeoutputpath;
-  private String hadoopoutputpath;
-
-  @Test
-  public void testWordCountCombiner() {
-try {
-
-  final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
-  final Job nativejob = getJob(nativewordcount, nativeConf, inputpath, 
nativeoutputpath);
-
-  final Configuration commonConf = 
ScenarioConfiguration.getNormalConfiguration();
-  commonConf.addResource(TestConstants.COMBINER_CONF_PATH);
-
-  final Job normaljob = getJob(normalwordcount, commonConf, inputpath, 
hadoopoutputpath);
-
-  nativejob.waitForCompletion(true);
-
-  Counter nativeReduceGroups = 
nativejob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-  
-  normaljob.waitForCompletion(true);
-  Counter normalReduceGroups = 
normaljob.getCounters().findCounter(Task.Counter.REDUCE_INPUT_RECORDS);
-   
-  assertEquals(true, ResultVerifier.verify(nativeoutputpath, 
hadoopoutputpath));
-  assertEquals(Native Reduce reduce group counter should equal orignal 
reduce group counter, 
-  nativeReduceGroups.getValue(), normalReduceGroups.getValue());
-  
-} catch (final Exception e) {
-  e.printStackTrace();
-  assertEquals(run exception, true, false);
-}
-  }
-
-  @Before
-  public void startUp() throws Exception {
-final ScenarioConfiguration conf = new ScenarioConfiguration();
-conf.addcombinerConf();
-
-this.fs = FileSystem.get(conf);
-
-this.inputpath = 
conf.get(TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_KEY,
-TestConstants.NATIVETASK_TEST_COMBINER_INPUTPATH_DEFAULTV) + 
/wordcount;
-
-if (!fs.exists(new Path(inputpath))) {
-  new TestInputFile(
-  conf.getInt(TestConstants.NATIVETASK_COMBINER_WORDCOUNT_FILESIZE, 
100),
-  Text.class.getName(),
-  Text.class.getName(), conf).createSequenceTestFile(inputpath, 1, 

[52/64] [abbrv] MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
index ec326ca..d8a6595 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class LongWritableSerializer extends DefaultSerializer implements
 INativeComparable {
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
index f5a033d..5881a46 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.mapred.nativetask.serde;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 
+@InterfaceAudience.Private
 public class NativeSerialization {
 
   private final ConcurrentHashMapString, Class? map = new 
ConcurrentHashMapString, Class?();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
index afa4e8e..f6e7cf5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class NullWritableSerializer extends DefaultSerializer implements
 INativeComparable {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
index e95a0c4..8dee58f 100644
--- 

[13/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/snappy.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/snappy.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/snappy.h
new file mode 100644
index 000..03ef6ce
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/snappy.h
@@ -0,0 +1,163 @@
+// Copyright 2005 and onwards Google Inc.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// A light-weight compression algorithm.  It is designed for speed of
+// compression and decompression, rather than for the utmost in space
+// savings.
+//
+// For getting better compression ratios when you are compressing data
+// with long repeated sequences or compressing data that is similar to
+// other data, while still compressing fast, you might look at first
+// using BMDiff and then compressing the output of BMDiff with
+// Snappy.
+
+#ifndef UTIL_SNAPPY_SNAPPY_H__
+#define UTIL_SNAPPY_SNAPPY_H__
+
+#include <stddef.h>
+#include <string>
+
+#include "snappy-stubs-public.h"
+
+namespace snappy {
+  class Source;
+  class Sink;
+
+  // 
+  // Generic compression/decompression routines.
+  // 
+
+  // Compress the bytes read from *source and append to *sink. Return the
+  // number of bytes written.
+  size_t Compress(Source* source, Sink* sink);
+
+  // Find the uncompressed length of the given stream, as given by the header.
+  // Note that the true length could deviate from this; the stream could e.g.
+  // be truncated.
+  //
+  // Also note that this leaves *source in a state that is unsuitable for
+  // further operations, such as RawUncompress(). You will need to rewind
+  // or recreate the source yourself before attempting any further calls.
+  bool GetUncompressedLength(Source* source, uint32* result);
+
+  // 
+  // Higher-level string based routines (should be sufficient for most users)
+  // 
+
+  // Sets *output to the compressed version of input[0,input_length-1].
+  // Original contents of *output are lost.
+  //
+  // REQUIRES: input[] is not an alias of *output.
+  size_t Compress(const char* input, size_t input_length, string* output);
+
+  // Decompresses compressed[0,compressed_length-1] to *uncompressed.
+  // Original contents of *uncompressed are lost.
+  //
+  // REQUIRES: compressed[] is not an alias of *uncompressed.
+  //
+  // returns false if the message is corrupted and could not be decompressed
+  bool Uncompress(const char* compressed, size_t compressed_length,
+  string* uncompressed);
+
+
+  // 
+  // Lower-level character array based routines.  May be useful for
+  // efficiency reasons in certain circumstances.
+  // 
+
+  // REQUIRES: compressed must point to an area of memory that is at
+ 

[07/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc
new file mode 100644
index 000..1e0bae4
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/test/TestIFile.cc
@@ -0,0 +1,189 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include algorithm
+#include commons.h
+#include BufferStream.h
+#include FileSystem.h
+#include IFile.h
+#include test_commons.h
+
+// Writes `partition` copies of the key/value list into an IFile at `path`
+// using the given key/value type and codec; returns the spill metadata.
+// Caller owns the returned SingleSpillInfo.
+SingleSpillInfo * writeIFile(int partition, vector<pair<string, string> > & kvs,
+    const string & path, KeyValueType type, const string & codec) {
+  FileOutputStream * fout = (FileOutputStream*)FileSystem::getLocal().create(path);
+  IFileWriter * iw = new IFileWriter(fout, CHECKSUM_CRC32, type, type, codec, NULL);
+  for (int i = 0; i < partition; i++) {
+    iw->startPartition();
+    for (size_t i = 0; i < kvs.size(); i++) {
+      pair<string, string> & p = kvs[i];
+      iw->write(p.first.c_str(), p.first.length(), p.second.c_str(), p.second.length());
+    }
+    iw->endPartition();
+  }
+  SingleSpillInfo * info = iw->getSpillInfo();
+  delete iw;
+  delete fout;
+  return info;
+}
+
+// Reads every partition of the IFile at `path` (described by `info`) and
+// appends each key/value pair to `kvs`. Mirrors writeIFile above.
+void readIFile(vector<pair<string, string> > & kvs, const string & path,
+    KeyValueType type, SingleSpillInfo * info, const string & codec) {
+  FileInputStream * fin = (FileInputStream*)FileSystem::getLocal().open(path);
+  IFileReader * ir = new IFileReader(fin, info);
+  while (ir->nextPartition()) {
+    const char * key, *value;
+    uint32_t keyLen, valueLen;
+    while (NULL != (key = ir->nextKey(keyLen))) {
+      value = ir->value(valueLen);
+      string keyS(key, keyLen);
+      string valueS(value, valueLen);
+      kvs.push_back(std::make_pair(keyS, valueS));
+    }
+  }
+  delete ir;
+  delete fin;
+}
+
+// Round-trip check: write `kvs` to an IFile `partition` times, read it back,
+// and assert each partition equals the original list. Cleans up the temp file.
+void TestIFileReadWrite(KeyValueType kvtype, int partition, int size,
+    vector<pair<string, string> > & kvs, const string & codec = "") {
+  string outputpath = "ifilewriter";
+  SingleSpillInfo * info = writeIFile(partition, kvs, outputpath, kvtype, codec);
+  LOG("write finished");
+  vector<pair<string, string> > readkvs;
+  readIFile(readkvs, outputpath, kvtype, info, codec);
+  LOG("read finished");
+  ASSERT_EQ(kvs.size() * partition, readkvs.size());
+  for (int i = 0; i < partition; i++) {
+    // Slice out partition i and compare it wholesale against the input.
+    vector<pair<string, string> > cur_part(readkvs.begin() + i * kvs.size(),
+        readkvs.begin() + (i + 1) * kvs.size());
+    ASSERT_EQ(kvs.size(), cur_part.size());
+//    for (size_t j=0;j<kvs.size();j++) {
+//      SCOPED_TRACE(j);
+//      ASSERT_EQ(kvs[j], cur_part[j]);
+//    }
+    ASSERT_EQ(kvs, cur_part);
+  }
+  FileSystem::getLocal().remove(outputpath);
+}
+
+// Exercises the IFile round trip for each key/value type, plus one run with
+// Snappy compression enabled.
+TEST(IFile, WriteRead) {
+  int partition = TestConfig.getInt("ifile.partition", 7);
+  int size = TestConfig.getInt("partition.size", 2);
+  vector<pair<string, string> > kvs;
+  Generate(kvs, size, "bytes");
+  TestIFileReadWrite(TextType, partition, size, kvs);
+  TestIFileReadWrite(BytesType, partition, size, kvs);
+  TestIFileReadWrite(UnknownType, partition, size, kvs);
+  TestIFileReadWrite(TextType, partition, size, kvs,
+      "org.apache.hadoop.io.compress.SnappyCodec");
+}
+
+void TestIFileWriteRead2(vectorpairstring, string   kvs, char * buff, 
size_t buffsize,
+const string  codec, ChecksumType checksumType, KeyValueType type) {
+  int partition = TestConfig.getInt(ifile.partition, 50);
+  Timer timer;
+  OutputBuffer outputBuffer = OutputBuffer(buff, buffsize);
+  IFileWriter * iw = new IFileWriter(outputBuffer, checksumType, type, type, 
codec, NULL);
+  timer.reset();
+  for (int i = 0; i  partition; i++) {
+iw-startPartition();
+for (size_t j = 0; j  kvs.size(); j++) {
+  iw-write(kvs[j].first.c_str(), kvs[j].first.length(), 
kvs[j].second.c_str(),
+  kvs[j].second.length());
+}
+iw-endPartition();
+  }
+  

[46/64] [abbrv] git commit: MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-12 Thread todd
MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu 
Zhang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fad4524c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fad4524c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fad4524c

Branch: refs/heads/trunk
Commit: fad4524c85848b8efe27dcf4a3e3a9ac1725bd2a
Parents: 6d39367
Author: Todd Lipcon t...@apache.org
Authored: Wed Aug 27 12:23:03 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Aug 27 12:25:42 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  | 2 +
 .../src/CMakeLists.txt  | 9 +-
 .../src/main/native/gtest/gtest.h   | 19541 -
 .../src/main/native/gtest/include/gtest/gtest.h | 19541 +
 .../src/main/native/src/NativeTask.h| 2 -
 .../src/main/native/src/codec/BlockCodec.cc | 4 +-
 .../src/main/native/src/handler/BatchHandler.h  | 4 +-
 .../main/native/src/handler/CombineHandler.cc   |10 +-
 .../src/main/native/src/lib/Buffers.h   | 4 +-
 .../src/main/native/src/lib/IFile.cc|11 +-
 .../main/native/src/lib/MapOutputCollector.cc   |14 +-
 .../main/native/src/lib/MapOutputCollector.h| 4 +-
 .../src/main/native/src/lib/Merge.cc| 4 +-
 .../src/main/native/src/lib/PartitionBucket.h   |12 +-
 .../src/main/native/src/lib/commons.h   | 3 +
 .../src/main/native/src/util/StringUtil.cc  | 6 +-
 .../src/main/native/test/TestCompressions.cc| 7 +-
 .../src/main/native/test/TestIFile.cc   | 4 +
 .../src/main/native/test/lib/TestByteBuffer.cc  | 2 +-
 .../native/test/lib/TestFixSizeContainer.cc | 2 +-
 .../src/main/native/test/lib/TestIterator.cc| 2 -
 .../native/test/lib/TestMemBlockIterator.cc | 6 +-
 .../src/main/native/test/lib/TestMemoryBlock.cc | 1 +
 .../src/main/native/test/util/TestHash.cc   | 2 +-
 24 files changed, 19600 insertions(+), 19597 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 95f7858..4dc08cb 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -15,3 +15,5 @@ MAPREDUCE-5978. native-task: remove test case for not 
supported codec Bzip2Codec
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
 MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
+MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
index 77b6109..0ab99db 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/CMakeLists.txt
@@ -90,7 +90,7 @@ SET(CMAKE_FIND_LIBRARY_SUFFIXES 
STORED_CMAKE_FIND_LIBRARY_SUFFIXES)
 
 # primitive configs
 set(PRFLAGS -DSIMPLE_MEMCPY)
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${PRFLAGS} -Wall")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${PRFLAGS} -fno-strict-aliasing -Wall -Wno-sign-compare")
 set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -no-undefined -version-info 0:1:0
     -L${_JAVA_HOME}/jre/lib/amd64/server -ljvm")
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CMAKE_C_FLAGS} -g -O2 -DNDEBUG -fPIC")
@@ -150,7 +150,6 @@ CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake 
${CMAKE_BINARY_DIR}/config.h)
 
 include_directories(
 ${GENERATED_JAVAH}
-${D}
 ${D}/src
 ${D}/src/util
 ${D}/src/lib
@@ -160,6 +159,8 @@ include_directories(
 ${JNI_INCLUDE_DIRS}
 ${SNAPPY_INCLUDE_DIR}
 )
+# add gtest as system library to suppress gcc warnings
+include_directories(SYSTEM ${D}/gtest/include)
 
 
 SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
@@ -220,8 +221,9 @@ add_dual_library(nativetask
 
 target_link_libraries(nativetask ${NT_DEPEND_LIBRARY})
 
+add_library(gtest ${D}/gtest/gtest-all.cc)
+set_target_properties(gtest

[38/64] [abbrv] git commit: MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)

2014-09-12 Thread todd
MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616116 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4d4fb172
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4d4fb172
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4d4fb172

Branch: refs/heads/trunk
Commit: 4d4fb1723fd825df4ad2488e4ecde0c69359e83a
Parents: 432f641
Author: Sean Zhong seanzh...@apache.org
Authored: Wed Aug 6 07:40:24 2014 +
Committer: Sean Zhong seanzh...@apache.org
Committed: Wed Aug 6 07:40:24 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |  3 +-
 .../nativetask/compresstest/CompressTest.java   | 33 -
 .../test/resources/test-bzip2-compress-conf.xml | 39 
 .../resources/test-default-compress-conf.xml| 39 
 4 files changed, 2 insertions(+), 112 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d4fb172/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 4b77262..462ac8b 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -10,4 +10,5 @@ MAPREDUCE-5991. native-task should not run unit tests if 
native profile is not e
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
 MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
-MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
\ No newline at end of file
+MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
+MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4d4fb172/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
index 0406375..b98e2de 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/compresstest/CompressTest.java
@@ -68,23 +68,6 @@ public class CompressTest {
   }
 
   @Test
-  public void testBzip2Compress() throws Exception {
-final Configuration nativeconf = 
ScenarioConfiguration.getNativeConfiguration();
-nativeconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-final Job nativejob = CompressMapper.getCompressJob(nativebzip2, 
nativeconf);
-nativejob.waitForCompletion(true);
-
-final Configuration hadoopconf = 
ScenarioConfiguration.getNormalConfiguration();
-hadoopconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-final Job hadoopjob = CompressMapper.getCompressJob(hadoopbzip2, 
hadoopconf);
-hadoopjob.waitForCompletion(true);
-
-final boolean compareRet = 
ResultVerifier.verify(CompressMapper.outputFileDir + nativebzip2,
-CompressMapper.outputFileDir + hadoopbzip2);
-assertEquals(file compare result: if they are the same ,then return 
true, true, compareRet);
-  }
-
-  @Test
   public void testLz4Compress() throws Exception {
 final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
 nativeConf.addResource(TestConstants.LZ4_COMPRESS_CONF_PATH);
@@ -100,22 +83,6 @@ public class CompressTest {
 assertEquals(file compare result: if they are the same ,then return 
true, true, compareRet);
   }
 
-  @Test
-  public void testDefaultCompress() throws Exception {
-final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-nativeConf.addResource(TestConstants.DEFAULT_COMPRESS_CONF_PATH);
-final Job nativeJob = CompressMapper.getCompressJob(nativedefault, 
nativeConf

[28/64] [abbrv] git commit: MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout. Contributed by Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613004 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b2cba48f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b2cba48f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b2cba48f

Branch: refs/heads/trunk
Commit: b2cba48f36c96f190f6a25e65291873be7aee322
Parents: d468a92
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 06:17:33 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 06:17:33 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../nativetask/combinertest/CombinerTest.java   | 122 +++
 .../combinertest/LargeKVCombinerTest.java   |  96 +
 .../combinertest/OldAPICombinerTest.java| 107 ++
 .../nativetask/combinertest/WordCount.java  |  89 
 .../combinertest/WordCountWithOldAPI.java   |  65 ++
 .../nativetask/compresstest/CompressMapper.java |  69 ++
 .../nativetask/compresstest/CompressTest.java   | 134 
 .../nativetask/kvtest/HashSumReducer.java   |  48 +
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  97 +
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 181 
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 129 +++
 .../mapred/nativetask/kvtest/TestInputFile.java | 213 +++
 .../nativetask/nonsorttest/NonSortTest.java |  99 +
 .../nativetask/nonsorttest/NonSortTestMR.java   |  71 +++
 .../nativetask/testutil/BytesFactory.java   | 104 +
 .../EnforceNativeOutputCollectorDelegator.java  |  48 +
 .../nativetask/testutil/MockValueClass.java |  72 +++
 .../nativetask/testutil/ResultVerifier.java | 141 
 .../testutil/ScenarioConfiguration.java |  58 +
 .../nativetask/testutil/TestConstants.java  |  67 ++
 .../test/java/system/data/testGlibcBugSpill.out |   2 -
 .../nativetask/combinertest/CombinerTest.java   | 122 ---
 .../combinertest/LargeKVCombinerTest.java   |  96 -
 .../combinertest/OldAPICombinerTest.java| 107 --
 .../nativetask/combinertest/WordCount.java  |  89 
 .../combinertest/WordCountWithOldAPI.java   |  65 --
 .../nativetask/compresstest/CompressMapper.java |  69 --
 .../nativetask/compresstest/CompressTest.java   | 134 
 .../nativetask/kvtest/HashSumReducer.java   |  48 -
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  97 -
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 181 
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 129 ---
 .../mapred/nativetask/kvtest/TestInputFile.java | 213 ---
 .../nativetask/nonsorttest/NonSortTest.java |  99 -
 .../nativetask/nonsorttest/NonSortTestMR.java   |  71 ---
 .../nativetask/testutil/BytesFactory.java   | 104 -
 .../EnforceNativeOutputCollectorDelegator.java  |  48 -
 .../nativetask/testutil/MockValueClass.java |  72 ---
 .../nativetask/testutil/ResultVerifier.java | 141 
 .../testutil/ScenarioConfiguration.java |  58 -
 .../nativetask/testutil/TestConstants.java  |  67 --
 42 files changed, 2011 insertions(+), 2012 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 166d68d..cea5a76 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -3,3 +3,4 @@ Changes for Hadoop Native Map Output Collector
 
 MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
 MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
+MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org

[17/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
new file mode 100644
index 000..9653baa
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.nativetask.util;
+
+import java.io.UnsupportedEncodingException;
+
+public class BytesUtil {
+
+  public static final int SIZEOF_INT = Integer.SIZE / Byte.SIZE;
+  public static final int SIZEOF_LONG = Long.SIZE / Byte.SIZE;
+
+  /**
+   * Encodes a string as UTF-8 bytes.
+   * @param str string to encode; may be null
+   * @return UTF-8 bytes, or null when str is null
+   * @throws RuntimeException if the JVM does not support UTF-8 (never in practice)
+   */
+  public static byte[] toBytes(String str) {
+    if (str == null) {
+      return null;
+    }
+    try {
+      return str.getBytes("utf-8");
+    } catch (final UnsupportedEncodingException e) {
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  /**
+   * Decodes UTF-8 bytes into a string.
+   * @param data bytes to decode; may be null
+   * @return decoded string, or null when data is null
+   * @throws RuntimeException if the JVM does not support UTF-8 (never in practice)
+   */
+  public static String fromBytes(byte[] data) {
+    if (data == null) {
+      return null;
+    }
+    try {
+      return new String(data, "utf-8");
+    } catch (final UnsupportedEncodingException e) {
+      throw new RuntimeException(e.getMessage());
+    }
+  }
+
+  /**
+   * Converts a byte array to an int value
+   * @param bytes byte array
+   * @return the int value
+   */
+  public static int toInt(byte[] bytes) {
+return toInt(bytes, 0, SIZEOF_INT);
+  }
+
+  /**
+   * Converts a byte array to an int value (big-endian byte order).
+   * @param bytes byte array
+   * @param offset offset into array
+   * @param length length of int (has to be {@link #SIZEOF_INT})
+   * @return the int value
+   * @throws RuntimeException if length is not {@link #SIZEOF_INT} or
+   * if there's not enough room in the array at the offset indicated.
+   */
+  public static int toInt(byte[] bytes, int offset, final int length) {
+    if (length != SIZEOF_INT || offset + length > bytes.length) {
+      throw new RuntimeException(
+        "toInt exception. length not equals to SIZE of Int or buffer overflow");
+    }
+    int n = 0;
+    // Fold bytes in big-endian order: shift 8 bits per byte, matching
+    // toLong below. NOTE(review): the archived copy reads "n = 4;" here
+    // (entity-stripped); "n <<= 8;" is the only value consistent with
+    // one-byte-per-iteration folding — confirm against upstream.
+    for (int i = offset; i < offset + length; i++) {
+      n <<= 8;
+      n ^= bytes[i] & 0xff;
+    }
+    return n;
+  }
+
+  /**
+   * Converts a byte array to a long value.
+   * @param bytes array
+   * @return the long value
+   */
+  public static long toLong(byte[] bytes) {
+return toLong(bytes, 0, SIZEOF_LONG);
+  }
+
+   /**
+   * Converts a byte array to a long value.
+   *
+   * @param bytes array of bytes
+   * @param offset offset into array
+   * @return the long value
+   */
+  public static long toLong(byte[] bytes, int offset) {
+return toLong(bytes, offset, SIZEOF_LONG);
+  }
+
+  /**
+   * Converts a byte array to a long value (big-endian byte order).
+   *
+   * @param bytes array of bytes
+   * @param offset offset into array
+   * @param length length of data (must be {@link #SIZEOF_LONG})
+   * @return the long value
+   * @throws RuntimeException if length is not {@link #SIZEOF_LONG} or
+   * if there's not enough room in the array at the offset indicated.
+   */
+  public static long toLong(byte[] bytes, int offset, final int length) {
+    if (length != SIZEOF_LONG || offset + length > bytes.length) {
+      throw new RuntimeException(
+        "toLong exception. length not equals to SIZE of Long or buffer overflow");
+    }
+    long l = 0;
+    // Fold one byte per iteration, big-endian: shift left 8, XOR in the
+    // unsigned byte value.
+    for (int i = offset; i < offset + length; i++) {
+      l <<= 8;
+      l ^= bytes[i] & 0xff;
+    }
+    return l;
+  }
+
+  /**
+   * Presumes float encoded as IEEE 754 floating-point single format
+   * @param bytes byte array
+   * @return Float made from passed byte array.
+   */
+  public static float toFloat(byte [] bytes) {
+return toFloat(bytes, 0);
+  }
+
+  /**
+   * Presumes float encoded as IEEE 

[14/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest_main.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest_main.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest_main.cc
new file mode 100644
index 000..a09bbe0
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest/gtest_main.cc
@@ -0,0 +1,39 @@
+// Copyright 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <iostream>
+
+#include <gtest/gtest.h>
+
+// Standard gtest entry point: initialize the framework (consumes gtest
+// command-line flags from argc/argv) and run every registered test.
+GTEST_API_ int main(int argc, char **argv) {
+  std::cout << "Running main() from gtest_main.cc\n";
+
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/lz4/lz4.c
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/lz4/lz4.c
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/lz4/lz4.c
new file mode 100644
index 000..345a436
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/lz4/lz4.c
@@ -0,0 +1,740 @@
+/*
+   LZ4 - Fast LZ compression algorithm
+   Copyright (C) 2011-2012, Yann Collet.
+   BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php)
+
+   Redistribution and use in source and binary forms, with or without
+   modification, are permitted provided that the following conditions are
+   met:
+  
+   * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+   * Redistributions in binary form must reproduce the above
+   copyright notice, this list of conditions and the following disclaimer
+   in the documentation and/or other materials provided with the
+   distribution.
+  
+   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+   AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+   LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+   A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+   OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+   SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+   LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+   DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+   THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+   (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+   OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+//**
+// Compilation Directives
+//**
+#if __STDC_VERSION__ = 199901L
+  /* restrict is a known keyword */
+#else
+#define restrict  // Disable restrict
+#endif
+
+#ifdef _MSC_VER
+#define inline __forceinline
+#endif
+
+#ifdef __GNUC__
+#define 

[43/64] [abbrv] MAPREDUCE-5977. Fix or suppress native-task gcc warnings. Contributed by Manu Zhang.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index 11712ab..60bb6f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -622,7 +622,6 @@ typedef void (*ANY_FUNC_PTR)();
 #define DEFINE_NATIVE_LIBRARY(Library) \
  static std::map<std::string, NativeTask::ObjectCreatorFunc> 
Library##ClassMap__; \
  extern "C" void * Library##GetFunctionGetter(const std::string & name) { \
-  void * ret = NULL; \
  std::map<std::string, NativeTask::ObjectCreatorFunc>::iterator itr = 
Library##ClassMap__.find(name); \
  if (itr != Library##ClassMap__.end()) { \
 return (void *)(itr->second); \
@@ -630,7 +629,6 @@ typedef void (*ANY_FUNC_PTR)();
   return NULL; \
 } \
  extern "C" NativeTask::ObjectCreatorFunc Library##GetObjectCreator(const 
std::string & name) { \
-NativeObject * ret = NULL; \
 std::map<std::string, NativeTask::ObjectCreatorFunc>::iterator itr = 
Library##ClassMap__.find(name); \
 if (itr != Library##ClassMap__.end()) { \
   return itr->second; \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
index a59b9d8..59cd8fb 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/codec/BlockCodec.cc
@@ -23,7 +23,7 @@
 namespace NativeTask {
 
 BlockCompressStream::BlockCompressStream(OutputStream * stream, uint32_t 
bufferSizeHint)
-: CompressStream(stream), _compressedBytesWritten(0), _tempBufferSize(0), 
_tempBuffer(NULL) {
+: CompressStream(stream), _tempBuffer(NULL), _tempBufferSize(0), 
_compressedBytesWritten(0) {
   _hint = bufferSizeHint;
   _blockMax = bufferSizeHint / 2 * 3;
 }
@@ -68,7 +68,7 @@ uint64_t BlockCompressStream::compressedBytesWritten() {
 //
 
 BlockDecompressStream::BlockDecompressStream(InputStream * stream, uint32_t 
bufferSizeHint)
-: DecompressStream(stream), _tempBufferSize(0), _tempBuffer(NULL) {
+: DecompressStream(stream), _tempBuffer(NULL), _tempBufferSize(0) {
   _hint = bufferSizeHint;
   _blockMax = bufferSizeHint / 2 * 3;
   _tempDecompressBuffer = NULL;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
index bc022a8..15ce2ea 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/BatchHandler.h
@@ -103,10 +103,10 @@ protected:
*/
   inline void output(const char * buff, uint32_t length) {
 while (length > 0) {
-  if (length > _out.remain()) {
+  uint32_t remain = _out.remain();
+  if (length > remain) {
 flushOutput();
   }
-  uint32_t remain = _out.remain();
   uint32_t cp = length < remain ? length : remain;
   simple_memcpy(_out.current(), buff, cp);
   buff += cp;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fad4524c/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/handler/CombineHandler.cc
--
diff --git 

[32/64] [abbrv] git commit: MAPREDUCE-5991. native-task should not run unit tests if native profile is not enabled. Contributed by Binglin Chang.

2014-09-12 Thread todd
MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. Contributed by Binglin Chang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613072 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ea9e5b7b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ea9e5b7b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ea9e5b7b

Branch: refs/heads/trunk
Commit: ea9e5b7baae34c7ba660681d1e5aceb9c1047e6f
Parents: 6635180
Author: Binglin Chang bch...@apache.org
Authored: Thu Jul 24 11:44:09 2014 +
Committer: Binglin Chang bch...@apache.org
Committed: Thu Jul 24 11:44:09 2014 +

--
 .../hadoop-mapreduce-client-common/pom.xml  | 11 ++
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 35 
 hadoop-project/pom.xml  |  6 
 3 files changed, 45 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index 7ef7d3b..48db5c8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -84,6 +84,17 @@
   /execution
 /executions
   /plugin
+  <plugin>
+    <artifactId>maven-jar-plugin</artifactId>
+    <executions>
+      <execution>
+        <goals>
+          <goal>test-jar</goal>
+        </goals>
+        <phase>test-compile</phase>
+      </execution>
+    </executions>
+  </plugin>
 /plugins
   /build
 /project

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 40b6520..2cb483e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -63,6 +63,12 @@
   typetest-jar/type
   scopetest/scope
 /dependency
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
   /dependencies
 
   build
@@ -82,14 +88,7 @@
 <include>**/buffer/Test*.java</include>
 <include>**/handlers/Test*.java</include>
 <include>**/serde/Test*.java</include>
-<include>**/combinertest/*Test.java</include>
-<include>**/compresstest/*Test.java</include>
-<include>**/nonsorttest/*Test.java</include>
-<include>**/kvtest/*Test.java</include>
   </includes>
-  <additionalClasspathElements>
-
<additionalClasspathElement>${basedir}/../hadoop-mapreduce-client-common/target/classes</additionalClasspathElement>
-  </additionalClasspathElements>
 /configuration
   /plugin
 /plugins
@@ -184,6 +183,28 @@
   /execution
 /executions
   /plugin
+  <plugin>
+    <groupId>org.apache.maven.plugins</groupId>
+    <artifactId>maven-surefire-plugin</artifactId>
+    <configuration>
+      <properties>
+        <property>
+          <name>listener</name>
+          <value>org.apache.hadoop.test.TimedOutTestsListener</value>
+        </property>
+      </properties>
+      <includes>
+        <include>**/TestTaskContext.java</include>
+        <include>**/buffer/Test*.java</include>
+        <include>**/handlers/Test*.java</include>
+        <include>**/serde/Test*.java</include>
+        <include>**/combinertest/*Test.java</include>
+        <include>**/compresstest/*Test.java</include>
+        <include>**/nonsorttest/*Test.java</include>
+        <include>**/kvtest/*Test.java</include>
+      </includes>
+    </configuration>
+  </plugin>
 /plugins
   /build
 /profile

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ea9e5b7b/hadoop-project/pom.xml
--
diff --git 

[49/64] [abbrv] MAPREDUCE-6056. native-task: move system test working dir to target dir and cleanup test config xml files (Manu Zhang via bchang)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
deleted file mode 100644
index 46b3ea7..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/resources/test-snappy-compress-conf.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-<property>
-<name>mapred.output.compress</name>
-<value>false</value>
-</property>
-<property>
-<name>mapreduce.reduce.class</name>
-<value>org.apache.hadoop.mapred.nativetask.kvtest.HashSumReducer</value>
-</property>
-
-<property>
-<name>mapred.output.value.class</name>
-<value>org.apache.hadoop.io.IntWritable</value>
-</property>
-<property>
-<name>nativetask.compress.filesize</name>
-<value>10</value>
-</property>
-
-<property>
-<name>mapred.compress.map.output</name>
-<value>true</value>
-</property>
-<property>
-<name>mapred.output.compression.type</name>
-<value>BLOCK</value>
-</property>
-<property>
-<name>mapred.map.output.compression.codec</name>
-<value>org.apache.hadoop.io.compress.SnappyCodec</value>
-</property>
-<property>
-<name>hadoop.native.lib</name>
-<value>true</value>
-</property>
-</configuration>



[64/64] [abbrv] git commit: Merge MR-2841 changes into main MapReduce CHANGES.txt file

2014-09-12 Thread todd
Merge MR-2841 changes into main MapReduce CHANGES.txt file


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3c918172
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3c918172
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3c918172

Branch: refs/heads/trunk
Commit: 3c9181722b05a9192f5440ea8f3f77231f84eac6
Parents: 8a3a327
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 12 18:35:13 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 12 18:35:13 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  | 26 
 hadoop-mapreduce-project/CHANGES.txt| 65 
 2 files changed, 65 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3c918172/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
deleted file mode 100644
index cfc9412..000
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ /dev/null
@@ -1,26 +0,0 @@
-Changes for Hadoop Native Map Output Collector
-
-
-MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
-MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
-MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
-MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
-MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
-MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
-MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
-MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
-MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
-MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
-MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
-MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
-MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
-MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
-MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
-MAPREDUCE-6054. native-task: Speed up tests (todd)
-MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Binglin Chang)
-MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)
-MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup (todd)
-MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)
-MAPREDUCE-6069. native-task: Lint/style fixes and removal of unused code (todd)
-MAPREDUCE-6074. native-task: fix release audit, javadoc, javac warnings (todd)
-MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3c918172/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index f1435d2..5d1e5f5 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -20,6 +20,9 @@ Trunk (Unreleased)
 MAPREDUCE-5910. Make MR AM resync with RM in case of work-preserving
 RM-restart. (Rohith via jianhe)
 
+MAPREDUCE-2841. Add a native implementation of MapOutputCollector.
+(see section below for detailed breakdown)
+
   IMPROVEMENTS
 
 MAPREDUCE-3481. [Gridmix] Improve Gridmix STRESS mode. (amarrk)
@@ -157,6 +160,68 @@ Trunk (Unreleased)
 MAPREDUCE-5972. Fix typo 'programatically' in job.xml (and a few other
 places) (Akira AJISAKA via aw)
 
+  BREAKDOWN OF MAPREDUCE-2841 (NATIVE TASK) SUBTASKS
+
+MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by
+Binglin Chang
+
+MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
+
+MAPREDUCE-5996. native-task: Rename system tests into standard directory
+layout (todd)
+
+MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
+
+MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
+
+MAPREDUCE-5991. native-task should not run unit tests if native profile is
+not enabled. (Binglin Chang)
+
+MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
+
+MAPREDUCE-6005. native-task

[55/64] [abbrv] MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
index 2258726..f81d94f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
@@ -74,13 +74,14 @@ public class NonSortTest {
   public void startUp() throws Exception {
 Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
 Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
-final ScenarioConfiguration configuration = new ScenarioConfiguration();
-configuration.addNonSortTestConf();
-final FileSystem fs = FileSystem.get(configuration);
+final ScenarioConfiguration conf = new ScenarioConfiguration();
+conf.addNonSortTestConf();
+final FileSystem fs = FileSystem.get(conf);
 final Path path = new Path(TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR);
 if (!fs.exists(path)) {
-  new 
TestInputFile(configuration.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE,
 1000), Text.class.getName(),
-  Text.class.getName(), 
configuration).createSequenceTestFile(path.toString());
+  int filesize = 
conf.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE, 1000);
+  new TestInputFile(filesize, Text.class.getName(),
+  Text.class.getName(), conf).createSequenceTestFile(path.toString());
 }
 fs.close();
   }
@@ -93,7 +94,8 @@ public class NonSortTest {
   }
 
 
-  private Job getJob(Configuration conf, String jobName, String inputpath, 
String outputpath) throws IOException {
+  private Job getJob(Configuration conf, String jobName,
+ String inputpath, String outputpath) throws IOException {
 final FileSystem fs = FileSystem.get(conf);
 if (fs.exists(new Path(outputpath))) {
   fs.delete(new Path(outputpath), true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
index 4ca2449..4092e5f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
@@ -36,7 +36,8 @@ public class NonSortTestMR {
 private final Text word = new Text();
 
 @Override
-public void map(Object key, Text value, Context context) throws 
IOException, InterruptedException {
+public void map(Object key, Text value, Context context)
+  throws IOException, InterruptedException {
   final String line = value.toString();
   final StringTokenizer tokenizer = new StringTokenizer(line);
   while (tokenizer.hasMoreTokens()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
index 004e8b8..fd5b100 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
+++ 

[42/64] [abbrv] git commit: MAPREDUCE-6025: distribute nativetask.so to right folder (Manu Zhang)

2014-09-12 Thread todd
MAPREDUCE-6025: distribute nativetask.so to right folder (Manu Zhang)

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1618120 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6d39367f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6d39367f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6d39367f

Branch: refs/heads/trunk
Commit: 6d39367f6a95383b634d79b6df35a278e0d89fe0
Parents: eace4dd
Author: Sean Zhong seanzh...@apache.org
Authored: Fri Aug 15 08:55:49 2014 +
Committer: Sean Zhong seanzh...@apache.org
Committed: Fri Aug 15 08:55:49 2014 +

--
 hadoop-dist/pom.xml  |  1 -
 hadoop-mapreduce-project/pom.xml | 28 
 2 files changed, 28 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d39367f/hadoop-dist/pom.xml
--
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 62a27f4..149f404 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -122,7 +122,6 @@
   run cp -r 
$ROOT/hadoop-hdfs-project/hadoop-hdfs-nfs/target/hadoop-hdfs-nfs-${project.version}/*
 .
   run cp -r 
$ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
   run cp -r 
$ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
-  run cp -r 
$ROOT/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/target/hadoop-mapreduce-client-nativetask-${project.version}/*
 .
   run cp -r 
$ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/*
 .
   echo
   echo Hadoop dist layout available at: 
${project.build.directory}/hadoop-${project.version}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/6d39367f/hadoop-mapreduce-project/pom.xml
--
diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
index 772a2a3..9110942 100644
--- a/hadoop-mapreduce-project/pom.xml
+++ b/hadoop-mapreduce-project/pom.xml
@@ -236,6 +236,34 @@
   /plugin
   plugin
 groupIdorg.apache.maven.plugins/groupId
+artifactIdmaven-antrun-plugin/artifactId
+executions
+  execution
+iddist/id
+phaseprepare-package/phase
+goals
+  goalrun/goal
+/goals
+configuration
+  target
+echo 
file=${project.build.directory}/dist-copynativelibs.sh
+  
LIB_DIR=${mr.basedir}/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/target/native/target/usr/local/lib
+  if [ -d $${LIB_DIR} ] ; then
+
TARGET_DIR=${project.build.directory}/${project.artifactId}-${project.version}/lib/native
+mkdir -p $${TARGET_DIR}
+cp -R $${LIB_DIR}/lib* $${TARGET_DIR}
+  fi
+/echo
+exec executable=sh dir=${project.build.directory} 
failonerror=true
+  arg line=./dist-copynativelibs.sh/
+/exec
+  /target
+/configuration
+  /execution
+/executions
+  /plugin
+  plugin
+groupIdorg.apache.maven.plugins/groupId
 artifactIdmaven-assembly-plugin/artifactId
 dependencies
   dependency



[58/64] [abbrv] git commit: MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by 
Todd Lipcon.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/683987be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/683987be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/683987be

Branch: refs/heads/trunk
Commit: 683987be7c160e67ddb8534eeb3c464bbe2796dd
Parents: 0032216
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 13:07:24 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:44:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  |  22 --
 .../src/CMakeLists.txt  |   6 -
 .../hadoop/mapred/nativetask/Constants.java |  13 -
 .../hadoop/mapred/nativetask/DataReceiver.java  |   3 -
 .../mapred/nativetask/ICombineHandler.java  |   2 -
 .../mapred/nativetask/INativeHandler.java   |   8 -
 .../mapred/nativetask/NativeBatchProcessor.java |  11 -
 .../NativeMapOutputCollectorDelegator.java  |  10 +-
 .../hadoop/mapred/nativetask/NativeRuntime.java |  32 +-
 .../hadoop/mapred/nativetask/Platform.java  |  11 +-
 .../nativetask/handlers/BufferPullee.java   |   3 +-
 .../nativetask/handlers/BufferPuller.java   |   2 -
 .../nativetask/handlers/BufferPushee.java   |   6 +-
 .../nativetask/handlers/BufferPusher.java   |   3 +-
 .../nativetask/handlers/CombinerHandler.java|  16 +-
 .../mapred/nativetask/handlers/IDataLoader.java |   1 -
 .../handlers/NativeCollectorOnlyHandler.java|   6 +-
 .../serde/BytesWritableSerializer.java  |   3 +-
 .../mapred/nativetask/serde/IKVSerializer.java  |  31 +-
 .../mapred/nativetask/serde/KVSerializer.java   |   6 +-
 .../nativetask/serde/NativeSerialization.java   |  11 +-
 .../mapred/nativetask/util/BytesUtil.java   |   2 +-
 .../nativetask/util/LocalJobOutputFiles.java|  58 +---
 .../nativetask/util/NativeTaskOutput.java   |  55 +---
 .../nativetask/util/NativeTaskOutputFiles.java  |  72 ++---
 .../mapred/nativetask/util/ReadWriteBuffer.java |  16 +-
 .../src/main/native/cityhash/city.cc| 307 ---
 .../src/main/native/cityhash/city.h |  90 --
 .../src/main/native/src/NativeTask.h| 137 -
 .../src/main/native/src/codec/BlockCodec.cc |   5 +-
 .../src/main/native/src/codec/BlockCodec.h  |   2 +-
 .../src/main/native/src/codec/GzipCodec.cc  |   2 +-
 .../src/main/native/src/codec/GzipCodec.h   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.cc   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.h|   2 +-
 .../src/main/native/src/codec/SnappyCodec.cc|   2 +-
 .../src/main/native/src/codec/SnappyCodec.h |   2 +-
 .../native/src/handler/AbstractMapHandler.cc|   9 +-
 .../src/main/native/src/handler/BatchHandler.cc |   9 +-
 .../main/native/src/handler/CombineHandler.cc   |   4 +-
 .../main/native/src/handler/CombineHandler.h|   2 +-
 .../src/handler/MCollectorOutputHandler.cc  |   6 +-
 .../src/main/native/src/lib/BufferStream.cc | 116 +--
 .../src/main/native/src/lib/BufferStream.h  |  39 +--
 .../src/main/native/src/lib/Buffers.cc  |  77 +
 .../src/main/native/src/lib/Buffers.h   |  63 +---
 .../src/main/native/src/lib/Combiner.cc |  73 -
 .../src/main/native/src/lib/Combiner.h  |  18 +-
 .../src/main/native/src/lib/Compressions.cc |   6 +-
 .../src/main/native/src/lib/Compressions.h  |   2 +-
 .../src/main/native/src/lib/FileSystem.cc   |  23 +-
 .../src/main/native/src/lib/FileSystem.h|   2 +-
 .../src/main/native/src/lib/IFile.cc|   6 +-
 .../src/main/native/src/lib/IFile.h |  10 +-
 .../src/main/native/src/lib/Iterator.cc |   6 +-
 .../src/main/native/src/lib/Log.cc  |   2 +-
 .../main/native/src/lib/MapOutputCollector.cc   |  58 ++--
 .../main/native/src/lib/MapOutputCollector.h|  16 +-
 .../src/main/native/src/lib/MapOutputSpec.cc|   4 +-
 .../src/main/native/src/lib/MapOutputSpec.h |   4 +-
 .../src/main/native/src/lib/MemoryBlock.cc  |  24 +-
 .../src/main/native/src/lib/MemoryPool.h|   4 +-
 .../src/main/native/src/lib/Merge.cc|   5 +-
 .../src/main/native/src/lib/Merge.h |   8 +-
 .../src/main/native/src/lib/MinHeap.h   |   2 +-
 .../src/main/native/src/lib/NativeLibrary.cc|   7 +-
 .../main/native/src/lib/NativeObjectFactory.cc  |  19 +-
 .../main/native/src/lib/NativeRuntimeJniImpl.cc |  27 +-
 .../src/main/native/src/lib/NativeTask.cc   |  44 +--
 .../src/main/native/src/lib/PartitionBucket.cc  |  25 +-
 .../src/main/native/src/lib/PartitionBucket.h   |  16 +-
 .../native/src/lib/PartitionBucketIterator.cc   |  22 +-
 .../native/src/lib/PartitionBucketIterator.h

[51/64] [abbrv] git commit: Merge remote-tracking branch 'origin/trunk' into MR-2841

2014-09-12 Thread todd
Merge remote-tracking branch 'origin/trunk' into MR-2841

Conflicts:

hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cce7d1e2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cce7d1e2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cce7d1e2

Branch: refs/heads/trunk
Commit: cce7d1e2f96ff75fe19ab2879ddd90a898ca5b18
Parents: 7ade9b0 727331b
Author: Todd Lipcon t...@apache.org
Authored: Tue Sep 2 15:55:54 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Tue Sep 2 15:55:54 2014 -0700

--
 .gitignore  |1 +
 BUILDING.txt|   22 +
 dev-support/test-patch.sh   |   26 +-
 .../main/resources/assemblies/hadoop-dist.xml   |7 +
 .../dev-support/findbugsExcludeFile.xml |   38 +
 hadoop-common-project/hadoop-auth/pom.xml   |   26 +
 .../authentication/client/AuthenticatedURL.java |   36 +-
 .../server/AuthenticationFilter.java|  109 +-
 .../server/KerberosAuthenticationHandler.java   |   21 +-
 .../server/PseudoAuthenticationHandler.java |   21 +-
 .../util/RandomSignerSecretProvider.java|   49 +
 .../util/RolloverSignerSecretProvider.java  |  139 ++
 .../security/authentication/util/Signer.java|   46 +-
 .../util/SignerSecretProvider.java  |   62 +
 .../util/StringSignerSecretProvider.java|   49 +
 .../client/AuthenticatorTestCase.java   |  137 +-
 .../client/TestAuthenticatedURL.java|   38 +-
 .../client/TestKerberosAuthenticator.java   |   58 +-
 .../server/TestAuthenticationFilter.java|  118 +-
 .../util/TestRandomSignerSecretProvider.java|   63 +
 .../util/TestRolloverSignerSecretProvider.java  |   79 +
 .../authentication/util/TestSigner.java |   85 +-
 .../util/TestStringSignerSecretProvider.java|   33 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  502 +--
 .../dev-support/findbugsExcludeFile.xml |5 +
 hadoop-common-project/hadoop-common/pom.xml |   34 +-
 .../hadoop-common/src/CMakeLists.txt|   34 +
 .../hadoop-common/src/JNIFlags.cmake|6 +
 .../hadoop-common/src/config.h.cmake|1 +
 .../src/contrib/bash-tab-completion/hadoop.sh   |   28 +-
 .../hadoop-common/src/main/bin/hadoop   |  229 +--
 .../src/main/bin/hadoop-config.cmd  |   10 +-
 .../hadoop-common/src/main/bin/hadoop-config.sh |  409 ++---
 .../hadoop-common/src/main/bin/hadoop-daemon.sh |  214 +--
 .../src/main/bin/hadoop-daemons.sh  |   37 +-
 .../src/main/bin/hadoop-functions.sh| 1066 ++
 .../src/main/bin/hadoop-layout.sh.example   |   93 ++
 .../hadoop-common/src/main/bin/hadoop.cmd   |   33 +-
 .../hadoop-common/src/main/bin/rcc  |   52 +-
 .../hadoop-common/src/main/bin/slaves.sh|   51 +-
 .../hadoop-common/src/main/bin/start-all.sh |   38 +-
 .../hadoop-common/src/main/bin/stop-all.sh  |   36 +-
 .../hadoop-common/src/main/conf/hadoop-env.sh   |  432 +-
 .../org/apache/hadoop/conf/Configuration.java   |   78 +-
 .../hadoop/conf/ReconfigurationServlet.java |2 +
 .../apache/hadoop/crypto/AesCtrCryptoCodec.java |   67 +
 .../org/apache/hadoop/crypto/CipherSuite.java   |  115 ++
 .../org/apache/hadoop/crypto/CryptoCodec.java   |  179 +++
 .../apache/hadoop/crypto/CryptoInputStream.java |  680 +
 .../hadoop/crypto/CryptoOutputStream.java   |  280 
 .../apache/hadoop/crypto/CryptoStreamUtils.java |   70 +
 .../org/apache/hadoop/crypto/Decryptor.java |   72 +
 .../org/apache/hadoop/crypto/Encryptor.java |   71 +
 .../hadoop/crypto/JceAesCtrCryptoCodec.java |  165 +++
 .../hadoop/crypto/OpensslAesCtrCryptoCodec.java |  164 +++
 .../org/apache/hadoop/crypto/OpensslCipher.java |  289 
 .../hadoop/crypto/key/JavaKeyStoreProvider.java |  248 +++-
 .../apache/hadoop/crypto/key/KeyProvider.java   |   32 +-
 .../crypto/key/KeyProviderCryptoExtension.java  |   91 +-
 .../KeyProviderDelegationTokenExtension.java|8 +-
 .../hadoop/crypto/key/KeyProviderExtension.java |1 +
 .../org/apache/hadoop/crypto/key/KeyShell.java  |   89 +-
 .../apache/hadoop/crypto/key/UserProvider.java  |5 +-
 .../crypto/key/kms/KMSClientProvider.java   |  117 +-
 .../hadoop/crypto/key/kms/KMSRESTConstants.java |2 +-
 .../crypto/random/OpensslSecureRandom.java  |  122 ++
 .../hadoop/crypto/random/OsSecureRandom.java|  115 ++
 .../apache/hadoop/fs/AbstractFileSystem.java|  103 +-
 .../apache/hadoop/fs/ChecksumFileSystem.java|8 +-
 .../java/org/apache/hadoop/fs/ChecksumFs.java   |8 +-
 .../hadoop/fs/CommonConfigurationKeys.java

[50/64] [abbrv] git commit: MAPREDUCE-6056. native-task: move system test working dir to target dir and cleanup test config xml files (Manu Zhang via bchang)

2014-09-12 Thread todd
MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7ade9b04
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7ade9b04
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7ade9b04

Branch: refs/heads/trunk
Commit: 7ade9b04ea0a46a51564d23c8d715ddb1b9ca6d0
Parents: 17cd0fa
Author: Binglin Chang bch...@apache.org
Authored: Tue Sep 2 16:26:09 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Tue Sep 2 16:26:09 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../nativetask/combinertest/CombinerTest.java   |  65 
 .../combinertest/LargeKVCombinerTest.java   |  94 +--
 .../combinertest/OldAPICombinerTest.java|  27 ++--
 .../nativetask/compresstest/CompressMapper.java |  38 ++---
 .../nativetask/compresstest/CompressTest.java   |  90 +++
 .../TestNativeCollectorOnlyHandler.java |  20 ++-
 .../hadoop/mapred/nativetask/kvtest/KVTest.java |  83 +-
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 104 ++--
 .../nativetask/nonsorttest/NonSortTest.java |  30 ++--
 .../nativetask/testutil/TestConstants.java  |  56 ---
 .../src/test/resources/common_conf.xml  |  71 +
 .../src/test/resources/kvtest-conf.xml  | 158 +--
 .../src/test/resources/native_conf.xml  |  33 ++--
 .../src/test/resources/normal_conf.xml  |  26 ++-
 .../src/test/resources/test-combiner-conf.xml   |  54 +++
 .../src/test/resources/test-compress-conf.xml   |  50 ++
 .../test/resources/test-gzip-compress-conf.xml  |  39 -
 .../test/resources/test-lz4-compress-conf.xml   |  39 -
 .../src/test/resources/test-nonsort-conf.xml|  49 +++---
 .../resources/test-snappy-compress-conf.xml |  41 -
 21 files changed, 588 insertions(+), 580 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index baa88c1..7c9558e 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -18,3 +18,4 @@ MAPREDUCE-6035. native-task: sources/test-sources jar 
distribution (Manu Zhang v
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
successful (Binglin Chang)
+MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7ade9b04/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
index abbe28e..d7f05be 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
@@ -38,11 +38,14 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.junit.AfterClass;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
+import java.io.IOException;
+
 public class CombinerTest {
   private FileSystem fs;
   private String inputpath;
@@ -50,33 +53,25 @@ public class CombinerTest {
   private String hadoopoutputpath;
 
   @Test
-  public void testWordCountCombiner() {
-try {
-
-  final Configuration nativeConf = 
ScenarioConfiguration.getNativeConfiguration();
-  nativeConf.addResource(TestConstants.COMBINER_CONF_PATH);
-  final Job nativejob = getJob(nativewordcount

[60/64] [abbrv] git commit: MAPREDUCE-6074. native-task: Fix release audit warnings

2014-09-12 Thread todd
MAPREDUCE-6074. native-task: Fix release audit warnings


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7c91f9b1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7c91f9b1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7c91f9b1

Branch: refs/heads/trunk
Commit: 7c91f9b1484d487e792dca051fbd418697049422
Parents: 4b3f1e2
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 13:41:18 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 19:58:41 2014 -0700

--
 LICENSE.txt |  33 +++
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  | 266 ---
 .../hadoop-mapreduce-client-nativetask/pom.xml  |  12 +-
 .../mapred/nativetask/INativeComparable.java|  27 +-
 .../src/main/native/COPYING |  87 --
 ...oop_mapred_nativetask_NativeBatchProcessor.h |  54 
 ...che_hadoop_mapred_nativetask_NativeRuntime.h |  66 -
 .../src/main/native/test.sh |  11 +
 .../nativetask/buffer/TestBufferPushPull.java   |  13 +-
 .../buffer/TestByteBufferReadWrite.java |  16 +-
 .../nativetask/combinertest/CombinerTest.java   |   4 +-
 .../combinertest/OldAPICombinerTest.java|   5 +-
 .../nativetask/combinertest/WordCount.java  |   3 +-
 .../nativetask/compresstest/CompressMapper.java |   4 +-
 .../nativetask/compresstest/CompressTest.java   |   2 +-
 .../nativetask/handlers/TestCombineHandler.java |  13 +-
 .../TestNativeCollectorOnlyHandler.java |  17 +-
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |   3 +-
 .../mapred/nativetask/kvtest/TestInputFile.java |   7 +-
 .../nativetask/nonsorttest/NonSortTest.java |   2 +-
 .../serde/TestNativeSerialization.java  |  10 +-
 .../nativetask/testutil/BytesFactory.java   |   2 +-
 .../mapred/nativetask/utils/TestBytesUtil.java  |  20 +-
 hadoop-mapreduce-project/pom.xml|   2 +-
 25 files changed, 143 insertions(+), 537 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/LICENSE.txt
--
diff --git a/LICENSE.txt b/LICENSE.txt
index 946a6df..99989f1 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -288,3 +288,36 @@ lz4_encoder.h,lz4hc.h,lz4hc.c,lz4hc_encoder.h},
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
 */
+
+
+For 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest
+-
+Copyright 2008, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index ea17907..539e7be 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -22,3 +22,4 @@ MAPREDUCE-6056. native-task: move system test working dir to 
target dir and clea
 MAPREDUCE-6055. native-task: findbugs, interface annotations

[54/64] [abbrv] git commit: MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)

2014-09-12 Thread todd
MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/00322161
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/00322161
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/00322161

Branch: refs/heads/trunk
Commit: 00322161b5d4d54770c2f0823e036537edecf5bf
Parents: 1081d9c
Author: Binglin Chang bch...@apache.org
Authored: Fri Sep 5 14:20:39 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Fri Sep 5 14:20:39 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../NativeMapOutputCollectorDelegator.java  |  4 +
 .../mapred/nativetask/StatusReportChecker.java  |  5 --
 .../src/handler/MCollectorOutputHandler.cc  |  3 +-
 .../src/main/native/src/lib/IFile.cc|  6 +-
 .../src/main/native/src/lib/IFile.h |  3 +-
 .../main/native/src/lib/MapOutputCollector.cc   | 85 ++--
 .../main/native/src/lib/MapOutputCollector.h|  8 +-
 .../src/main/native/src/lib/Merge.cc| 24 --
 .../src/main/native/src/lib/PartitionBucket.h   |  2 -
 .../src/main/native/src/lib/TaskCounters.cc | 10 +--
 .../src/main/native/src/lib/TaskCounters.h  |  8 --
 .../nativetask/combinertest/CombinerTest.java   |  8 +-
 .../combinertest/LargeKVCombinerTest.java   |  6 +-
 .../nativetask/compresstest/CompressTest.java   |  3 +
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 74 +++--
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 82 +--
 .../nativetask/nonsorttest/NonSortTest.java |  6 +-
 .../nativetask/testutil/ResultVerifier.java | 24 +-
 19 files changed, 179 insertions(+), 183 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/00322161/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 269a2f6..279b960 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -20,3 +20,4 @@ MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
successful (Binglin Chang)
 MAPREDUCE-6056. native-task: move system test working dir to target dir and 
cleanup test config xml files (Manu Zhang via bchang)
 MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup (todd)
+MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/00322161/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
index 224b95b..828d7df 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeMapOutputCollectorDelegator.java
@@ -34,6 +34,7 @@ import 
org.apache.hadoop.mapred.nativetask.serde.INativeSerializer;
 import org.apache.hadoop.mapred.nativetask.serde.NativeSerialization;
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.QuickSort;
 
 /**
@@ -46,6 +47,7 @@ public class NativeMapOutputCollectorDelegatorK, V 
implements MapOutputCollect
   private JobConf job;
   private NativeCollectorOnlyHandlerK, V handler;
 
+  private Context context;
   private StatusReportChecker updater;
 
   @Override
@@ -58,6 +60,7 @@ public class NativeMapOutputCollectorDelegatorK, V 
implements MapOutputCollect
 handler.close();
 if (null != updater) {
   updater.stop();
+  NativeRuntime.reportStatus(context.getReporter());
 }
   }
 
@@ -69,6 +72,7 @@ public class NativeMapOutputCollectorDelegatorK, V 
implements MapOutputCollect
   @SuppressWarnings(unchecked)
   @Override
   public void init(Context context) throws IOException, ClassNotFoundException 
{
+this.context

[30/64] [abbrv] git commit: MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common. 
Contributed by Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613034 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/77acc70d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/77acc70d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/77acc70d

Branch: refs/heads/trunk
Commit: 77acc70df5d64055ea809222f3d2f0d66c611196
Parents: 5149a8a
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 08:20:25 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 08:20:25 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../mapred/nativetask/NativeBatchProcessor.java |   8 +-
 .../nativetask/buffer/DirectBufferPool.java |  93 -
 .../mapred/nativetask/buffer/InputBuffer.java   |  17 +-
 .../nativetask/buffer/TestDirectBufferPool.java | 201 ---
 5 files changed, 20 insertions(+), 300 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index cea5a76..e12f743 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -4,3 +4,4 @@ Changes for Hadoop Native Map Output Collector
 MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
 MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
+MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
index fd68ea6..837da0e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeBatchProcessor.java
@@ -24,12 +24,13 @@ import java.nio.ByteBuffer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapred.nativetask.buffer.BufferType;
-import org.apache.hadoop.mapred.nativetask.buffer.DirectBufferPool;
 import org.apache.hadoop.mapred.nativetask.buffer.InputBuffer;
 import org.apache.hadoop.mapred.nativetask.buffer.OutputBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ReadWriteBuffer;
 import org.apache.hadoop.mapred.nativetask.util.ConfigUtil;
+import org.apache.hadoop.util.DirectBufferPool;
 
 /**
  * used to create channel, transfer data and command between Java and native
@@ -126,9 +127,8 @@ public class NativeBatchProcessor implements INativeHandler 
{
   NativeRuntime.releaseNativeObject(nativeHandlerAddr);
   nativeHandlerAddr = 0;
 }
-if (null != in  null != in.getByteBuffer()  
in.getByteBuffer().isDirect()) {
-  DirectBufferPool.getInstance().returnBuffer(in.getByteBuffer());
-}
+IOUtils.cleanup(LOG, in);
+in = null;
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/77acc70d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/DirectBufferPool.java
deleted file mode 100644
index bd3c6bb..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src

[40/64] [abbrv] git commit: MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

2014-09-12 Thread todd
MAPREDUCE-6026. native-task: fix logging. Contributed by Manu Zhang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617878 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/808bf8ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/808bf8ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/808bf8ba

Branch: refs/heads/trunk
Commit: 808bf8bac1d4bdf83fb3bdf6e096b4038c9567ce
Parents: 886338f
Author: Todd Lipcon t...@apache.org
Authored: Thu Aug 14 04:53:36 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Aug 14 04:53:36 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt   |  1 +
 .../hadoop/mapred/nativetask/HadoopPlatform.java |  5 +++--
 .../hadoop/mapred/nativetask/Platforms.java  |  5 +++--
 .../combinertest/LargeKVCombinerTest.java|  5 -
 .../hadoop/mapred/nativetask/kvtest/KVTest.java  |  8 ++--
 .../mapred/nativetask/kvtest/LargeKVTest.java|  7 +--
 .../mapred/nativetask/kvtest/TestInputFile.java  |  9 ++---
 .../EnforceNativeOutputCollectorDelegator.java   |  6 --
 .../src/test/resources/log4j.properties  | 19 +++
 9 files changed, 51 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index c3c6ab9..123eb6d 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -13,3 +13,4 @@ MAPREDUCE-5984. native-task: Reuse lz4 sources in 
hadoop-common (Binglin Chang)
 MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
 MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
 MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)
+MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
index 1c4ede5..7599bb8 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/HadoopPlatform.java
@@ -19,6 +19,8 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.BooleanWritable;
 import org.apache.hadoop.io.ByteWritable;
 import org.apache.hadoop.io.BytesWritable;
@@ -33,10 +35,9 @@ import org.apache.hadoop.io.VLongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.nativetask.serde.*;
-import org.apache.log4j.Logger;
 
 public class HadoopPlatform extends Platform {
-  private static final Logger LOG = Logger.getLogger(HadoopPlatform.class);
+  private static final Log LOG = LogFactory.getLog(HadoopPlatform.class);
 
   public HadoopPlatform() throws IOException {
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/808bf8ba/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
index 154bbc8..d0a8496 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/Platforms.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client

[21/64] [abbrv] git commit: MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang

2014-09-12 Thread todd
MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1612659 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e788e8b6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e788e8b6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e788e8b6

Branch: refs/heads/trunk
Commit: e788e8b6eeafc1d20b5d51f5e2374610bc37304b
Parents: b2551c0
Author: Todd Lipcon t...@apache.org
Authored: Tue Jul 22 19:55:03 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Tue Jul 22 19:55:03 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   4 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |  77 ++--
 .../src/CMakeLists.txt  | 126 +--
 .../src/main/native/src/lib/commons.h   |   1 +
 .../src/main/native/test/TestIFile.cc   |   5 +-
 5 files changed, 111 insertions(+), 102 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e788e8b6/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
new file mode 100644
index 000..18bf029
--- /dev/null
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -0,0 +1,4 @@
+Changes for Hadoop Native Map Output Collector
+
+
+MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e788e8b6/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 6f46fd5..40b6520 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -1,4 +1,4 @@
-?xml version=1.0?
+?xml version=1.0 encoding=UTF-8?
 !--
   Licensed under the Apache License, Version 2.0 (the License);
   you may not use this file except in compliance with the License.
@@ -12,7 +12,8 @@
   See the License for the specific language governing permissions and
   limitations under the License. See accompanying LICENSE file.
 --
-project xmlns=http://maven.apache.org/POM/4.0.0; 
xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
+project xmlns=http://maven.apache.org/POM/4.0.0;
+  xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
   xsi:schemaLocation=http://maven.apache.org/POM/4.0.0
   http://maven.apache.org/xsd/maven-4.0.0.xsd;
   parent
@@ -29,7 +30,7 @@
   properties
 !-- Needed for generating FindBugs warnings using parent pom --
 mr.basedir${project.parent.basedir}/..//mr.basedir
-  /properties  
+  /properties
 
 
   dependencies
@@ -53,7 +54,7 @@
 /dependency
 dependency
   groupIdorg.mockito/groupId
-artifactIdmockito-all/artifactId
+  artifactIdmockito-all/artifactId
   scopetest/scope
 /dependency
 dependency
@@ -85,7 +86,7 @@
 include**/compresstest/*Test.java/include
 include**/nonsorttest/*Test.java/include
 include**/kvtest/*Test.java/include
-  /includes 
+  /includes
   additionalClasspathElements
 
additionalClasspathElement${basedir}/../hadoop-mapreduce-client-common/target/classes/additionalClasspathElement
   /additionalClasspathElements
@@ -131,26 +132,26 @@
   /execution
 /executions
   /plugin
-plugin
-groupIdorg.codehaus.mojo/groupId
-artifactIdnative-maven-plugin/artifactId
-executions
-execution
-  phasecompile/phase
-goals
-  goaljavah/goal
-/goals
-configuration
-javahPath${env.JAVA_HOME}/bin/javah/javahPath
-javahClassNames
-
javaClassNameorg.apache.hadoop.mapred.nativetask.NativeBatchProcessor/javaClassName
-
javaClassNameorg.apache.hadoop.mapred.nativetask.NativeRuntime/javaClassName
-/javahClassNames
-
javahOutputDirectory

[31/64] [abbrv] git commit: MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer. Contributed 
by Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613036 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/66351803
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/66351803
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/66351803

Branch: refs/heads/trunk
Commit: 663518032449940f136ae8a9f149ed453837a8d8
Parents: 77acc70
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 08:24:05 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 08:24:05 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../nativetask/buffer/ByteBufferDataReader.java | 126 +
 .../nativetask/buffer/ByteBufferDataWriter.java | 141 ---
 .../buffer/TestByteBufferReadWrite.java |  93 +---
 4 files changed, 75 insertions(+), 286 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/66351803/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index e12f743..aa695cf 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -5,3 +5,4 @@ MAPREDUCE-5985. native-task: Fix build on macosx. Contributed 
by Binglin Chang
 MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)
 MAPREDUCE-5996. native-task: Rename system tests into standard directory 
layout (todd)
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
+MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/66351803/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
index 5af7180..24f402d 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/ByteBufferDataReader.java
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.mapred.nativetask.buffer;
 
+import com.google.common.base.Charsets;
+
 import java.io.DataInput;
 import java.io.EOFException;
 import java.io.IOException;
@@ -31,11 +33,13 @@ import java.nio.ByteBuffer;
 public class ByteBufferDataReader extends DataInputStream {
   private ByteBuffer byteBuffer;
   private char lineCache[];
+  private java.io.DataInputStream javaReader;
 
   public ByteBufferDataReader(InputBuffer buffer) {
 if (buffer != null) {
-  this.byteBuffer = buffer.getByteBuffer();
+  reset(buffer);
 }
+javaReader = new java.io.DataInputStream(this);
   }
 
   public void reset(InputBuffer buffer) {
@@ -128,128 +132,12 @@ public class ByteBufferDataReader extends 
DataInputStream {
 
   @Override
   public String readLine() throws IOException {
-
-InputStream in = this;
-
-char buf[] = lineCache;
-
-if (buf == null) {
-  buf = lineCache = new char[128];
-}
-
-int room = buf.length;
-int offset = 0;
-int c;
-
-loop: while (true) {
-  switch (c = in.read()) {
-  case -1:
-  case '\n':
-break loop;
-
-  case '\r':
-final int c2 = in.read();
-if ((c2 != '\n')  (c2 != -1)) {
-  if (!(in instanceof PushbackInputStream)) {
-in = new PushbackInputStream(in);
-  }
-  ((PushbackInputStream) in).unread(c2);
-}
-break loop;
-
-  default:
-if (--room  0) {
-  buf = new char[offset + 128];
-  room = buf.length - offset - 1;
-  System.arraycopy(lineCache, 0, buf, 0, offset);
-  lineCache = buf;
-}
-buf[offset++] = (char) c;
-break;
-  }
-}
-if ((c == -1)  (offset == 0)) {
-  return null;
-}
-return String.copyValueOf(buf, 0, offset);
+return javaReader.readLine();
   }
 
   @Override
   public

[53/64] [abbrv] git commit: MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-12 Thread todd
MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1081d9ce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1081d9ce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1081d9ce

Branch: refs/heads/trunk
Commit: 1081d9cee23aa661d7c9165bc9855865a38b528e
Parents: cce7d1e
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 12:02:47 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Sep 3 12:22:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../hadoop/mapred/nativetask/Command.java   |  8 +
 .../mapred/nativetask/CommandDispatcher.java| 11 ++-
 .../hadoop/mapred/nativetask/Constants.java | 18 ++-
 .../hadoop/mapred/nativetask/DataChannel.java   |  3 ++
 .../hadoop/mapred/nativetask/DataReceiver.java  |  3 ++
 .../mapred/nativetask/HadoopPlatform.java   |  6 ++--
 .../mapred/nativetask/ICombineHandler.java  |  3 ++
 .../mapred/nativetask/INativeComparable.java|  5 +++
 .../mapred/nativetask/INativeHandler.java   |  2 ++
 .../mapred/nativetask/NativeBatchProcessor.java |  3 +-
 .../mapred/nativetask/NativeDataSource.java | 10 ++
 .../mapred/nativetask/NativeDataTarget.java | 14 +++--
 .../NativeMapOutputCollectorDelegator.java  | 11 ---
 .../hadoop/mapred/nativetask/NativeRuntime.java | 10 +++---
 .../hadoop/mapred/nativetask/Platform.java  | 15 -
 .../hadoop/mapred/nativetask/Platforms.java |  7 +++--
 .../mapred/nativetask/StatusReportChecker.java  | 33 ++--
 .../hadoop/mapred/nativetask/TaskContext.java   | 29 +
 .../mapred/nativetask/buffer/BufferType.java|  5 +--
 .../nativetask/buffer/ByteBufferDataReader.java | 10 ++
 .../nativetask/buffer/ByteBufferDataWriter.java | 10 +++---
 .../nativetask/buffer/DataInputStream.java  |  2 ++
 .../nativetask/buffer/DataOutputStream.java | 11 +++
 .../mapred/nativetask/buffer/InputBuffer.java   |  2 ++
 .../mapred/nativetask/buffer/OutputBuffer.java  |  2 ++
 .../nativetask/handlers/BufferPullee.java   |  4 ++-
 .../nativetask/handlers/BufferPuller.java   | 26 ---
 .../nativetask/handlers/BufferPushee.java   |  2 ++
 .../nativetask/handlers/BufferPusher.java   |  3 +-
 .../nativetask/handlers/CombinerHandler.java| 19 +--
 .../mapred/nativetask/handlers/IDataLoader.java |  3 ++
 .../handlers/NativeCollectorOnlyHandler.java| 22 -
 .../serde/BoolWritableSerializer.java   |  2 ++
 .../serde/ByteWritableSerializer.java   |  2 ++
 .../serde/BytesWritableSerializer.java  |  2 ++
 .../nativetask/serde/DefaultSerializer.java |  4 +++
 .../serde/DoubleWritableSerializer.java |  2 ++
 .../serde/FloatWritableSerializer.java  |  3 ++
 .../mapred/nativetask/serde/IKVSerializer.java  | 12 ---
 .../nativetask/serde/INativeSerializer.java |  4 +++
 .../nativetask/serde/IntWritableSerializer.java |  2 ++
 .../mapred/nativetask/serde/KVSerializer.java   | 17 ++
 .../serde/LongWritableSerializer.java   |  2 ++
 .../nativetask/serde/NativeSerialization.java   |  2 ++
 .../serde/NullWritableSerializer.java   |  2 ++
 .../serde/SerializationFramework.java   |  3 ++
 .../mapred/nativetask/serde/TextSerializer.java |  2 ++
 .../serde/VIntWritableSerializer.java   |  3 +-
 .../serde/VLongWritableSerializer.java  |  3 +-
 .../mapred/nativetask/util/BytesUtil.java   |  2 ++
 .../mapred/nativetask/util/ConfigUtil.java  |  5 ++-
 .../nativetask/util/LocalJobOutputFiles.java|  2 ++
 .../nativetask/util/NativeTaskOutput.java   |  2 ++
 .../nativetask/util/NativeTaskOutputFiles.java  |  3 +-
 .../mapred/nativetask/util/OutputUtil.java  |  5 +--
 .../mapred/nativetask/util/ReadWriteBuffer.java |  5 +--
 .../mapred/nativetask/util/SizedWritable.java   |  4 ++-
 .../mapred/nativetask/TestTaskContext.java  |  4 +--
 59 files changed, 264 insertions(+), 148 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 7c9558e..269a2f6 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -19,3 +19,4 @@ MAPREDUCE-5977. Fix or suppress native-task gcc warnings 
(Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful

[34/64] [abbrv] git commit: MAPREDUCE-5995. native-task: Revert changes to Text internals. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-5995. native-task: Revert changes to Text internals. Contributed by 
Todd Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613828 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/78d86a98
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/78d86a98
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/78d86a98

Branch: refs/heads/trunk
Commit: 78d86a983960f231f51e47579c0b1ae73f9645fd
Parents: 43917e5
Author: Todd Lipcon t...@apache.org
Authored: Sun Jul 27 19:04:14 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Sun Jul 27 19:04:14 2014 +

--
 .../src/main/java/org/apache/hadoop/io/Text.java| 6 +-
 hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt | 1 +
 .../hadoop/mapred/nativetask/serde/TextSerializer.java  | 9 +
 3 files changed, 3 insertions(+), 13 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 73cd1e6..3dc5076 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -226,10 +226,6 @@ public class Text extends BinaryComparable
 this.length = len;
   }
 
-  public void setLength(int len) {
-this.length = len;
-  }
-
   /**
* Append a range of bytes to the end of the given text
* @param utf8 the data to copy from
@@ -264,7 +260,7 @@ public class Text extends BinaryComparable
* @param len the number of bytes we need
* @param keepData should the old data be kept
*/
-  public void setCapacity(int len, boolean keepData) {
+  private void setCapacity(int len, boolean keepData) {
 if (bytes == null || bytes.length  len) {
   if (bytes != null  keepData) {
 bytes = Arrays.copyOf(bytes, Math.max(len,length  1));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 92c94a8..42e0af3 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -7,3 +7,4 @@ MAPREDUCE-5996. native-task: Rename system tests into standard 
directory layout
 MAPREDUCE-5997. native-task: Use DirectBufferPool from Hadoop Common (todd)
 MAPREDUCE-6000. native-task: Simplify ByteBufferDataReader/Writer (todd)
 MAPREDUCE-5991. native-task should not run unit tests if native profile is not 
enabled. (Binglin Chang)
+MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/78d86a98/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
index 63a64de..cde4c7b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/TextSerializer.java
@@ -42,13 +42,6 @@ public class TextSerializer implements 
INativeSerializerText, INativeComparabl
 
   @Override
   public void deserialize(DataInput in, int length, Text w) throws IOException 
{
-try {
-  w.setCapacity(length, true);
-  w.setLength(length);
-} catch (final Exception e) {
-  throw new IOException(e);
-}
-final byte[] bytes = w.getBytes();
-in.readFully(bytes, 0, length);
+w.readWithKnownLength(in, length);
   }
 }



[61/64] [abbrv] git commit: MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask (seanzhong)

2014-09-12 Thread todd
MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/52a8b4db
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/52a8b4db
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/52a8b4db

Branch: refs/heads/trunk
Commit: 52a8b4db92a35598006103c59a2ff93afc5312ee
Parents: 7c91f9b
Author: Sean Zhong clock...@gmail.com
Authored: Sat Sep 6 11:46:07 2014 +0800
Committer: Sean Zhong clock...@gmail.com
Committed: Sat Sep 6 11:46:07 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../sdk/example/CustomModule/README.txt |   1 -
 .../sdk/example/CustomModule/pom.xml| 131 ---
 .../platform/custom/CustomPlatform.java |  39 --
 .../nativetask/serde/custom/CustomWritable.java |  75 ---
 .../serde/custom/CustomWritableSerializer.java  |  33 -
 .../src/main/native/src/CustomComparator.cpp|  88 -
 7 files changed, 1 insertion(+), 367 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 539e7be..cfc9412 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -23,3 +23,4 @@ MAPREDUCE-6055. native-task: findbugs, interface annotations, 
and other misc cle
 MAPREDUCE-6067. native-task: fix some counter issues (Binglin Chang)
 MAPREDUCE-6069. native-task: Lint/style fixes and removal of unused code (todd)
 MAPREDUCE-6074. native-task: fix release audit, javadoc, javac warnings (todd)
+MAPREDUCE-6077. native-task: Remove CustomModule examples in nativetask 
(seanzhong)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
deleted file mode 100644
index 0ad6f1e..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/README.txt
+++ /dev/null
@@ -1 +0,0 @@
-This project is depend on hadoop and hadoop-nativetask, so in order to 
complete the build, you have to specify the hadoop dir first.
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/52a8b4db/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
deleted file mode 100644
index 5bf67c7..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/sdk/example/CustomModule/pom.xml
+++ /dev/null
@@ -1,131 +0,0 @@
-?xml version=1.0?
-!-- Licensed under the Apache License, Version 2.0 (the License); you 
-  may not use this file except in compliance with the License. You may obtain 
-  a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless 
-  required by applicable law or agreed to in writing, software distributed 
-  under the License is distributed on an AS IS BASIS, WITHOUT WARRANTIES 
-  OR CONDITIONS OF ANY KIND, either express or implied. See the License for 
-  the specific language governing permissions and limitations under the 
License. 
-  See accompanying LICENSE file. --
-project xmlns=http://maven.apache.org/POM/4.0.0; 
xmlns:xsi=http://www.w3.org/2001/XMLSchema-instance;
-  xsi:schemaLocation=http://maven.apache.org/POM/4.0.0
-  http://maven.apache.org/xsd/maven-4.0.0.xsd;
-  modelVersion4.0.0/modelVersion
-  groupIdorg.apache.hadoop/groupId
-  artifactIdnativetask-sdk/artifactId
-  version2.2.0/version
-  namenativetask-sdk/name
-
-  properties
-  /properties
-
-  dependencies
-dependency
-  groupIdorg.apache.hadoop/groupId
-  artifactIdhadoop-mapreduce-client-nativetask/artifactId
-  version2.2.0/version
-/dependency
-  /dependencies
-
-  build
-plugins
-  plugin
-groupIdorg.apache.maven.plugins/groupId

[57/64] [abbrv] MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index 60bb6f5..ba026f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -38,13 +38,6 @@ using std::pair;
 enum NativeObjectType {
   UnknownObjectType = 0,
   BatchHandlerType = 1,
-  MapperType = 2,
-  ReducerType = 3,
-  PartitionerType = 4,
-  CombinerType = 5,
-  FolderType = 6,
-  RecordReaderType = 7,
-  RecordWriterType = 8
 };
 
 /**
@@ -69,7 +62,6 @@ enum Endium {
 #define INPUT_LINE_KV_SEPERATOR 
mapreduce.input.keyvaluelinerecordreader.key.value.separator
 #define MAPRED_TEXTOUTPUT_FORMAT_SEPERATOR 
mapreduce.output.textoutputformat.separator
 #define MAPRED_WORK_OUT_DIR mapreduce.task.output.dir
-#define NATIVE_OUTPUT_FILE_NAME native.output.file.name
 #define MAPRED_COMPRESS_OUTPUT mapreduce.output.fileoutputformat.compress
 #define MAPRED_OUTPUT_COMPRESSION_CODEC 
mapreduce.output.fileoutputformat.compress.codec
 #define TOTAL_ORDER_PARTITIONER_PATH total.order.partitioner.path
@@ -386,32 +378,6 @@ public:
   virtual bool next(Buffer  key, Buffer  value) = 0;
 };
 
-class RecordReader : public KVIterator, public Configurable, public Progress {
-public:
-  virtual NativeObjectType type() {
-return RecordReaderType;
-  }
-
-  virtual bool next(Buffer  key, Buffer  value) = 0;
-
-  virtual float getProgress() = 0;
-
-  virtual void close() = 0;
-};
-
-class RecordWriter : public Collector, public Configurable {
-public:
-  virtual NativeObjectType type() {
-return RecordWriterType;
-  }
-
-  virtual void collect(const void * key, uint32_t keyLen, const void * value, 
uint32_t valueLen) {
-  }
-
-  virtual void close() {
-  }
-
-};
 
 class ProcessorBase : public Configurable {
 protected:
@@ -444,36 +410,6 @@ public:
   }
 };
 
-class Mapper : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return MapperType;
-  }
-
-  /**
-   * Map interface, default IdenticalMapper
-   */
-  virtual void map(const char * key, uint32_t keyLen, const char * value, 
uint32_t valueLen) {
-collect(key, keyLen, value, valueLen);
-  }
-};
-
-class Partitioner : public Configurable {
-public:
-  virtual NativeObjectType type() {
-return PartitionerType;
-  }
-
-  /**
-   * Partition interface
-   * @param key key buffer
-   * @param keyLen key length, can be modified to smaller value
-   *   to truncate key
-   * @return partition number
-   */
-  virtual uint32_t getPartition(const char * key, uint32_t  keyLen, uint32_t 
numPartition);
-};
-
 enum KeyGroupIterState {
   SAME_KEY,
   NEW_KEY,
@@ -502,80 +438,7 @@ public:
   virtual const char * nextValue(uint32_t  len) = 0;
 };
 
-class Reducer : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return ReducerType;
-  }
-
-  /**
-   * Reduce interface, default IdenticalReducer
-   */
-  virtual void reduce(KeyGroupIterator  input) {
-const char * key;
-const char * value;
-uint32_t keyLen;
-uint32_t valueLen;
-key = input.getKey(keyLen);
-while (NULL != (value = input.nextValue(valueLen))) {
-  collect(key, keyLen, value, valueLen);
-}
-  }
-};
-
-/**
- * Folder API used for hashtable based aggregation
- * Folder will be used in this way:
- * on(key, value):
- *   state = hashtable.get(key)
- *   if state == None:
- * size = size()
- * if size == -1:
- *   state = init(null, -1)
- * elif size  0:
- *   state = fixallocator.get(key)
- *   init(state, size)
- *   folder(state, value, value.len)
- *
- * final():
- *   for k,state in hashtable:
- * final(key, key.len, state)
- */
-class Folder : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return FolderType;
-  }
-
-  /**
-   * Get aggregator state size
-   * @return state storage size
-   * -1 size not fixed or unknown, default
-   *e.g. list map tree
-   * 0  don't need to store state
-   * 0  fixed sized state
-   *e.g. int32 int64 float.
-   */
-  virtual int32_t size() {
-return -1;
-  }
 
-  /**
-   * Create and/or init new state
-   */
-  virtual void * init(const char * key, uint32_t keyLen) {
-return NULL;
-  }
-
-  /**
-   * Aggregation function
-   */
-  virtual void folder(void * dest, const char * value, uint32_t valueLen) {
-  }
-
-  virtual void final(const char * key, uint32_t keyLen, void * 

[23/64] [abbrv] git commit: MAPREDUCE-5994. Simplify ByteUtils and fix failing test. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-5994. Simplify ByteUtils and fix failing test. Contributed by Todd 
Lipcon.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613003 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d468a924
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d468a924
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d468a924

Branch: refs/heads/trunk
Commit: d468a9244740fcdeacf23b3aa08ab1c457e050ca
Parents: 341695e
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 06:14:27 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 06:14:27 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop/mapred/nativetask/NativeRuntime.java |  10 +-
 .../mapred/nativetask/util/BytesUtil.java   | 148 +++
 .../mapred/nativetask/util/ConfigUtil.java  |   5 +-
 .../mapred/nativetask/util/ReadWriteBuffer.java |   6 +-
 .../mapred/nativetask/utils/TestBytesUtil.java  |  19 +--
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |   4 +-
 .../nativetask/testutil/BytesFactory.java   |  23 +--
 8 files changed, 55 insertions(+), 161 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d468a924/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 18bf029..166d68d 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -2,3 +2,4 @@ Changes for Hadoop Native Map Output Collector
 
 
 MAPREDUCE-5985. native-task: Fix build on macosx. Contributed by Binglin Chang
+MAPREDUCE-5994. Simplify ByteUtils and fix failing test. (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d468a924/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
index 53b1acd..65ce652 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/NativeRuntime.java
@@ -20,20 +20,16 @@ package org.apache.hadoop.mapred.nativetask;
 
 import java.io.IOException;
 
+import com.google.common.base.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.FloatWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.Task.TaskReporter;
-import org.apache.hadoop.mapred.nativetask.util.BytesUtil;
 import org.apache.hadoop.mapred.nativetask.util.ConfigUtil;
 import org.apache.hadoop.mapred.nativetask.util.SnappyUtil;
 import org.apache.hadoop.util.VersionInfo;
@@ -93,7 +89,7 @@ public class NativeRuntime {
*/
   public synchronized static long createNativeObject(String clazz) {
 assertNativeLibraryLoaded();
-final long ret = JNICreateNativeObject(BytesUtil.toBytes(clazz));
+final long ret = JNICreateNativeObject(clazz.getBytes(Charsets.UTF_8));
 if (ret == 0) {
   LOG.warn(Can't create NativeObject for class  + clazz + , probably 
not exist.);
 }
@@ -108,7 +104,7 @@ public class NativeRuntime {
*/
   public synchronized static long registerLibrary(String libraryName, String 
clazz) {
 assertNativeLibraryLoaded();
-final long ret = JNIRegisterModule(BytesUtil.toBytes(libraryName), 
BytesUtil.toBytes(clazz));
+final long ret = JNIRegisterModule(libraryName.getBytes(Charsets.UTF_8), 
clazz.getBytes(Charsets.UTF_8));
 if (ret != 0) {
   LOG.warn(Can't create NativeObject for class  + clazz + , probably 
not exist.);
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d468a924

[24/64] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
deleted file mode 100644
index b665971..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/function/org/apache/hadoop/mapred/nativetask/testutil/ResultVerifier.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * License); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an AS IS BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.mapred.nativetask.testutil;
-
-import java.io.IOException;
-import java.util.zip.CRC32;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-
-public class ResultVerifier {
-  /**
-   * verify the result
-   * 
-   * @param sample
-   *  :nativetask output
-   * @param source
-   *  :yuanwenjian
-   * @throws Exception
-   */
-  public static boolean verify(String sample, String source) throws Exception {
-FSDataInputStream sourcein = null;
-FSDataInputStream samplein = null;
-
-final Configuration conf = new Configuration();
-final FileSystem fs = FileSystem.get(conf);
-final Path hdfssource = new Path(source);
-final Path[] sourcepaths = FileUtil.stat2Paths(fs.listStatus(hdfssource));
-
-final Path hdfssample = new Path(sample);
-final Path[] samplepaths = FileUtil.stat2Paths(fs.listStatus(hdfssample));
-if (sourcepaths == null) {
-  throw new Exception(source file can not be found);
-}
-if (samplepaths == null) {
-  throw new Exception(sample file can not be found);
-}
-if (sourcepaths.length != samplepaths.length) {
-  return false;
-}
-for (int i = 0; i  sourcepaths.length; i++) {
-  final Path sourcepath = sourcepaths[i];
-  // op result file start with part-r like part-r-0
-
-  if (!sourcepath.getName().startsWith(part-r)) {
-continue;
-  }
-  Path samplepath = null;
-  for (int j = 0; j  samplepaths.length; j++) {
-if (samplepaths[i].getName().equals(sourcepath.getName())) {
-  samplepath = samplepaths[i];
-  break;
-}
-  }
-  if (samplepath == null) {
-throw new Exception(cound not found file  + 
samplepaths[0].getParent() + / + sourcepath.getName()
-+  , as sourcepaths has such file);
-  }
-
-  // compare
-  try {
-if (fs.exists(sourcepath)  fs.exists(samplepath)) {
-  sourcein = fs.open(sourcepath);
-  samplein = fs.open(samplepath);
-} else {
-  System.err.println(result file not found: + sourcepath +  or  + 
samplepath);
-  return false;
-}
-
-CRC32 sourcecrc, samplecrc;
-samplecrc = new CRC32();
-sourcecrc = new CRC32();
-final byte[] bufin = new byte[1  16];
-int readnum = 0;
-int totalRead = 0;
-while (samplein.available()  0) {
-  readnum = samplein.read(bufin);
-  totalRead += readnum;
-  samplecrc.update(bufin, 0, readnum);
-}
-
-if (0 == totalRead) {
-  throw new Exception(source  + sample +  is empty file);
-}
-
-totalRead = 0;
-while (sourcein.available()  0) {
-  readnum = sourcein.read(bufin);
-  totalRead += readnum;
-  sourcecrc.update(bufin, 0, readnum);
-}
-if (0 == totalRead) {
-  throw new Exception(source  + sample +  is empty file);
-}
-
-if (samplecrc.getValue() == sourcecrc.getValue()) {
-  ;
-} else 

[48/64] [abbrv] git commit: MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is sucessful (Contributed by Binglin Chang)

2014-09-12 Thread todd
MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Contributed by Binglin Chang)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/17cd0faa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/17cd0faa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/17cd0faa

Branch: refs/heads/trunk
Commit: 17cd0faaceb8f9ce00b8c2c200e810f78f36c671
Parents: bfd1d75
Author: Binglin Chang bch...@apache.org
Authored: Tue Sep 2 11:00:48 2014 +0800
Committer: Binglin Chang bch...@apache.org
Committed: Tue Sep 2 11:00:48 2014 +0800

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 28 ++--
 .../nativetask/combinertest/CombinerTest.java   | 10 +--
 .../combinertest/LargeKVCombinerTest.java   | 15 +--
 .../combinertest/OldAPICombinerTest.java|  5 
 .../nativetask/compresstest/CompressTest.java   | 18 -
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  5 ++--
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 15 +--
 .../mapred/nativetask/kvtest/LargeKVTest.java   | 15 +--
 .../nativetask/nonsorttest/NonSortTest.java |  5 
 10 files changed, 74 insertions(+), 43 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 6384757..baa88c1 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -17,3 +17,4 @@ MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
+MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful (Binglin Chang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index bb7d7bb..f62743e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -83,10 +83,8 @@
 /property
   /properties
   includes
-include**/TestTaskContext.java/include
-include**/buffer/Test*.java/include
-include**/handlers/Test*.java/include
-include**/serde/Test*.java/include
+include**/*Test.java/include
+include**/Test*.java/include
   /includes
 /configuration
   /plugin
@@ -201,28 +199,6 @@
   /execution
 /executions
   /plugin
-  plugin
-groupIdorg.apache.maven.plugins/groupId
-artifactIdmaven-surefire-plugin/artifactId
-configuration
-  properties
-property
-  namelistener/name
-  valueorg.apache.hadoop.test.TimedOutTestsListener/value
-/property
-  /properties
-  includes
-include**/TestTaskContext.java/include
-include**/buffer/Test*.java/include
-include**/handlers/Test*.java/include
-include**/serde/Test*.java/include
-include**/combinertest/*Test.java/include
-include**/compresstest/*Test.java/include
-include**/nonsorttest/*Test.java/include
-include**/kvtest/*Test.java/include
-  /includes
-/configuration
-  /plugin
 /plugins
   /build
 /profile

http://git-wip-us.apache.org/repos/asf/hadoop/blob/17cd0faa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/combinertest/CombinerTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop

[20/64] [abbrv] git commit: Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
Import initial code for MAPREDUCE-2841 (native output collector)

Contributed by:
  Binglin Chang
  Yang Dong
  Sean Zhong
  Manu Zhang
  Zhongliang Zhu
  Vincent Wang
  Yan Dong
  Cheng Lian
  Xusen Yin
  Fangqin Dai
  Jiang Weihua
  Gansha Wu
  Avik Dey


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1611413 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b2551c06
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b2551c06
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b2551c06

Branch: refs/heads/trunk
Commit: b2551c06a09fb80a9e69adbc01c4c34b93ad0139
Parents: cc57df3
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 17 17:44:55 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 17 17:44:55 2014 +

--
 .../main/java/org/apache/hadoop/io/Text.java| 6 +-
 hadoop-dist/pom.xml | 1 +
 .../java/org/apache/hadoop/mapred/MapTask.java  |14 +-
 .../hadoop-mapreduce-client-nativetask/LICENSE  |   288 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   186 +
 .../sdk/example/CustomModule/README.txt | 1 +
 .../sdk/example/CustomModule/pom.xml|   131 +
 .../platform/custom/CustomPlatform.java |39 +
 .../nativetask/serde/custom/CustomWritable.java |75 +
 .../serde/custom/CustomWritableSerializer.java  |33 +
 .../src/main/native/src/CustomComparator.cpp|88 +
 .../src/CMakeLists.txt  |   279 +
 .../src/JNIFlags.cmake  |   118 +
 .../src/config.h.cmake  |23 +
 .../hadoop/mapred/nativetask/Command.java   |49 +
 .../mapred/nativetask/CommandDispatcher.java|38 +
 .../hadoop/mapred/nativetask/Constants.java |59 +
 .../hadoop/mapred/nativetask/DataChannel.java   |38 +
 .../hadoop/mapred/nativetask/DataReceiver.java  |37 +
 .../mapred/nativetask/HadoopPlatform.java   |81 +
 .../mapred/nativetask/ICombineHandler.java  |43 +
 .../mapred/nativetask/INativeComparable.java|50 +
 .../mapred/nativetask/INativeHandler.java   |59 +
 .../mapred/nativetask/NativeBatchProcessor.java |   289 +
 .../mapred/nativetask/NativeDataSource.java |53 +
 .../mapred/nativetask/NativeDataTarget.java |51 +
 .../NativeMapOutputCollectorDelegator.java  |   170 +
 .../hadoop/mapred/nativetask/NativeRuntime.java |   216 +
 .../hadoop/mapred/nativetask/Platform.java  |   102 +
 .../hadoop/mapred/nativetask/Platforms.java |75 +
 .../mapred/nativetask/StatusReportChecker.java  |   103 +
 .../hadoop/mapred/nativetask/TaskContext.java   |91 +
 .../mapred/nativetask/buffer/BufferType.java|26 +
 .../nativetask/buffer/ByteBufferDataReader.java |   264 +
 .../nativetask/buffer/ByteBufferDataWriter.java |   252 +
 .../nativetask/buffer/DataInputStream.java  |25 +
 .../nativetask/buffer/DataOutputStream.java |42 +
 .../nativetask/buffer/DirectBufferPool.java |93 +
 .../mapred/nativetask/buffer/InputBuffer.java   |   121 +
 .../mapred/nativetask/buffer/OutputBuffer.java  |74 +
 .../nativetask/handlers/BufferPullee.java   |   118 +
 .../nativetask/handlers/BufferPuller.java   |   187 +
 .../nativetask/handlers/BufferPushee.java   |   147 +
 .../nativetask/handlers/BufferPusher.java   |89 +
 .../nativetask/handlers/CombinerHandler.java|   140 +
 .../mapred/nativetask/handlers/IDataLoader.java |35 +
 .../handlers/NativeCollectorOnlyHandler.java|   161 +
 .../serde/BoolWritableSerializer.java   |33 +
 .../serde/ByteWritableSerializer.java   |33 +
 .../serde/BytesWritableSerializer.java  |45 +
 .../nativetask/serde/DefaultSerializer.java |67 +
 .../serde/DoubleWritableSerializer.java |33 +
 .../serde/FloatWritableSerializer.java  |33 +
 .../mapred/nativetask/serde/IKVSerializer.java  |71 +
 .../nativetask/serde/INativeSerializer.java |45 +
 .../nativetask/serde/IntWritableSerializer.java |33 +
 .../mapred/nativetask/serde/KVSerializer.java   |   108 +
 .../serde/LongWritableSerializer.java   |32 +
 .../nativetask/serde/NativeSerialization.java   |86 +
 .../serde/NullWritableSerializer.java   |33 +
 .../serde/SerializationFramework.java   |32 +
 .../mapred/nativetask/serde/TextSerializer.java |54 +
 .../serde/VIntWritableSerializer.java   |26 +
 .../serde/VLongWritableSerializer.java  |26 +
 .../mapred/nativetask/util/BytesUtil.java   |   267 +
 .../mapred/nativetask/util/ConfigUtil.java  |43 +
 .../nativetask/util/LocalJobOutputFiles.java|   189 +
 .../nativetask/util/NativeTaskOutput.java   |   137 +
 .../nativetask

[39/64] [abbrv] git commit: MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml. Contributed by Binglin Chang.

2014-09-12 Thread todd
MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml. 
Contributed by Binglin Chang.

git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1617877 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/886338ff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/886338ff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/886338ff

Branch: refs/heads/trunk
Commit: 886338ff80438b2daea3755f165cc411e39ef560
Parents: 4d4fb17
Author: Todd Lipcon t...@apache.org
Authored: Thu Aug 14 04:47:20 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Aug 14 04:47:20 2014 +

--
 .../CHANGES.MAPREDUCE-2841.txt  |  3 +-
 .../hadoop-mapreduce-client-common/pom.xml  | 11 
 .../hadoop-mapreduce-client-nativetask/pom.xml  | 16 +--
 .../src/CMakeLists.txt  |  4 +++
 .../src/main/native/test.sh | 29 
 hadoop-project/pom.xml  |  6 
 6 files changed, 38 insertions(+), 31 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/886338ff/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 462ac8b..c3c6ab9 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -11,4 +11,5 @@ MAPREDUCE-5995. native-task: Revert changes to Text internals 
(todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
 MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
 MAPREDUCE-5976. native-task: should not fail to build if snappy is missing 
(Manu Zhang)
-MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
\ No newline at end of file
+MAPREDUCE-5978. native-task: remove test case for not supported codec 
Bzip2Codec and DefaultCodec (Manu Zhang)
+MAPREDUCE-6006. native-task: add native tests to maven and fix bug in pom.xml 
(Binglin Chang via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/886338ff/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index 48db5c8..7ef7d3b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -84,17 +84,6 @@
   /execution
 /executions
   /plugin
-  plugin
-artifactIdmaven-jar-plugin/artifactId
-executions
-  execution
-goals
-  goaltest-jar/goal
-/goals
-phasetest-compile/phase
-  /execution
-/executions
-  /plugin
 /plugins
   /build
 /project

http://git-wip-us.apache.org/repos/asf/hadoop/blob/886338ff/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index 9727800..bb7d7bb 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -66,7 +66,6 @@
 dependency
   groupIdorg.apache.hadoop/groupId
   artifactIdhadoop-mapreduce-client-common/artifactId
-  typetest-jar/type
   scopetest/scope
 /dependency
   /dependencies
@@ -173,7 +172,6 @@
   overwrite=true
   fileset dir=${basedir}/src/main/native/testData /
 /copy
-copy file=${basedir}/src/main/native/test.sh 
todir=${project.build.directory}/native/test /
 exec executable=cmake 
dir=${project.build.directory}/native
   failonerror=true
   arg line=${basedir}/src/ 
-DGENERATED_JAVAH=${project.build.directory}/native/javah 
-DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} 
-DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB

[26/64] [abbrv] MAPREDUCE-5996. native-task: Rename system tests into standard directory layout. Contributed by Todd Lipcon.

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2cba48f/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
deleted file mode 100644
index 168a65d..000
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/system/data/testGlibcBugSpill.out
+++ /dev/null
@@ -1,2 +0,0 @@
-��*�PKM��:\xBD\xC8\xCA\xCB\xAA\xB4c`\xD5|A1+\xCB\xDC\xAA\x7F\x1D\xA4\x1Bx\x1B\xD1k\xE1\x01\x0E\xEA\xC82\xB7\xF8
 
\xBD.z\xEFH\xFB*\xE8\xBD\xD4\x15o\x82\xA1\x0F}\x03\x00\x17\xC7\xF7Ce*T\x1D\xC4tk\xE7\x0CS\xBF[\xADW{\xB9\xFFLM\xB2\x87\x03\xFC\xC2\xE2\x9C%\xCB\xFF\x9A\x97]\x15_N\xA3\x8FDb\xA6\x172\x99\xF8+\xA4\xA5\xAC4n\x0D}\xA0/[\xBD\xFD\xC4\xA7\xBCj\xF8\x85\xC6V\x87B\xBA\x85\x8A\xCDZ\xFBU\x84\xD7\xBC\xAB\x84H}_v\xC4\xB2\x11\x19\xDB\x06\x93\xB5\xBE\x92E\xCE\xFA\x02RG\xE4\xA3HcI4\xAB.X\xA5\xB8a\x06\x1E.\xADS\xFB\x8AW\xDA\xCA2^\\x90\x87\x95l]r\x99\x88b\x1EU\xC8\xE0l\x1F#h\x08\xA8\x80I\x9B\xC0E\x19%\xDE\xE5\xA6?\xC2\x83\xA0\xD6O1\xFB_\x94\xF8\x8F\xD6\xD9\x16\xE2\x91\x1E\x8EL\x1F\xA2Y\x05\x9CoMN\x0C\x86\xE2\xE9\x9A\xB16o\xF6\xF3Q\xA2\xD8\xB8\xD0\xDB\x86\x93\xAFesc\xA0h\x12\xB9E\x1BU\x12\x02OXS\x8BXn\x0EQ\xABJ\xCE\xB2k\xB1\xD7Z'\xE4\xB7[`~4\x11\xB4\xCD`\xF5%\x1F\xF9\xEEW\x88\xC5\x14+O\x1B9B\x11\x95\xDE\xD5\xA4Co\x8E\xDB\xEAz\x9FGi\xAD\xFE\xF8\xB63\xED\x04\xA1\xE2v\x86G\
 
xAE\x09\x91\xE4\x03JS=\\xD1\x81}\xEE\xA3?\x87\xDB\xC1\x8B\xFF)\xE8L\xC3\xC7\x8B\x05\x1B'\xCC)\xD4\x0D\x90{\xB9\xB7C6\xA9\x16\xE2\xF9D%\xE9!Bt]N\xFB\xF3\xCE\x7F\xB7d8\xBCN\x191,Nm\xABb\x92\x00\xEA\xBC\xD0\xD5!\x956\xF9\x9D\x98\x86\x81QDnL\xB1\xEFxX\xA4\x1FZ\xE9uf\x7F\xF7\x8F\xCD\xC5P\x81\x17\x17\xB0\xCB\xAF\x9C\x05\x8E\xC6\xDB\x09\x1Ak\xA6\xF822\xF4\xA4t\xA1;2\xCC\xEA\xFD?0k]\xF8\xE4\x13\xD2'Z\x9F~5\x9C\xFC+k\x07\xE4k\xB9\xEDx\x9B\x17\xA2\xA8\x85R\x90\x1CV\xD3T\xB7b[\x81)8\xEA\xE6\x12\xE0\x88~\xB8\x87\xA9X\xB0\x88\x19\xA5,\x88cm@\xEE\xF9.\x8A?\xF8!\xD8oR\xAB\x05\x93h3\x13\x0A\x98_E\x11\x81\xD6\xB8;P\xD8u\x9DTv]\\xF1\x0C\xD5\xF4\x0E#\x87}\xE3\x89\xA2\xC2\xEA\x86\x9D\xE7\xAF\xA1\xC3;\xD2\xFF\xA6\xB2!\xAB\\x90i|n\xDE\xBB:\xC6\x08\x1D,Q\xC1;\x15\x9DUV\x8F\xD3;\xFA\x12\xD0U\xA9\xD7\xC6\xFDX\x8F\x10vX\xF1\xF9H7\x12Z\x1FoB\x8B\xD6\xF0\x04\xEB\xA2\xC1\xACH\xFE\xC9_R\xF5\x1D\xA8\x82\xBD\xC6\x11\x19#\x9A\xA8\x09\x95\x06\x98\xA9f\xD9\x12\xA0i\xFA\xD3\xB2\x01\xE9\x06\x14~.z\xDF\xF2YV
 
#z\xEB-\xFA$[U\xA6\xE2.\xD6\xD0kf\xA3\x83\xE0\x86\xD9\xC1\x7FO\xE9\xF4\xEF\x81\x06\xA3\xDE\xC8\xACt\x8B\xCAr\xE6\x82$8\x0F\xE1\x06\xF9\xBB\x0C\x9F\xAA8\x94\xBBU\x8DQ\xC3\xE9@'\xF9\xBF\xCF\x8B\xD4\x995\xEB\xE9t3Q\x10\xD7\x0D\x9D\x94\xCA\x84\xC2\xE2{\x0B\x89r\xA6\xC6\xAA\xE5C\xC6U\xA6\xD1tqa\xA0\xD7RO\x92\xC9\xBE\xF9\xD1\xDE\x93b\x06\xD3ae:\xB7\x8C\x99\xD6\xFFI\x86\x8CvtX@k\xE4m\x93wz\x8A\xC2U\xFBb\xA2\x9Ao\xAF\x8D\x19k\xA2pP\x83s\xFE\x0E\x0FY\xA0\xA7E'\xC0\x02\xF4\x98A5\xF2\x8A?\x04$\x89\xC7]\x0A\xFBX\x97*\xAEN\x13#\xB3a\xD2y\xD3#_B\xAC\x05:\xAC\x92\xEAO\x08H\x88N\x1A\xB9\xDC\xFA\x11ikJ\x12!\xE8u\xCD+\x88\x98\xE3c\xCB\xD91%\x98KDN\xC6\xF2\xB7\x86o6\x91P`\x9B\xA1\x0B\x82\xEB\x15H\xA0\xC4\x85\xDF\xAC\xA1b\xD9\xA3b\xB8E\xB59_\xF4\xCCWH\x095\xE6\xBE\xF2\x19FC\x0E\xAB\xEA6\x0C\xAD5\x90/$D\xB3\x9E\x81[9j\x8A\xC4\x85\xAAA\x7Fe\xDCG8\x00\xDA\xCFi\xBDp\x18?\xF5\xA8~@\xC1\x08\xDF\xE5\xAE,\xDF0t\xCB\x92W8V\x01F\x1A./\x8D\xAF\xD8\x87\xCE\x80w*\x18Is\x17\x15\x17DI\xB4a_\N\xB77\xA7n\x16\xDF
 
IE\xEF\x9E\x8Cd7\x1B\xF9\x97\xF9E\x86\x98\x9F\x1D\xB6\x9F\x94\xF7\x8AJ\x1A\xCD\x88\xD3\xD3\xDEw\x92Q\\xF5\xC6\xD6\x11c\x81\x00\xE8\xD9'\xE1\x9D5\xFC\x11},\xB8\xB2V\xE6\xC0\xB7a/\x18~=G\xAC\x9EGxR,\x9B\x91\xA0\xE9\x85\x14J\xB3\xB2O\xEA3\xB2F\xA7vo\x88\xFEm\x18*g%\xA4l\x9B\xF0\xA5`$\xEBo\xFC?\x13s\x0D\x91y\x92\xE0u\xFA\xD1p+)\xACpi\xE3\xB4L-\x0A\xF1#\xCF\x1A\x82\x8A\xE5\xEF80\xC9\x17z@\xD1\x9AoK\xCE\xE42\x92M\xEF\x85\xBE(z\x860\xC6\x03t\x02\xA1\xD2\x09\x1E\xB3\x80t\x86|\x8E~F\xFD1i|\x84^\x07\xC9Z\xBE\x91\xA7\x06\x9B\xC7\x8F\xFB\xD4\xB84\xED\xA2\x108/X\x89\xF4W\xF6\xE3\xEE\x94Q,H\xFFo3E\xA7q\xE5\x15\x86\xCF\x0F_\xF9\xE8N\xCD}\xEB\xFD\x0E\x03EZi\x83
 
\xA7D|{]\xEE\xBA\xE4\x00RR\x1C\xFBj\x81\xF0{w\x9F\xA6F\xBB\x00\x0Cw\x01\xE1\xFE\xC8\xE8\xAC\xD18\x19,\xE9\x9E\xFE\xF0\xA45ov[K\x86UT\x00\xC0*\xEF\x9De\xE7pN[\xA5~\xF3\xDA\xAD\xE0\x85\xB0Nb\x09I_\xA8B:b\x9A\x10\xC2\xCF\xE83|\xB1\xCD\x17\xE8\x95\xA6!\xD72DR\x03\xB7\xF4\xC2\x88\xF1Rl+t\xA7x\x04\x10\xC1@\\xC3\xE5}\xDD`\xA2\x91W\x7F%S\xB7\x1
 

[18/64] [abbrv] Import initial code for MAPREDUCE-2841 (native output collector)

2014-09-12 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/InputBuffer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/InputBuffer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/InputBuffer.java
new file mode 100644
index 000..eb15164
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/InputBuffer.java
@@ -0,0 +1,121 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * License); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred.nativetask.buffer;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+public class InputBuffer {
+
+  private ByteBuffer byteBuffer;
+  private final BufferType type;
+
+  public InputBuffer(BufferType type, int inputSize) throws IOException {
+
+final int capacity = inputSize;
+this.type = type;
+
+if (capacity  0) {
+
+  switch (type) {
+  case DIRECT_BUFFER:
+this.byteBuffer = 
DirectBufferPool.getInstance().borrowBuffer(capacity);
+this.byteBuffer.order(ByteOrder.BIG_ENDIAN);
+break;
+  case HEAP_BUFFER:
+this.byteBuffer = ByteBuffer.allocate(capacity);
+this.byteBuffer.order(ByteOrder.BIG_ENDIAN);
+break;
+  }
+  byteBuffer.position(0);
+  byteBuffer.limit(0);
+}
+  }
+
+  public BufferType getType() {
+return this.type;
+  }
+
+  public InputBuffer(byte[] bytes) {
+this.type = BufferType.HEAP_BUFFER;
+if (bytes.length  0) {
+  this.byteBuffer = ByteBuffer.wrap(bytes);
+  this.byteBuffer.order(ByteOrder.BIG_ENDIAN);
+  byteBuffer.position(0);
+  byteBuffer.limit(0);
+}
+  }
+
+  public ByteBuffer getByteBuffer() {
+return this.byteBuffer;
+  }
+
+  public int length() {
+if (null == byteBuffer) {
+  return 0;
+}
+return byteBuffer.limit();
+  }
+
+  public void rewind(int startOffset, int length) {
+if (null == byteBuffer) {
+  return;
+}
+byteBuffer.position(startOffset);
+byteBuffer.limit(length);
+  }
+
+  public int remaining() {
+if (null == byteBuffer) {
+  return 0;
+}
+return byteBuffer.remaining();
+  }
+
+  public int position() {
+if (null == byteBuffer) {
+  return 0;
+}
+return byteBuffer.position();
+  }
+
+  public int position(int pos) {
+if (null == byteBuffer) {
+  return 0;
+}
+
+byteBuffer.position(pos);
+return pos;
+  }
+
+  public int capacity() {
+if (null == byteBuffer) {
+  return 0;
+}
+return byteBuffer.capacity();
+  }
+
+  public byte[] array() {
+if (null == byteBuffer) {
+  return null;
+}
+return byteBuffer.array();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2551c06/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/OutputBuffer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/OutputBuffer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/OutputBuffer.java
new file mode 100644
index 000..3c54948
--- /dev/null
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/buffer/OutputBuffer.java
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The 

[59/64] [abbrv] git commit: Merge remote-tracking branch 'apache/trunk' into MR-2841

2014-09-12 Thread todd
Merge remote-tracking branch 'apache/trunk' into MR-2841


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4b3f1e2c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4b3f1e2c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4b3f1e2c

Branch: refs/heads/trunk
Commit: 4b3f1e2ce48124b7406d77cc2ae1d0914311b0d4
Parents: 683987b c6107f5
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 10:47:27 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:47:27 2014 -0700

--
 LICENSE.txt | 290 
 NOTICE.txt  |   2 +
 README.txt  |  31 ++
 dev-support/create-release.sh   |  24 +-
 .../main/resources/assemblies/hadoop-src.xml|   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  20 ++
 hadoop-common-project/hadoop-common/LICENSE.txt | 290 
 hadoop-common-project/hadoop-common/NOTICE.txt  |   2 -
 hadoop-common-project/hadoop-common/README.txt  |  31 --
 .../dev-support/findbugsExcludeFile.xml |   2 +-
 .../hadoop/crypto/key/KeyProviderFactory.java   |  36 +-
 .../crypto/key/kms/KMSClientProvider.java   |  57 +---
 .../hadoop/crypto/random/OsSecureRandom.java|  21 +-
 .../security/authorize/AccessControlList.java   |  12 +-
 .../DelegationTokenAuthenticationFilter.java|  15 +-
 .../DelegationTokenAuthenticationHandler.java   |   6 +-
 .../web/DelegationTokenAuthenticator.java   |  20 +-
 .../apache/hadoop/util/HttpExceptionUtils.java  | 185 ++
 .../apache/hadoop/crypto/TestCryptoCodec.java   |  69 +++-
 .../apache/hadoop/crypto/TestCryptoStreams.java |   2 +-
 .../crypto/key/TestKeyProviderFactory.java  |  13 +
 .../crypto/random/TestOsSecureRandom.java   |  15 +
 ...tionTokenAuthenticationHandlerWithMocks.java |  35 +-
 .../hadoop/util/TestHttpExceptionUtils.java | 167 +
 hadoop-common-project/hadoop-kms/pom.xml|   1 +
 .../hadoop/crypto/key/kms/server/KMS.java   |  27 +-
 .../hadoop/crypto/key/kms/server/KMSACLs.java   |  55 ++-
 .../key/kms/server/KMSExceptionsProvider.java   |  12 +-
 .../hadoop-kms/src/site/apt/index.apt.vm|  88 -
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 100 +-
 .../crypto/key/kms/server/TestKMSACLs.java  |   2 +-
 hadoop-dist/pom.xml |   3 +
 .../hadoop/fs/http/client/HttpFSFileSystem.java |  70 ++--
 .../hadoop/fs/http/client/HttpFSUtils.java  |  50 ---
 .../hadoop/lib/wsrs/ExceptionProvider.java  |  14 +-
 .../fs/http/client/BaseTestHttpFSWith.java  |   4 +-
 .../fs/http/server/TestHttpFSServerNoACLs.java  |  10 +-
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java   |   6 +-
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java|   2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  25 ++
 hadoop-hdfs-project/hadoop-hdfs/LICENSE.txt | 271 ---
 hadoop-hdfs-project/hadoop-hdfs/NOTICE.txt  |   2 -
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   3 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  80 -
 .../hadoop/hdfs/DistributedFileSystem.java  |  11 +-
 .../org/apache/hadoop/hdfs/inotify/Event.java   |  12 +
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |   6 +-
 .../hdfs/server/datanode/BlockPoolManager.java  |  12 +-
 .../hadoop/hdfs/server/datanode/DataNode.java   |   2 +-
 .../hdfs/server/datanode/DataStorage.java   |   2 +-
 .../hdfs/server/namenode/FSDirectory.java   |  44 ++-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   6 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   8 +-
 .../hdfs/server/namenode/FSEditLogOp.java   |  18 +
 .../server/namenode/FSImageSerialization.java   |  17 +
 .../hdfs/server/namenode/FSNamesystem.java  |  60 ++--
 .../hdfs/server/namenode/INodeReference.java|   4 +-
 .../namenode/InotifyFSEditLogOpTranslator.java  |   1 +
 .../server/namenode/NameNodeLayoutVersion.java  |   4 +-
 .../snapshot/DirectorySnapshottableFeature.java |   2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java   |   4 +-
 .../hadoop-hdfs/src/main/proto/inotify.proto|   1 +
 .../src/main/resources/hdfs-default.xml |  18 +
 .../hadoop/hdfs/TestBlocksScheduledCounter.java |  18 +-
 .../hdfs/TestDFSInotifyEventInputStream.java|   8 +-
 .../org/apache/hadoop/hdfs/TestDFSRename.java   |   6 +
 .../org/apache/hadoop/hdfs/TestDFSUtil.java |  26 ++
 .../org/apache/hadoop/hdfs/TestFileAppend4.java |   2 +-
 .../apache/hadoop/hdfs/TestFileCreation.java| 119 +++
 .../org/apache/hadoop/hdfs/TestLocalDFS.java|  29 +-
 .../server/datanode/TestBlockPoolManager.java   |  22 ++
 .../hdfs/server/namenode/CreateEditsLog.java|   2 +-
 .../hdfs/server/namenode/TestEditLog.java   |   2 +-
 .../hdfs/server/namenode/TestStartup.java

[29/64] [abbrv] git commit: Merge trunk into branch

2014-09-12 Thread todd
Merge trunk into branch


git-svn-id: 
https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1613007 
13f79535-47bb-0310-9956-ffa450edef68


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5149a8a6
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5149a8a6
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5149a8a6

Branch: refs/heads/trunk
Commit: 5149a8a6f1d2fc673a7d011d7e0e1a5407ceac4c
Parents: b2cba48 2054453
Author: Todd Lipcon t...@apache.org
Authored: Thu Jul 24 06:23:41 2014 +
Committer: Todd Lipcon t...@apache.org
Committed: Thu Jul 24 06:23:41 2014 +

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  14 +
 .../apache/hadoop/fs/ChecksumFileSystem.java|   4 +-
 .../java/org/apache/hadoop/fs/FileContext.java  |  29 ++
 .../java/org/apache/hadoop/fs/FileSystem.java   |   2 +-
 .../org/apache/hadoop/fs/viewfs/ChRootedFs.java |  34 ++
 .../apache/hadoop/fs/viewfs/ViewFileSystem.java |  34 ++
 .../org/apache/hadoop/fs/viewfs/ViewFs.java |  80 +
 .../security/alias/JavaKeyStoreProvider.java|   5 +-
 .../apache/hadoop/util/DirectBufferPool.java| 109 ++
 .../org/apache/hadoop/fs/TestDFVariations.java  |  21 +-
 .../apache/hadoop/fs/shell/TestPathData.java|  12 +-
 .../fs/viewfs/ViewFileSystemBaseTest.java   |  30 ++
 .../apache/hadoop/fs/viewfs/ViewFsBaseTest.java |  30 ++
 .../hadoop/util/TestDirectBufferPool.java   |  97 +
 .../java/org/apache/hadoop/nfs/NfsExports.java  |   7 +-
 .../hadoop/hdfs/nfs/mount/RpcProgramMountd.java |  19 +-
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java|   5 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  10 +
 .../apache/hadoop/hdfs/BlockReaderLocal.java|   2 +-
 .../hadoop/hdfs/BlockReaderLocalLegacy.java |   2 +-
 .../hadoop/hdfs/protocol/ClientProtocol.java|   2 +-
 .../protocol/datatransfer/PacketReceiver.java   |   2 +-
 .../server/datanode/BlockPoolSliceScanner.java  |  54 ++-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   3 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   4 +
 .../hdfs/server/namenode/FSEditLogOp.java   |   5 +
 .../hdfs/server/namenode/FSNamesystem.java  |  62 +++-
 .../server/namenode/XAttrPermissionFilter.java  |  17 +-
 .../hadoop/hdfs/util/DirectBufferPool.java  | 107 --
 .../hdfs/web/resources/XAttrNameParam.java  |   4 +-
 .../org/apache/hadoop/hdfs/TestDFSShell.java|  69 
 .../hdfs/server/namenode/FSXAttrBaseTest.java   | 351 +--
 .../server/namenode/TestNamenodeRetryCache.java |   4 +-
 .../namenode/ha/TestRetryCacheWithHA.java   |  54 ++-
 .../hadoop/hdfs/util/TestDirectBufferPool.java  |  97 -
 .../hadoop/hdfs/web/resources/TestParam.java|   6 -
 .../hadoop-hdfs/src/test/resources/editsStored  | Bin 4970 - 4992 bytes
 .../src/test/resources/editsStored.xml  |   2 +
 hadoop-yarn-project/CHANGES.txt |  12 +
 .../dev-support/findbugs-exclude.xml|   6 +
 .../BaseContainerTokenSecretManager.java|   2 +-
 .../yarn/server/nodemanager/NodeManager.java|   6 +-
 .../recovery/NMLeveldbStateStoreService.java| 117 ++-
 .../recovery/NMNullStateStoreService.java   |  30 +-
 .../recovery/NMStateStoreService.java   |  40 ++-
 .../security/NMContainerTokenSecretManager.java | 105 +-
 .../security/NMTokenSecretManagerInNM.java  |   5 +-
 .../recovery/NMMemoryStateStoreService.java |  55 ++-
 .../TestNMLeveldbStateStoreService.java |  96 -
 .../TestNMContainerTokenSecretManager.java  | 144 
 .../security/TestNMTokenSecretManagerInNM.java  |   8 +-
 .../scheduler/fair/FairScheduler.java   |  15 +-
 .../fair/FairSchedulerConfiguration.java|   9 +
 .../security/DelegationTokenRenewer.java|   6 +-
 .../fair/TestFairSchedulerPreemption.java   |   2 +-
 .../security/TestDelegationTokenRenewer.java|  36 +-
 .../src/site/apt/FairScheduler.apt.vm   |   6 +
 .../src/site/apt/ResourceManagerRest.apt.vm | 105 +++---
 58 files changed, 1789 insertions(+), 405 deletions(-)
--




[63/64] [abbrv] git commit: MAPREDUCE-2841. Merge remote-tracking branch 'apache/MR-2841' into trunk

2014-09-12 Thread todd
MAPREDUCE-2841. Merge remote-tracking branch 'apache/MR-2841' into trunk


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8a3a327c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8a3a327c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8a3a327c

Branch: refs/heads/trunk
Commit: 8a3a327cd3c7fd1049b521325e485c59137855e8
Parents: a0ad975 b160707
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 12 18:31:53 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 12 18:31:53 2014 -0700

--
 LICENSE.txt |33 +
 .../assemblies/hadoop-mapreduce-dist.xml| 7 +
 .../CHANGES.MAPREDUCE-2841.txt  |26 +
 .../hadoop-mapreduce-client-nativetask/pom.xml  |   216 +
 .../src/CMakeLists.txt  |   276 +
 .../src/JNIFlags.cmake  |   118 +
 .../src/config.h.cmake  |23 +
 .../hadoop/mapred/nativetask/Command.java   |57 +
 .../mapred/nativetask/CommandDispatcher.java|33 +
 .../hadoop/mapred/nativetask/Constants.java |48 +
 .../hadoop/mapred/nativetask/DataChannel.java   |41 +
 .../hadoop/mapred/nativetask/DataReceiver.java  |37 +
 .../mapred/nativetask/HadoopPlatform.java   |84 +
 .../mapred/nativetask/ICombineHandler.java  |44 +
 .../mapred/nativetask/INativeComparable.java|54 +
 .../mapred/nativetask/INativeHandler.java   |53 +
 .../mapred/nativetask/NativeBatchProcessor.java |   279 +
 .../mapred/nativetask/NativeDataSource.java |47 +
 .../mapred/nativetask/NativeDataTarget.java |47 +
 .../NativeMapOutputCollectorDelegator.java  |   171 +
 .../hadoop/mapred/nativetask/NativeRuntime.java |   197 +
 .../hadoop/mapred/nativetask/Platform.java  |   100 +
 .../hadoop/mapred/nativetask/Platforms.java |79 +
 .../mapred/nativetask/StatusReportChecker.java  |99 +
 .../hadoop/mapred/nativetask/TaskContext.java   |94 +
 .../mapred/nativetask/buffer/BufferType.java|27 +
 .../nativetask/buffer/ByteBufferDataReader.java |   148 +
 .../nativetask/buffer/ByteBufferDataWriter.java |   169 +
 .../nativetask/buffer/DataInputStream.java  |27 +
 .../nativetask/buffer/DataOutputStream.java |39 +
 .../mapred/nativetask/buffer/InputBuffer.java   |   136 +
 .../mapred/nativetask/buffer/OutputBuffer.java  |76 +
 .../nativetask/handlers/BufferPullee.java   |   121 +
 .../nativetask/handlers/BufferPuller.java   |   203 +
 .../nativetask/handlers/BufferPushee.java   |   151 +
 .../nativetask/handlers/BufferPusher.java   |91 +
 .../nativetask/handlers/CombinerHandler.java|   145 +
 .../mapred/nativetask/handlers/IDataLoader.java |37 +
 .../handlers/NativeCollectorOnlyHandler.java|   171 +
 .../serde/BoolWritableSerializer.java   |35 +
 .../serde/ByteWritableSerializer.java   |35 +
 .../serde/BytesWritableSerializer.java  |48 +
 .../nativetask/serde/DefaultSerializer.java |71 +
 .../serde/DoubleWritableSerializer.java |35 +
 .../serde/FloatWritableSerializer.java  |36 +
 .../mapred/nativetask/serde/IKVSerializer.java  |48 +
 .../nativetask/serde/INativeSerializer.java |49 +
 .../nativetask/serde/IntWritableSerializer.java |35 +
 .../mapred/nativetask/serde/KVSerializer.java   |   115 +
 .../serde/LongWritableSerializer.java   |34 +
 .../nativetask/serde/NativeSerialization.java   |91 +
 .../serde/NullWritableSerializer.java   |35 +
 .../serde/SerializationFramework.java   |35 +
 .../mapred/nativetask/serde/TextSerializer.java |49 +
 .../serde/VIntWritableSerializer.java   |27 +
 .../serde/VLongWritableSerializer.java  |27 +
 .../mapred/nativetask/util/BytesUtil.java   |   172 +
 .../mapred/nativetask/util/ConfigUtil.java  |47 +
 .../nativetask/util/LocalJobOutputFiles.java|   159 +
 .../nativetask/util/NativeTaskOutput.java   |   106 +
 .../nativetask/util/NativeTaskOutputFiles.java  |   170 +
 .../mapred/nativetask/util/OutputUtil.java  |47 +
 .../mapred/nativetask/util/ReadWriteBuffer.java |   159 +
 .../mapred/nativetask/util/SizedWritable.java   |55 +
 .../src/main/native/gtest/gtest-all.cc  |  9120 
 .../src/main/native/gtest/gtest_main.cc |39 +
 .../src/main/native/gtest/include/gtest/gtest.h | 19541 +
 .../src/main/native/src/NativeTask.h|   507 +
 .../src/main/native/src/codec/BlockCodec.cc |   179 +
 .../src/main/native/src/codec/BlockCodec.h  |98 +
 .../src/main/native/src/codec/GzipCodec.cc  |   192 +
 .../src/main/native/src/codec/GzipCodec.h   |82 +
 .../src/main/native/src/codec/Lz4Codec.cc   |88

[47/64] [abbrv] git commit: MAPREDUCE-6054. native-task: Speed up tests. Contributed by Todd Lipcon.

2014-09-12 Thread todd
MAPREDUCE-6054. native-task: Speed up tests. Contributed by Todd Lipcon.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bfd1d75d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bfd1d75d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bfd1d75d

Branch: refs/heads/trunk
Commit: bfd1d75d875b6ba261fdb1825d0f151b026c2d24
Parents: fad4524
Author: Todd Lipcon t...@apache.org
Authored: Wed Aug 27 12:25:07 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Aug 27 12:25:49 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |  2 +-
 .../mapred/nativetask/util/BytesUtil.java   |  7 +-
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |  8 +-
 .../hadoop/mapred/nativetask/kvtest/KVTest.java | 94 +---
 .../mapred/nativetask/kvtest/TestInputFile.java | 13 ++-
 .../nativetask/testutil/BytesFactory.java   | 71 +--
 .../nativetask/testutil/MockValueClass.java |  8 ++
 .../mapred/nativetask/utils/TestBytesUtil.java  |  7 ++
 8 files changed, 123 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 4dc08cb..6384757 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -16,4 +16,4 @@ MAPREDUCE-6006. native-task: add native tests to maven and 
fix bug in pom.xml (B
 MAPREDUCE-6026. native-task: fix logging (Manu Zhang via todd)
 MAPREDUCE-6035. native-task: sources/test-sources jar distribution (Manu Zhang 
via todd)
 MAPREDUCE-5977. Fix or suppress native-task gcc warnings (Manu Zhang via todd)
-
+MAPREDUCE-6054. native-task: Speed up tests (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
index d90ae8d..e33b23e 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/util/BytesUtil.java
@@ -23,6 +23,9 @@ import com.google.common.primitives.Longs;
 
 public class BytesUtil {
 
+  private static final char[] HEX_CHARS =
+  0123456789abcdef.toCharArray();
+
   /**
* Converts a big-endian byte array to a long value.
*
@@ -124,7 +127,9 @@ public class BytesUtil {
 ||  `~!@#$%^*()-_=+[]{}|;:'\,./?.indexOf(ch) = 0 ) {
 result.append((char)ch);
   } else {
-result.append(String.format(\\x%02X, ch));
+result.append(\\x);
+result.append(HEX_CHARS[(ch  4)  0x0F]);
+result.append(HEX_CHARS[ch  0x0F]);
   }
 }
 return result.toString();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bfd1d75d/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
index 6d683f8..3215d0b 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/kvtest/KVJob.java
@@ -20,7 +20,11 @@ package org.apache.hadoop.mapred.nativetask.kvtest;
 import java.io.IOException;
 import java.util.zip.CRC32;
 
+import com.google.common.base.Stopwatch;
 import com.google.common.primitives.Longs;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import

[1/4] Lint/style fixes and removal of unused code

2014-09-05 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/MR-2841 00322161b - 19bd6659a


http://git-wip-us.apache.org/repos/asf/hadoop/blob/19bd6659/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
index 2258726..f81d94f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTest.java
@@ -74,13 +74,14 @@ public class NonSortTest {
   public void startUp() throws Exception {
 Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
 Assume.assumeTrue(NativeRuntime.isNativeLibraryLoaded());
-final ScenarioConfiguration configuration = new ScenarioConfiguration();
-configuration.addNonSortTestConf();
-final FileSystem fs = FileSystem.get(configuration);
+final ScenarioConfiguration conf = new ScenarioConfiguration();
+conf.addNonSortTestConf();
+final FileSystem fs = FileSystem.get(conf);
 final Path path = new Path(TestConstants.NATIVETASK_NONSORT_TEST_INPUTDIR);
 if (!fs.exists(path)) {
-  new 
TestInputFile(configuration.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE,
 1000), Text.class.getName(),
-  Text.class.getName(), 
configuration).createSequenceTestFile(path.toString());
+  int filesize = 
conf.getInt(TestConstants.NATIVETASK_NONSORTTEST_FILESIZE, 1000);
+  new TestInputFile(filesize, Text.class.getName(),
+  Text.class.getName(), conf).createSequenceTestFile(path.toString());
 }
 fs.close();
   }
@@ -93,7 +94,8 @@ public class NonSortTest {
   }
 
 
-  private Job getJob(Configuration conf, String jobName, String inputpath, 
String outputpath) throws IOException {
+  private Job getJob(Configuration conf, String jobName,
+ String inputpath, String outputpath) throws IOException {
 final FileSystem fs = FileSystem.get(conf);
 if (fs.exists(new Path(outputpath))) {
   fs.delete(new Path(outputpath), true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19bd6659/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
index 4ca2449..4092e5f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/nonsorttest/NonSortTestMR.java
@@ -36,7 +36,8 @@ public class NonSortTestMR {
 private final Text word = new Text();
 
 @Override
-public void map(Object key, Text value, Context context) throws 
IOException, InterruptedException {
+public void map(Object key, Text value, Context context)
+  throws IOException, InterruptedException {
   final String line = value.toString();
   final StringTokenizer tokenizer = new StringTokenizer(line);
   while (tokenizer.hasMoreTokens()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/19bd6659/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/test/java/org/apache/hadoop/mapred/nativetask/serde/TestKVSerializer.java
index 004e8b8..fd5b100 100644
--- 

[3/4] Lint/style fixes and removal of unused code

2014-09-05 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/19bd6659/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index 60bb6f5..ba026f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -38,13 +38,6 @@ using std::pair;
 enum NativeObjectType {
   UnknownObjectType = 0,
   BatchHandlerType = 1,
-  MapperType = 2,
-  ReducerType = 3,
-  PartitionerType = 4,
-  CombinerType = 5,
-  FolderType = 6,
-  RecordReaderType = 7,
-  RecordWriterType = 8
 };
 
 /**
@@ -69,7 +62,6 @@ enum Endium {
 #define INPUT_LINE_KV_SEPERATOR 
"mapreduce.input.keyvaluelinerecordreader.key.value.separator"
 #define MAPRED_TEXTOUTPUT_FORMAT_SEPERATOR 
"mapreduce.output.textoutputformat.separator"
 #define MAPRED_WORK_OUT_DIR "mapreduce.task.output.dir"
-#define NATIVE_OUTPUT_FILE_NAME "native.output.file.name"
 #define MAPRED_COMPRESS_OUTPUT "mapreduce.output.fileoutputformat.compress"
 #define MAPRED_OUTPUT_COMPRESSION_CODEC 
"mapreduce.output.fileoutputformat.compress.codec"
 #define TOTAL_ORDER_PARTITIONER_PATH "total.order.partitioner.path"
@@ -386,32 +378,6 @@ public:
   virtual bool next(Buffer & key, Buffer & value) = 0;
 };
 
-class RecordReader : public KVIterator, public Configurable, public Progress {
-public:
-  virtual NativeObjectType type() {
-return RecordReaderType;
-  }
-
-  virtual bool next(Buffer & key, Buffer & value) = 0;
-
-  virtual float getProgress() = 0;
-
-  virtual void close() = 0;
-};
-
-class RecordWriter : public Collector, public Configurable {
-public:
-  virtual NativeObjectType type() {
-return RecordWriterType;
-  }
-
-  virtual void collect(const void * key, uint32_t keyLen, const void * value, 
uint32_t valueLen) {
-  }
-
-  virtual void close() {
-  }
-
-};
 
 class ProcessorBase : public Configurable {
 protected:
@@ -444,36 +410,6 @@ public:
   }
 };
 
-class Mapper : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return MapperType;
-  }
-
-  /**
-   * Map interface, default IdenticalMapper
-   */
-  virtual void map(const char * key, uint32_t keyLen, const char * value, 
uint32_t valueLen) {
-collect(key, keyLen, value, valueLen);
-  }
-};
-
-class Partitioner : public Configurable {
-public:
-  virtual NativeObjectType type() {
-return PartitionerType;
-  }
-
-  /**
-   * Partition interface
-   * @param key key buffer
-   * @param keyLen key length, can be modified to smaller value
-   *   to truncate key
-   * @return partition number
-   */
-  virtual uint32_t getPartition(const char * key, uint32_t & keyLen, uint32_t 
numPartition);
-};
-
 enum KeyGroupIterState {
   SAME_KEY,
   NEW_KEY,
@@ -502,80 +438,7 @@ public:
   virtual const char * nextValue(uint32_t & len) = 0;
 };
 
-class Reducer : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return ReducerType;
-  }
-
-  /**
-   * Reduce interface, default IdenticalReducer
-   */
-  virtual void reduce(KeyGroupIterator & input) {
-const char * key;
-const char * value;
-uint32_t keyLen;
-uint32_t valueLen;
-key = input.getKey(keyLen);
-while (NULL != (value = input.nextValue(valueLen))) {
-  collect(key, keyLen, value, valueLen);
-}
-  }
-};
-
-/**
- * Folder API used for hashtable based aggregation
- * Folder will be used in this way:
- * on(key, value):
- *   state = hashtable.get(key)
- *   if state == None:
- * size = size()
- * if size == -1:
- *   state = init(null, -1)
- * elif size > 0:
- *   state = fixallocator.get(key)
- *   init(state, size)
- *   folder(state, value, value.len)
- *
- * final():
- *   for k,state in hashtable:
- * final(key, key.len, state)
- */
-class Folder : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return FolderType;
-  }
-
-  /**
-   * Get aggregator state size
-   * @return state storage size
-   * -1 size not fixed or unknown, default
-   *e.g. list map tree
-   * 0  don't need to store state
-   * 0  fixed sized state
-   *e.g. int32 int64 float.
-   */
-  virtual int32_t size() {
-return -1;
-  }
 
-  /**
-   * Create and/or init new state
-   */
-  virtual void * init(const char * key, uint32_t keyLen) {
-return NULL;
-  }
-
-  /**
-   * Aggregation function
-   */
-  virtual void folder(void * dest, const char * value, uint32_t valueLen) {
-  }
-
-  virtual void final(const char * key, uint32_t keyLen, void * 

[4/4] git commit: MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-05 Thread todd
MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by 
Todd Lipcon.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/683987be
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/683987be
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/683987be

Branch: refs/heads/MR-2841
Commit: 683987be7c160e67ddb8534eeb3c464bbe2796dd
Parents: 0032216
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 13:07:24 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:44:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  |  22 --
 .../src/CMakeLists.txt  |   6 -
 .../hadoop/mapred/nativetask/Constants.java |  13 -
 .../hadoop/mapred/nativetask/DataReceiver.java  |   3 -
 .../mapred/nativetask/ICombineHandler.java  |   2 -
 .../mapred/nativetask/INativeHandler.java   |   8 -
 .../mapred/nativetask/NativeBatchProcessor.java |  11 -
 .../NativeMapOutputCollectorDelegator.java  |  10 +-
 .../hadoop/mapred/nativetask/NativeRuntime.java |  32 +-
 .../hadoop/mapred/nativetask/Platform.java  |  11 +-
 .../nativetask/handlers/BufferPullee.java   |   3 +-
 .../nativetask/handlers/BufferPuller.java   |   2 -
 .../nativetask/handlers/BufferPushee.java   |   6 +-
 .../nativetask/handlers/BufferPusher.java   |   3 +-
 .../nativetask/handlers/CombinerHandler.java|  16 +-
 .../mapred/nativetask/handlers/IDataLoader.java |   1 -
 .../handlers/NativeCollectorOnlyHandler.java|   6 +-
 .../serde/BytesWritableSerializer.java  |   3 +-
 .../mapred/nativetask/serde/IKVSerializer.java  |  31 +-
 .../mapred/nativetask/serde/KVSerializer.java   |   6 +-
 .../nativetask/serde/NativeSerialization.java   |  11 +-
 .../mapred/nativetask/util/BytesUtil.java   |   2 +-
 .../nativetask/util/LocalJobOutputFiles.java|  58 +---
 .../nativetask/util/NativeTaskOutput.java   |  55 +---
 .../nativetask/util/NativeTaskOutputFiles.java  |  72 ++---
 .../mapred/nativetask/util/ReadWriteBuffer.java |  16 +-
 .../src/main/native/cityhash/city.cc| 307 ---
 .../src/main/native/cityhash/city.h |  90 --
 .../src/main/native/src/NativeTask.h| 137 -
 .../src/main/native/src/codec/BlockCodec.cc |   5 +-
 .../src/main/native/src/codec/BlockCodec.h  |   2 +-
 .../src/main/native/src/codec/GzipCodec.cc  |   2 +-
 .../src/main/native/src/codec/GzipCodec.h   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.cc   |   2 +-
 .../src/main/native/src/codec/Lz4Codec.h|   2 +-
 .../src/main/native/src/codec/SnappyCodec.cc|   2 +-
 .../src/main/native/src/codec/SnappyCodec.h |   2 +-
 .../native/src/handler/AbstractMapHandler.cc|   9 +-
 .../src/main/native/src/handler/BatchHandler.cc |   9 +-
 .../main/native/src/handler/CombineHandler.cc   |   4 +-
 .../main/native/src/handler/CombineHandler.h|   2 +-
 .../src/handler/MCollectorOutputHandler.cc  |   6 +-
 .../src/main/native/src/lib/BufferStream.cc | 116 +--
 .../src/main/native/src/lib/BufferStream.h  |  39 +--
 .../src/main/native/src/lib/Buffers.cc  |  77 +
 .../src/main/native/src/lib/Buffers.h   |  63 +---
 .../src/main/native/src/lib/Combiner.cc |  73 -
 .../src/main/native/src/lib/Combiner.h  |  18 +-
 .../src/main/native/src/lib/Compressions.cc |   6 +-
 .../src/main/native/src/lib/Compressions.h  |   2 +-
 .../src/main/native/src/lib/FileSystem.cc   |  23 +-
 .../src/main/native/src/lib/FileSystem.h|   2 +-
 .../src/main/native/src/lib/IFile.cc|   6 +-
 .../src/main/native/src/lib/IFile.h |  10 +-
 .../src/main/native/src/lib/Iterator.cc |   6 +-
 .../src/main/native/src/lib/Log.cc  |   2 +-
 .../main/native/src/lib/MapOutputCollector.cc   |  58 ++--
 .../main/native/src/lib/MapOutputCollector.h|  16 +-
 .../src/main/native/src/lib/MapOutputSpec.cc|   4 +-
 .../src/main/native/src/lib/MapOutputSpec.h |   4 +-
 .../src/main/native/src/lib/MemoryBlock.cc  |  24 +-
 .../src/main/native/src/lib/MemoryPool.h|   4 +-
 .../src/main/native/src/lib/Merge.cc|   5 +-
 .../src/main/native/src/lib/Merge.h |   8 +-
 .../src/main/native/src/lib/MinHeap.h   |   2 +-
 .../src/main/native/src/lib/NativeLibrary.cc|   7 +-
 .../main/native/src/lib/NativeObjectFactory.cc  |  19 +-
 .../main/native/src/lib/NativeRuntimeJniImpl.cc |  27 +-
 .../src/main/native/src/lib/NativeTask.cc   |  44 +--
 .../src/main/native/src/lib/PartitionBucket.cc  |  25 +-
 .../src/main/native/src/lib/PartitionBucket.h   |  16 +-
 .../native/src/lib/PartitionBucketIterator.cc   |  22 +-
 .../native/src/lib/PartitionBucketIterator.h

[3/4] MAPREDUCE-6069. Lint/style fixes and removal of unused code. Contributed by Todd Lipcon.

2014-09-05 Thread todd
http://git-wip-us.apache.org/repos/asf/hadoop/blob/683987be/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
index 60bb6f5..ba026f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/src/NativeTask.h
@@ -38,13 +38,6 @@ using std::pair;
 enum NativeObjectType {
   UnknownObjectType = 0,
   BatchHandlerType = 1,
-  MapperType = 2,
-  ReducerType = 3,
-  PartitionerType = 4,
-  CombinerType = 5,
-  FolderType = 6,
-  RecordReaderType = 7,
-  RecordWriterType = 8
 };
 
 /**
@@ -69,7 +62,6 @@ enum Endium {
 #define INPUT_LINE_KV_SEPERATOR 
"mapreduce.input.keyvaluelinerecordreader.key.value.separator"
 #define MAPRED_TEXTOUTPUT_FORMAT_SEPERATOR 
"mapreduce.output.textoutputformat.separator"
 #define MAPRED_WORK_OUT_DIR "mapreduce.task.output.dir"
-#define NATIVE_OUTPUT_FILE_NAME "native.output.file.name"
 #define MAPRED_COMPRESS_OUTPUT "mapreduce.output.fileoutputformat.compress"
 #define MAPRED_OUTPUT_COMPRESSION_CODEC 
"mapreduce.output.fileoutputformat.compress.codec"
 #define TOTAL_ORDER_PARTITIONER_PATH "total.order.partitioner.path"
@@ -386,32 +378,6 @@ public:
   virtual bool next(Buffer & key, Buffer & value) = 0;
 };
 
-class RecordReader : public KVIterator, public Configurable, public Progress {
-public:
-  virtual NativeObjectType type() {
-return RecordReaderType;
-  }
-
-  virtual bool next(Buffer & key, Buffer & value) = 0;
-
-  virtual float getProgress() = 0;
-
-  virtual void close() = 0;
-};
-
-class RecordWriter : public Collector, public Configurable {
-public:
-  virtual NativeObjectType type() {
-return RecordWriterType;
-  }
-
-  virtual void collect(const void * key, uint32_t keyLen, const void * value, 
uint32_t valueLen) {
-  }
-
-  virtual void close() {
-  }
-
-};
 
 class ProcessorBase : public Configurable {
 protected:
@@ -444,36 +410,6 @@ public:
   }
 };
 
-class Mapper : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return MapperType;
-  }
-
-  /**
-   * Map interface, default IdenticalMapper
-   */
-  virtual void map(const char * key, uint32_t keyLen, const char * value, 
uint32_t valueLen) {
-collect(key, keyLen, value, valueLen);
-  }
-};
-
-class Partitioner : public Configurable {
-public:
-  virtual NativeObjectType type() {
-return PartitionerType;
-  }
-
-  /**
-   * Partition interface
-   * @param key key buffer
-   * @param keyLen key length, can be modified to smaller value
-   *   to truncate key
-   * @return partition number
-   */
-  virtual uint32_t getPartition(const char * key, uint32_t & keyLen, uint32_t 
numPartition);
-};
-
 enum KeyGroupIterState {
   SAME_KEY,
   NEW_KEY,
@@ -502,80 +438,7 @@ public:
   virtual const char * nextValue(uint32_t & len) = 0;
 };
 
-class Reducer : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return ReducerType;
-  }
-
-  /**
-   * Reduce interface, default IdenticalReducer
-   */
-  virtual void reduce(KeyGroupIterator & input) {
-const char * key;
-const char * value;
-uint32_t keyLen;
-uint32_t valueLen;
-key = input.getKey(keyLen);
-while (NULL != (value = input.nextValue(valueLen))) {
-  collect(key, keyLen, value, valueLen);
-}
-  }
-};
-
-/**
- * Folder API used for hashtable based aggregation
- * Folder will be used in this way:
- * on(key, value):
- *   state = hashtable.get(key)
- *   if state == None:
- * size = size()
- * if size == -1:
- *   state = init(null, -1)
- * elif size > 0:
- *   state = fixallocator.get(key)
- *   init(state, size)
- *   folder(state, value, value.len)
- *
- * final():
- *   for k,state in hashtable:
- * final(key, key.len, state)
- */
-class Folder : public ProcessorBase {
-public:
-  virtual NativeObjectType type() {
-return FolderType;
-  }
-
-  /**
-   * Get aggregator state size
-   * @return state storage size
-   * -1 size not fixed or unknown, default
-   *e.g. list map tree
-   * 0  don't need to store state
-   * 0  fixed sized state
-   *e.g. int32 int64 float.
-   */
-  virtual int32_t size() {
-return -1;
-  }
 
-  /**
-   * Create and/or init new state
-   */
-  virtual void * init(const char * key, uint32_t keyLen) {
-return NULL;
-  }
-
-  /**
-   * Aggregation function
-   */
-  virtual void folder(void * dest, const char * value, uint32_t valueLen) {
-  }
-
-  virtual void final(const char * key, uint32_t keyLen, void * 

[08/24] git commit: HDFS-6848. Lack of synchronization on access to datanodeUuid in DataStorage#format. (Contributed by Xiaoyu Yao)

2014-09-05 Thread todd
HDFS-6848. Lack of synchronization on access to datanodeUuid in 
DataStorage#format. (Contributed by Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3a0142bd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3a0142bd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3a0142bd

Branch: refs/heads/MR-2841
Commit: 3a0142bd773ab11bfb4de76ef6f449d9852732a9
Parents: a18424e
Author: arp a...@apache.org
Authored: Wed Sep 3 17:17:06 2014 -0700
Committer: arp a...@apache.org
Committed: Wed Sep 3 17:17:06 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   | 3 +++
 .../java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java  | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3a0142bd/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c33a0d2..555f294 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -587,6 +587,9 @@ Release 2.6.0 - UNRELEASED
 
 HDFS-6942. Fix typos in log messages. (Ray Chiang via wheat9)
 
+  HDFS-6848. Lack of synchronization on access to datanodeUuid in
+  DataStorage#format(). (Xiaoyu Yao via Arpit Agarwal)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating  deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3a0142bd/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
index ceb2aa0..f382a9e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataStorage.java
@@ -463,7 +463,7 @@ public class DataStorage extends Storage {
 this.clusterID = nsInfo.getClusterID();
 this.namespaceID = nsInfo.getNamespaceID();
 this.cTime = 0;
-this.datanodeUuid = datanodeUuid;
+setDatanodeUuid(datanodeUuid);
 
 if (sd.getStorageUuid() == null) {
   // Assign a new Storage UUID.



[07/24] git commit: MAPREDUCE-6063. Correct spill size calculation for spills wrapping the circular buffer. Contributed by zhihai xu.

2014-09-05 Thread todd
MAPREDUCE-6063. Correct spill size calculation for spills wrapping the circular 
buffer. Contributed by zhihai xu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a18424e7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a18424e7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a18424e7

Branch: refs/heads/MR-2841
Commit: a18424e724982d01e0191ca7db713da1d43d1e4d
Parents: d9a03e2
Author: Chris Douglas cdoug...@apache.org
Authored: Wed Sep 3 14:32:35 2014 -0700
Committer: Chris Douglas cdoug...@apache.org
Committed: Wed Sep 3 17:01:59 2014 -0700

--
 hadoop-mapreduce-project/CHANGES.txt | 3 +++
 .../src/main/java/org/apache/hadoop/mapred/MapTask.java  | 4 +---
 2 files changed, 4 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a18424e7/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 63bc08b..0ee0731 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -267,6 +267,9 @@ Release 2.6.0 - UNRELEASED
 MAPREDUCE-5931. Validate SleepJob command line parameters (Gera Shegalov
 via jlowe)
 
+MAPREDUCE-6063. Correct spill size calculation for spills wrapping the
+circular buffer. (zhihai xu via cdouglas)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a18424e7/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
index dfcbe09..75b4141 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
@@ -1575,9 +1575,7 @@ public class MapTask extends Task {
InterruptedException {
   //approximate the length of the output file to be the length of the
   //buffer + header lengths for the partitions
-  final long size = (bufend >= bufstart
-  ? bufend - bufstart
-  : (bufvoid - bufend) + bufstart) +
+  final long size = distanceTo(bufstart, bufend, bufvoid) +
   partitions * APPROX_HEADER_LENGTH;
   FSDataOutputStream out = null;
   try {



[09/24] git commit: HDFS-6959 Make the HDFS home directory location customizable. Contributed by Yongjun Zhang

2014-09-05 Thread todd
HDFS-6959 Make the HDFS home directory location customizable.  Contributed by 
Yongjun Zhang


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f4caedfc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f4caedfc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f4caedfc

Branch: refs/heads/MR-2841
Commit: f4caedfcbfeae7e2fe7c0e812ddbb087608a5ffd
Parents: 3a0142b
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Wed Sep 3 19:30:30 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Wed Sep 3 19:30:30 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  2 ++
 .../hadoop/hdfs/DistributedFileSystem.java  | 11 ++--
 .../src/main/resources/hdfs-default.xml |  8 ++
 .../org/apache/hadoop/hdfs/TestLocalDFS.java| 29 +++-
 5 files changed, 49 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4caedfc/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 555f294..8498b00 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -435,6 +435,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-4257. The ReplaceDatanodeOnFailure policies could have a forgiving
 option (szetszwo via cmccabe)
 
+HDFS-6959. Make the HDFS home directory location customizable. (yzhang via
+cmccabe)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4caedfc/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index ace2ae9..b5b4f3c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -41,6 +41,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int DFS_STREAM_BUFFER_SIZE_DEFAULT = 4096;
   public static final String  DFS_BYTES_PER_CHECKSUM_KEY = 
"dfs.bytes-per-checksum";
   public static final int DFS_BYTES_PER_CHECKSUM_DEFAULT = 512;
+  public static final String  DFS_USER_HOME_DIR_PREFIX_KEY = 
"dfs.user.home.dir.prefix";
+  public static final String  DFS_USER_HOME_DIR_PREFIX_DEFAULT = "/user";
   public static final String  DFS_CLIENT_RETRY_POLICY_ENABLED_KEY = 
"dfs.client.retry.policy.enabled";
   public static final boolean DFS_CLIENT_RETRY_POLICY_ENABLED_DEFAULT = false; 
   public static final String  DFS_CLIENT_RETRY_POLICY_SPEC_KEY = 
"dfs.client.retry.policy.spec";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f4caedfc/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index fc4bd84..bf7d62e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -102,6 +102,8 @@ import com.google.common.base.Preconditions;
 public class DistributedFileSystem extends FileSystem {
   private Path workingDir;
   private URI uri;
+  private String homeDirPrefix =
+  DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT;
 
   DFSClient dfs;
   private boolean verifyChecksum = true;
@@ -136,7 +138,10 @@ public class DistributedFileSystem extends FileSystem {
 if (host == null) {
   throw new IOException("Incomplete HDFS URI, no host: " + uri);
 }
-
+homeDirPrefix = conf.get(
+DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_KEY,
+DFSConfigKeys.DFS_USER_HOME_DIR_PREFIX_DEFAULT);
+
 this.dfs = new DFSClient(uri, conf, statistics);
 this.uri = URI.create(uri.getScheme()+"://"+uri.getAuthority());
 this.workingDir = getHomeDirectory();
@@ -167,10 +172,10 @@ public class DistributedFileSystem extends FileSystem {
 workingDir = fixRelativePart(dir);
   }
 
-  
   @Override
   public Path getHomeDirectory() {
-return makeQualified(new Path("/user/" + 

[11/24] git commit: HADOOP-11054. Add a KeyProvider instantiation based on a URI. (tucu)

2014-09-05 Thread todd
HADOOP-11054. Add a KeyProvider instantiation based on a URI. (tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/41f1662d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/41f1662d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/41f1662d

Branch: refs/heads/MR-2841
Commit: 41f1662d467ec0b295b742bb80c87482504fbf25
Parents: 8f1a668
Author: Alejandro Abdelnur t...@apache.org
Authored: Thu Sep 4 09:08:31 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Thu Sep 4 09:09:39 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 ++
 .../hadoop/crypto/key/KeyProviderFactory.java   | 36 ++--
 .../crypto/key/TestKeyProviderFactory.java  | 13 +++
 3 files changed, 41 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/41f1662d/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index e8d0f52..2e04917 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -496,6 +496,8 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10863. KMS should have a blacklist for decrypting EEKs. 
 (asuresh via tucu)
 
+HADOOP-11054. Add a KeyProvider instantiation based on a URI. (tucu)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/41f1662d/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
index 9855bc8..6ca0425 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderFactory.java
@@ -63,16 +63,10 @@ public abstract class KeyProviderFactory {
 for(String path: conf.getStringCollection(KEY_PROVIDER_PATH)) {
   try {
 URI uri = new URI(path);
-boolean found = false;
-for(KeyProviderFactory factory: serviceLoader) {
-  KeyProvider kp = factory.createProvider(uri, conf);
-  if (kp != null) {
-result.add(kp);
-found = true;
-break;
-  }
-}
-if (!found) {
+KeyProvider kp = get(uri, conf);
+if (kp != null) {
+  result.add(kp);
+} else {
    throw new IOException("No KeyProviderFactory for " + uri + " in " +
   KEY_PROVIDER_PATH);
 }
@@ -83,4 +77,26 @@ public abstract class KeyProviderFactory {
 }
 return result;
   }
+
+  /**
+   * Create a KeyProvider based on a provided URI.
+   *
+   * @param uri key provider URI
+   * @param conf configuration to initialize the key provider
+   * @return the key provider for the specified URI, or <code>NULL</code> if
+   * a provider for the specified URI scheme could not be found.
+   * @throws IOException thrown if the provider failed to initialize.
+   */
+  public static KeyProvider get(URI uri, Configuration conf)
+  throws IOException {
+KeyProvider kp = null;
+for (KeyProviderFactory factory : serviceLoader) {
+  kp = factory.createProvider(uri, conf);
+  if (kp != null) {
+break;
+  }
+}
+return kp;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/41f1662d/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
index d72ac51..8c4c7b3 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyProviderFactory.java
@@ -357,4 +357,17 @@ public class TestKeyProviderFactory {
 }
   }
 
+  @Test
+  public void testGetProviderViaURI() throws Exception {
+Configuration conf = new Configuration(false);
+URI uri = new URI(JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir +
+"/test.jks");
+KeyProvider kp

[01/24] git commit: HDFS-6942. Fix typos in log messages. Contributed by Ray Chiang.

2014-09-05 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/MR-2841 683987be7 - 4b3f1e2ce


HDFS-6942. Fix typos in log messages. Contributed by Ray Chiang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08a9ac70
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08a9ac70
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08a9ac70

Branch: refs/heads/MR-2841
Commit: 08a9ac7098cb4ae684f40cf2513e3137110cc7e4
Parents: 727331b
Author: Haohui Mai whe...@apache.org
Authored: Tue Sep 2 16:22:18 2014 -0700
Committer: Haohui Mai whe...@apache.org
Committed: Tue Sep 2 16:22:18 2014 -0700

--
 .../main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java | 6 +++---
 .../java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java   | 2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt| 2 ++
 .../java/org/apache/hadoop/hdfs/server/datanode/DataNode.java  | 2 +-
 .../src/test/java/org/apache/hadoop/hdfs/TestFileAppend4.java  | 2 +-
 .../org/apache/hadoop/hdfs/server/namenode/TestStartup.java| 2 +-
 6 files changed, 9 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/08a9ac70/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
 
b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
index cf44af5..ede65c6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java
@@ -421,7 +421,7 @@ class OpenFileCtx {
   if (existantWriteCtx != null) {
 if (!existantWriteCtx.getReplied()) {
   if (LOG.isDebugEnabled()) {
-LOG.debug("Repeated write request which hasn't be served: xid="
+LOG.debug("Repeated write request which hasn't been served: xid="
 + xid + ", drop it.");
   }
 } else {
@@ -579,7 +579,7 @@ class OpenFileCtx {
* writing, and there is no other threads writing (i.e., asyncStatus is
* false), start the writing and set asyncStatus to true.
* 
-   * @return True if the new write is sequencial and we can start writing
+   * @return True if the new write is sequential and we can start writing
* (including the case that there is already a thread writing).
*/
   private synchronized boolean checkAndStartWrite(
@@ -898,7 +898,7 @@ class OpenFileCtx {
   long offset = nextOffset.get();
   if (range.getMin()  offset) {
 if (LOG.isDebugEnabled()) {
-  LOG.debug("The next sequencial write has not arrived yet");
+  LOG.debug("The next sequential write has not arrived yet");
 }
 processCommits(nextOffset.get()); // handle race
 this.asyncStatus = false;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08a9ac70/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
 
b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
index 33dc3a3..0d591d6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
@@ -1423,7 +1423,7 @@ public class RpcProgramNfs3 extends RpcProgram implements 
Nfs3Interface {
 throw io;
   }
   // This happens when startAfter was just deleted
-  LOG.info("Cookie cound't be found: " + new String(startAfter)
+  LOG.info("Cookie couldn't be found: " + new String(startAfter)
   + ", do listing from beginning");
   dlisting = dfsClient
   .listPaths(dirFileIdPath, HdfsFileStatus.EMPTY_NAME);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08a9ac70/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index ae66b0d..2258008 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -585,6 +585,8 @@ Release 2.6.0 - UNRELEASED
 HDFS-6972. TestRefreshUserMappings.testRefreshSuperUserGroupsConfiguration
 doesn't decode url correctly. (Yongjun Zhang via wang)
 
+HDFS-6942. Fix 

[18/24] git commit: YARN-2511. Allowed all origins by default when CrossOriginFilter is enabled. Contributed by Jonathan Eagles.

2014-09-05 Thread todd
YARN-2511. Allowed all origins by default when CrossOriginFilter is enabled. 
Contributed by Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/51a4faf5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/51a4faf5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/51a4faf5

Branch: refs/heads/MR-2841
Commit: 51a4faf52199b181471d591a9fde900fc166fc48
Parents: 3fa5f72
Author: Zhijie Shen zjs...@apache.org
Authored: Thu Sep 4 15:01:01 2014 -0700
Committer: Zhijie Shen zjs...@apache.org
Committed: Thu Sep 4 15:01:01 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt  |  3 +++
 .../server/timeline/webapp/CrossOriginFilter.java| 10 +++---
 .../timeline/webapp/TestCrossOriginFilter.java   | 15 +++
 3 files changed, 25 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/51a4faf5/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index f6f5c4b..1a5ea07 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -178,6 +178,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2509. Enable Cross Origin Filter for timeline server only and not all
 Yarn servers (Mit Desai via jeagles)
 
+YARN-2511. Allowed all origins by default when CrossOriginFilter is
+enabled. (Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/51a4faf5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index a9fb3e8..d71175f 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -76,6 +76,7 @@ public class CrossOriginFilter implements Filter {
  private List<String> allowedMethods = new ArrayList<String>();
  private List<String> allowedHeaders = new ArrayList<String>();
  private List<String> allowedOrigins = new ArrayList<String>();
+  private boolean allowAllOrigins = true;
   private String maxAge;
 
   @Override
@@ -171,7 +172,9 @@ public class CrossOriginFilter implements Filter {
 }
 allowedOrigins =
 Arrays.asList(allowedOriginsConfig.trim().split("\\s*,\\s*"));
+allowAllOrigins = allowedOrigins.contains("*");
 LOG.info("Allowed Origins: " + StringUtils.join(allowedOrigins, ','));
+LOG.info("Allow All Origins: " + allowAllOrigins);
   }
 
   private void initializeMaxAge(FilterConfig filterConfig) {
@@ -199,8 +202,9 @@ public class CrossOriginFilter implements Filter {
 return origin != null;
   }
 
-  private boolean isOriginAllowed(String origin) {
-return allowedOrigins.contains(origin);
+  @VisibleForTesting
+  boolean isOriginAllowed(String origin) {
+return allowAllOrigins || allowedOrigins.contains(origin);
   }
 
   private boolean areHeadersAllowed(String accessControlRequestHeaders) {
@@ -213,7 +217,7 @@ public class CrossOriginFilter implements Filter {
 
   private boolean isMethodAllowed(String accessControlRequestMethod) {
 if (accessControlRequestMethod == null) {
-  return false;
+  return true;
 }
 return allowedMethods.contains(accessControlRequestMethod);
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/51a4faf5/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
index a29e4a0..f666c21 

[02/24] git commit: HDFS-2975. Rename with overwrite flag true can make NameNode to stuck in safemode on NN (crash + restart). (Yi Liu via umamahesh)

2014-09-05 Thread todd
HDFS-2975. Rename with overwrite flag true can make NameNode to stuck in 
safemode on NN (crash + restart). (Yi Liu via umamahesh)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3425ae5d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3425ae5d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3425ae5d

Branch: refs/heads/MR-2841
Commit: 3425ae5d7eaa27b2526d0e0c07bdfea9440359f8
Parents: 08a9ac7
Author: Uma Maheswara Rao G umamah...@apache.org
Authored: Wed Sep 3 18:53:51 2014 +0530
Committer: Uma Maheswara Rao G umamah...@apache.org
Committed: Wed Sep 3 18:53:51 2014 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../hdfs/server/namenode/FSDirectory.java   | 39 
 .../hdfs/server/namenode/FSNamesystem.java  | 15 ++--
 .../org/apache/hadoop/hdfs/TestDFSRename.java   |  6 +++
 4 files changed, 52 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3425ae5d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 2258008..c33a0d2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -680,6 +680,9 @@ Release 2.6.0 - UNRELEASED
 
   HDFS-6954. With crypto, no native lib systems are too verbose. (clamb 
via wang)
 
+  HDFS-2975. Rename with overwrite flag true can make NameNode to stuck in 
safemode 
+  on NN (crash + restart). (Yi Liu via umamahesh)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3425ae5d/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 54e3181..1fa22a2 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -452,7 +452,7 @@ public class FSDirectory implements Closeable {
* @see #unprotectedRenameTo(String, String, long, Options.Rename...)
*/
   void renameTo(String src, String dst, long mtime,
-  Options.Rename... options)
+  BlocksMapUpdateInfo collectedBlocks, Options.Rename... options)
   throws FileAlreadyExistsException, FileNotFoundException,
   ParentNotDirectoryException, QuotaExceededException,
   UnresolvedLinkException, IOException {
@@ -462,7 +462,7 @@ public class FSDirectory implements Closeable {
 }
 writeLock();
 try {
-  if (unprotectedRenameTo(src, dst, mtime, options)) {
+  if (unprotectedRenameTo(src, dst, mtime, collectedBlocks, options)) {
 namesystem.incrDeletedFileCount(1);
   }
 } finally {
@@ -569,18 +569,44 @@ public class FSDirectory implements Closeable {
 
   /**
* Rename src to dst.
+   * <br>
+   * Note: This is to be used by {@link FSEditLog} only.
+   * <br>
+   * 
+   * @param src source path
+   * @param dst destination path
+   * @param timestamp modification time
+   * @param options Rename options
+   */
+  boolean unprotectedRenameTo(String src, String dst, long timestamp,
+  Options.Rename... options) throws FileAlreadyExistsException, 
+  FileNotFoundException, ParentNotDirectoryException, 
+  QuotaExceededException, UnresolvedLinkException, IOException {
+BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo();
+boolean ret = unprotectedRenameTo(src, dst, timestamp, 
+collectedBlocks, options);
+if (!collectedBlocks.getToDeleteList().isEmpty()) {
+  getFSNamesystem().removeBlocksAndUpdateSafemodeTotal(collectedBlocks);
+}
+return ret;
+  }
+  
+  /**
+   * Rename src to dst.
* See {@link DistributedFileSystem#rename(Path, Path, Options.Rename...)}
* for details related to rename semantics and exceptions.
* 
* @param src source path
* @param dst destination path
* @param timestamp modification time
+   * @param collectedBlocks blocks to be removed
* @param options Rename options
*/
   boolean unprotectedRenameTo(String src, String dst, long timestamp,
-  Options.Rename... options) throws FileAlreadyExistsException,
-  FileNotFoundException, ParentNotDirectoryException,
-  QuotaExceededException, UnresolvedLinkException, IOException {
+  BlocksMapUpdateInfo collectedBlocks, 

[20/24] git commit: HDFS-6886. Use single editlog record for creating file + overwrite. Contributed by Yi Liu.

2014-09-05 Thread todd
HDFS-6886. Use single editlog record for creating file + overwrite. Contributed 
by Yi Liu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/61045203
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/61045203
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/61045203

Branch: refs/heads/MR-2841
Commit: 6104520369045dfaa4b543cbad21236ed322249b
Parents: f7df24b
Author: Jing Zhao j...@hortonworks.com
Authored: Thu Sep 4 18:54:38 2014 -0700
Committer: Jing Zhao j...@hortonworks.com
Committed: Thu Sep 4 18:54:38 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../org/apache/hadoop/hdfs/inotify/Event.java   |  12 ++
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |   6 +-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   6 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   8 +-
 .../hdfs/server/namenode/FSEditLogOp.java   |  18 ++
 .../server/namenode/FSImageSerialization.java   |  17 ++
 .../hdfs/server/namenode/FSNamesystem.java  |  45 +++--
 .../namenode/InotifyFSEditLogOpTranslator.java  |   1 +
 .../server/namenode/NameNodeLayoutVersion.java  |   4 +-
 .../hadoop-hdfs/src/main/proto/inotify.proto|   1 +
 .../hdfs/TestDFSInotifyEventInputStream.java|   8 +-
 .../apache/hadoop/hdfs/TestFileCreation.java| 119 
 .../hdfs/server/namenode/CreateEditsLog.java|   2 +-
 .../hdfs/server/namenode/TestEditLog.java   |   2 +-
 .../hadoop-hdfs/src/test/resources/editsStored  | Bin 4992 - 5252 bytes
 .../src/test/resources/editsStored.xml  | 182 ---
 17 files changed, 339 insertions(+), 95 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/61045203/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3e00eba..8964d2d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -438,6 +438,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6959. Make the HDFS home directory location customizable. (yzhang via
 cmccabe)
 
+HDFS-6886. Use single editlog record for creating file + overwrite. (Yi Liu
+via jing9)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/61045203/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
index c7129ca..e8a34e7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/inotify/Event.java
@@ -100,6 +100,7 @@ public abstract class Event {
 private String groupName;
 private FsPermission perms;
 private String symlinkTarget;
+private boolean overwrite;
 
 public static class Builder {
   private INodeType iNodeType;
@@ -110,6 +111,7 @@ public abstract class Event {
   private String groupName;
   private FsPermission perms;
   private String symlinkTarget;
+  private boolean overwrite;
 
   public Builder iNodeType(INodeType type) {
 this.iNodeType = type;
@@ -150,6 +152,11 @@ public abstract class Event {
 this.symlinkTarget = symlinkTarget;
 return this;
   }
+  
+  public Builder overwrite(boolean overwrite) {
+this.overwrite = overwrite;
+return this;
+  }
 
   public CreateEvent build() {
 return new CreateEvent(this);
@@ -166,6 +173,7 @@ public abstract class Event {
   this.groupName = b.groupName;
   this.perms = b.perms;
   this.symlinkTarget = b.symlinkTarget;
+  this.overwrite = b.overwrite;
 }
 
 public INodeType getiNodeType() {
@@ -208,6 +216,10 @@ public abstract class Event {
 public String getSymlinkTarget() {
   return symlinkTarget;
 }
+
+public boolean getOverwrite() {
+  return overwrite;
+}
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/61045203/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index 38ba7db..193b826 100644
--- 

[05/24] git commit: YARN-2394. FairScheduler: Configure fairSharePreemptionThreshold per queue. (Wei Yan via kasha)

2014-09-05 Thread todd
YARN-2394. FairScheduler: Configure fairSharePreemptionThreshold per queue. 
(Wei Yan via kasha)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1dcaba9a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1dcaba9a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1dcaba9a

Branch: refs/heads/MR-2841
Commit: 1dcaba9a7aa27f7ca4ba693e3abb56ab3c59c8a7
Parents: ce04621
Author: Karthik Kambatla ka...@apache.org
Authored: Wed Sep 3 10:27:36 2014 -0700
Committer: Karthik Kambatla ka...@apache.org
Committed: Wed Sep 3 10:27:36 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |   3 +
 .../scheduler/fair/AllocationConfiguration.java |  22 ++-
 .../fair/AllocationFileLoaderService.java   |  48 -
 .../scheduler/fair/FSLeafQueue.java |  54 -
 .../scheduler/fair/FSParentQueue.java   |   6 +-
 .../resourcemanager/scheduler/fair/FSQueue.java |  23 ++-
 .../scheduler/fair/FairScheduler.java   |  60 ++
 .../scheduler/fair/QueueManager.java|  32 +--
 .../fair/TestAllocationFileLoaderService.java   |  45 -
 .../scheduler/fair/TestFSLeafQueue.java | 198 ---
 .../scheduler/fair/TestFairScheduler.java   | 158 ---
 .../src/site/apt/FairScheduler.apt.vm   |  10 +
 12 files changed, 412 insertions(+), 247 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1dcaba9a/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index a6a1b9b3..64ccd28 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -61,6 +61,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2395. FairScheduler: Preemption timeout should be configurable per 
 queue. (Wei Yan via kasha)
 
+YARN-2394. FairScheduler: Configure fairSharePreemptionThreshold per queue.
+(Wei Yan via kasha)
+
   IMPROVEMENTS
 
 YARN-2197. Add a link to YARN CHANGES.txt in the left side of doc

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1dcaba9a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java
index 228a761..de5a999 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/fair/AllocationConfiguration.java
@@ -70,6 +70,12 @@ public class AllocationConfiguration {
   // allowed to preempt other jobs' tasks.
  private final Map<String, Long> fairSharePreemptionTimeouts;
 
+  // The fair share preemption threshold for each queue. If a queue waits
+  // fairSharePreemptionTimeout without receiving
+  // fairshare * fairSharePreemptionThreshold resources, it is allowed to
+  // preempt other queues' tasks.
+  private final Map<String, Float> fairSharePreemptionThresholds;
+
   private final MapString, SchedulingPolicy schedulingPolicies;
   
   private final SchedulingPolicy defaultSchedulingPolicy;
@@ -92,6 +98,7 @@ public class AllocationConfiguration {
   SchedulingPolicy defaultSchedulingPolicy,
   Map<String, Long> minSharePreemptionTimeouts,
   Map<String, Long> fairSharePreemptionTimeouts,
+  Map<String, Float> fairSharePreemptionThresholds,
   Map<String, Map<QueueACL, AccessControlList>> queueAcls,
   QueuePlacementPolicy placementPolicy,
   Map<FSQueueType, Set<String>> configuredQueues) {
@@ -108,6 +115,7 @@ public class AllocationConfiguration {
 this.schedulingPolicies = schedulingPolicies;
 this.minSharePreemptionTimeouts = minSharePreemptionTimeouts;
 this.fairSharePreemptionTimeouts = fairSharePreemptionTimeouts;
+this.fairSharePreemptionThresholds = fairSharePreemptionThresholds;
 this.queueAcls = queueAcls;
 this.placementPolicy = placementPolicy;
 this.configuredQueues = configuredQueues;
@@ -126,6 +134,7 @@ public class AllocationConfiguration {
 queueAcls = new HashMapString, MapQueueACL, 

[19/24] git commit: HDFS-6996. SnapshotDiff report can hit IndexOutOfBoundsException when there are nested renamed directory/file. Contributed by Jing Zhao.

2014-09-05 Thread todd
HDFS-6996. SnapshotDiff report can hit IndexOutOfBoundsException when there are 
nested renamed directory/file. Contributed by Jing Zhao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f7df24bb
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f7df24bb
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f7df24bb

Branch: refs/heads/MR-2841
Commit: f7df24bb1b4278d27f5c2b7fecf7b69be4302c7a
Parents: 51a4faf
Author: Jing Zhao j...@hortonworks.com
Authored: Thu Sep 4 16:12:44 2014 -0700
Committer: Jing Zhao j...@hortonworks.com
Committed: Thu Sep 4 16:12:44 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  7 ++--
 .../hdfs/server/namenode/FSDirectory.java   |  5 +--
 .../hdfs/server/namenode/INodeReference.java|  4 +--
 .../snapshot/DirectorySnapshottableFeature.java |  2 +-
 .../snapshot/TestSnapshotDiffReport.java| 36 
 5 files changed, 47 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f7df24bb/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 27b97cf..3e00eba 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -590,8 +590,11 @@ Release 2.6.0 - UNRELEASED
 
 HDFS-6942. Fix typos in log messages. (Ray Chiang via wheat9)
 
-  HDFS-6848. Lack of synchronization on access to datanodeUuid in
-  DataStorage#format(). (Xiaoyu Yao via Arpit Agarwal)
+HDFS-6848. Lack of synchronization on access to datanodeUuid in
+DataStorage#format(). (Xiaoyu Yao via Arpit Agarwal)
+
+HDFS-6996. SnapshotDiff report can hit IndexOutOfBoundsException when there
+are nested renamed directory/file. (jing9)
 
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f7df24bb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index 1fa22a2..7b21f73 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -428,7 +428,8 @@ public class FSDirectory implements Closeable {
   /**
* @throws SnapshotAccessControlException 
* @see #unprotectedRenameTo(String, String, long)
-   * @deprecated Use {@link #renameTo(String, String, boolean, Rename...)}
+   * @deprecated Use {@link #renameTo(String, String, long,
+   *  BlocksMapUpdateInfo, Rename...)}
*/
   @Deprecated
   boolean renameTo(String src, String dst, long mtime)
@@ -479,7 +480,7 @@ public class FSDirectory implements Closeable {
* @throws QuotaExceededException if the operation violates any quota limit
* @throws FileAlreadyExistsException if the src is a symlink that points to 
dst
* @throws SnapshotAccessControlException if path is in RO snapshot
-   * @deprecated See {@link #renameTo(String, String, boolean, Rename...)}
+   * @deprecated See {@link #renameTo(String, String, long, 
BlocksMapUpdateInfo, Rename...)}
*/
   @Deprecated
   boolean unprotectedRenameTo(String src, String dst, long timestamp)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f7df24bb/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
index 05e144d..9bd2ad0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeReference.java
@@ -28,7 +28,6 @@ import org.apache.hadoop.fs.permission.PermissionStatus;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import 
org.apache.hadoop.hdfs.server.namenode.snapshot.DirectoryWithSnapshotFeature;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
-import org.apache.hadoop.hdfs.server.namenode.XAttrFeature;
 
 import 

[16/24] git commit: HADOOP-11063. KMS cannot deploy on Windows, because class names are too long. Contributed by Chris Nauroth.

2014-09-05 Thread todd
HADOOP-11063. KMS cannot deploy on Windows, because class names are too long. 
Contributed by Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b44b2ee4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b44b2ee4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b44b2ee4

Branch: refs/heads/MR-2841
Commit: b44b2ee4adb78723c221a7da8fd35ed011d0905c
Parents: 1a09536
Author: cnauroth cnaur...@apache.org
Authored: Thu Sep 4 11:47:18 2014 -0700
Committer: cnauroth cnaur...@apache.org
Committed: Thu Sep 4 11:47:18 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 hadoop-common-project/hadoop-kms/pom.xml| 1 +
 2 files changed, 4 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b44b2ee4/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index f610c5d..d38fae9 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -754,6 +754,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11012. hadoop fs -text of zero-length file causes EOFException
 (Eric Payne via jlowe)
 
+HADOOP-11063. KMS cannot deploy on Windows, because class names are too 
long.
+(cnauroth)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b44b2ee4/hadoop-common-project/hadoop-kms/pom.xml
--
diff --git a/hadoop-common-project/hadoop-kms/pom.xml 
b/hadoop-common-project/hadoop-kms/pom.xml
index b1ca307..3bb97c5 100644
--- a/hadoop-common-project/hadoop-kms/pom.xml
+++ b/hadoop-common-project/hadoop-kms/pom.xml
@@ -243,6 +243,7 @@
   <goal>war</goal>
 </goals>
 <configuration>
+  <archiveClasses>true</archiveClasses>
   <warName>kms</warName>
   <webappDirectory>${project.build.directory}/kms
   </webappDirectory>



[22/24] git commit: MAPREDUCE-6071. JobImpl#makeUberDecision doesn't log that Uber mode is disabled because of too much CPUs. Contributed by Tsuyoshi OZAWA

2014-09-05 Thread todd
MAPREDUCE-6071. JobImpl#makeUberDecision doesn't log that Uber mode is disabled 
because of too much CPUs. Contributed by Tsuyoshi OZAWA


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/45efc966
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/45efc966
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/45efc966

Branch: refs/heads/MR-2841
Commit: 45efc966ee1a1bda0271464dcee2564862d53b1f
Parents: 772d1fb
Author: Jason Lowe jl...@apache.org
Authored: Fri Sep 5 13:42:00 2014 +
Committer: Jason Lowe jl...@apache.org
Committed: Fri Sep 5 13:42:00 2014 +

--
 hadoop-mapreduce-project/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java | 2 ++
 2 files changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/45efc966/hadoop-mapreduce-project/CHANGES.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.txt 
b/hadoop-mapreduce-project/CHANGES.txt
index 0ee0731..73dd8d8 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -270,6 +270,9 @@ Release 2.6.0 - UNRELEASED
 MAPREDUCE-6063. Correct spill size calculation for spills wrapping the
 circular buffer. (zhihai xu via cdouglas)
 
+MAPREDUCE-6071. JobImpl#makeUberDecision doesn't log that Uber mode is
+disabled because of too much CPUs (Tsuyoshi OZAWA via jlowe)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/45efc966/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
index c1bc17d..dff5ef4 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
@@ -1285,6 +1285,8 @@ public class JobImpl implements 
org.apache.hadoop.mapreduce.v2.app.job.Job,
  msg.append(" too many reduces;");
    if (!smallInput)
  msg.append(" too much input;");
+  if (!smallCpu)
+msg.append(" too much CPU;");
    if (!smallMemory)
  msg.append(" too much RAM;");
    if (!notChainJob)
   if (!notChainJob)



[15/24] git commit: Fixing HDFS CHANGES.txt, missing HDFS-6905 entry

2014-09-05 Thread todd
Fixing HDFS CHANGES.txt, missing HDFS-6905 entry


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1a095361
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1a095361
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1a095361

Branch: refs/heads/MR-2841
Commit: 1a095361414ba660c139f33ae1eee430a3c3446c
Parents: 91d45f0
Author: Alejandro Abdelnur t...@apache.org
Authored: Thu Sep 4 11:05:20 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Thu Sep 4 11:07:08 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1a095361/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8498b00..27b97cf 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -689,6 +689,8 @@ Release 2.6.0 - UNRELEASED
   HDFS-2975. Rename with overwrite flag true can make NameNode to stuck in 
safemode 
   on NN (crash + restart). (Yi Liu via umamahesh)
 
+  HDFS-6905. fs-encryption merge triggered release audit failures. (clamb 
via tucu)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES



[13/24] git commit: HADOOP-11060. Create a CryptoCodec test that verifies interoperability between the JCE and OpenSSL implementations. (hitliuyi via tucu)

2014-09-05 Thread todd
HADOOP-11060. Create a CryptoCodec test that verifies interoperability between 
the JCE and OpenSSL implementations. (hitliuyi via tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b69a48c9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b69a48c9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b69a48c9

Branch: refs/heads/MR-2841
Commit: b69a48c988c147abf192e36c99e2d4aecc116339
Parents: 70b2187
Author: Alejandro Abdelnur t...@apache.org
Authored: Thu Sep 4 09:22:00 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Thu Sep 4 09:22:00 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +
 .../apache/hadoop/crypto/TestCryptoCodec.java   | 69 +++-
 .../apache/hadoop/crypto/TestCryptoStreams.java |  2 +-
 3 files changed, 55 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b69a48c9/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9645cba..f610c5d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -501,6 +501,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11015. Http server/client utils to propagate and recreate 
 Exceptions from server to client. (tucu)
 
+HADOOP-11060. Create a CryptoCodec test that verifies interoperability 
+between the JCE and OpenSSL implementations. (hitliuyi via tucu)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b69a48c9/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
index 49b5056..298f4ef 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/TestCryptoCodec.java
@@ -52,35 +52,40 @@ public class TestCryptoCodec {
   private Configuration conf = new Configuration();
   private int count = 1;
   private int seed = new Random().nextInt();
+  private final String jceCodecClass = 
+  "org.apache.hadoop.crypto.JceAesCtrCryptoCodec";
+  private final String opensslCodecClass = 
+  "org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec";
   
   @Test(timeout=12)
   public void testJceAesCtrCryptoCodec() throws Exception {
-cryptoCodecTest(conf, seed, 0, 
-"org.apache.hadoop.crypto.JceAesCtrCryptoCodec");
-cryptoCodecTest(conf, seed, count, 
-"org.apache.hadoop.crypto.JceAesCtrCryptoCodec");
+Assume.assumeTrue(NativeCodeLoader.buildSupportsOpenssl());
+Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
+cryptoCodecTest(conf, seed, 0, jceCodecClass, jceCodecClass);
+cryptoCodecTest(conf, seed, count, jceCodecClass, jceCodecClass);
+cryptoCodecTest(conf, seed, count, jceCodecClass, opensslCodecClass);
   }
   
-  @Test(timeout=120)
+  @Test(timeout=12)
   public void testOpensslAesCtrCryptoCodec() throws Exception {
 Assume.assumeTrue(NativeCodeLoader.buildSupportsOpenssl());
 Assert.assertEquals(null, OpensslCipher.getLoadingFailureReason());
-cryptoCodecTest(conf, seed, 0, 
-"org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec");
-cryptoCodecTest(conf, seed, count, 
-"org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec");
+cryptoCodecTest(conf, seed, 0, opensslCodecClass, opensslCodecClass);
+cryptoCodecTest(conf, seed, count, opensslCodecClass, opensslCodecClass);
+cryptoCodecTest(conf, seed, count, opensslCodecClass, jceCodecClass);
   }
   
   private void cryptoCodecTest(Configuration conf, int seed, int count, 
-  String codecClass) throws IOException, GeneralSecurityException {
-CryptoCodec codec = null;
+  String encCodecClass, String decCodecClass) throws IOException, 
+  GeneralSecurityException {
+CryptoCodec encCodec = null;
 try {
-  codec = (CryptoCodec)ReflectionUtils.newInstance(
-  conf.getClassByName(codecClass), conf);
+  encCodec = (CryptoCodec)ReflectionUtils.newInstance(
+  conf.getClassByName(encCodecClass), conf);
 } catch (ClassNotFoundException cnfe) {
   throw new IOException("Illegal crypto codec!");
 }
-LOG.info("Created a Codec object of type: " + codecClass

[06/24] git commit: HADOOP-10863. KMS should have a blacklist for decrypting EEKs. (asuresh via tucu)

2014-09-05 Thread todd
HADOOP-10863. KMS should have a blacklist for decrypting EEKs. (asuresh via 
tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d9a03e27
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d9a03e27
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d9a03e27

Branch: refs/heads/MR-2841
Commit: d9a03e272adbf3e9fde501610400f18fb4f6b865
Parents: 1dcaba9
Author: Alejandro Abdelnur t...@apache.org
Authored: Wed Sep 3 15:08:55 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Wed Sep 3 15:08:55 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../security/authorize/AccessControlList.java   |  12 ++-
 .../hadoop/crypto/key/kms/server/KMS.java   |  27 ++---
 .../hadoop/crypto/key/kms/server/KMSACLs.java   |  55 +-
 .../hadoop-kms/src/site/apt/index.apt.vm|  88 +++-
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 100 +--
 .../crypto/key/kms/server/TestKMSACLs.java  |   2 +-
 7 files changed, 252 insertions(+), 35 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9a03e27/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8e5f02a..0b9cfdc 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -493,6 +493,9 @@ Release 2.6.0 - UNRELEASED
 
 HADOOP-10990. Add missed NFSv3 request and response classes (brandonli)
 
+HADOOP-10863. KMS should have a blacklist for decrypting EEKs. 
+(asuresh via tucu)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9a03e27/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
index f78602a..d250df1 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/AccessControlList.java
@@ -221,7 +221,13 @@ public class AccessControlList implements Writable {
 return groups;
   }
 
-  public boolean isUserAllowed(UserGroupInformation ugi) {
+  /**
+   * Checks if a user represented by the provided {@link UserGroupInformation}
+   * is a member of the Access Control List
+   * @param ugi UserGroupInformation to check if contained in the ACL
+   * @return true if ugi is member of the list
+   */
+  public final boolean isUserInList(UserGroupInformation ugi) {
 if (allAllowed || users.contains(ugi.getShortUserName())) {
   return true;
 } else {
@@ -234,6 +240,10 @@ public class AccessControlList implements Writable {
 return false;
   }
 
+  public boolean isUserAllowed(UserGroupInformation ugi) {
+return isUserInList(ugi);
+  }
+
   /**
* Returns descriptive way of users and groups that are part of this ACL.
* Use {@link #getAclString()} to get the exact String that can be given to

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d9a03e27/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
--
diff --git 
a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
 
b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
index faec70a..43b07fe 100644
--- 
a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
+++ 
b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMS.java
@@ -26,10 +26,10 @@ import 
org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.EncryptedKeyVersi
 import org.apache.hadoop.crypto.key.kms.KMSRESTConstants;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.crypto.key.kms.KMSClientProvider;
 import 
org.apache.hadoop.security.token.delegation.web.HttpUserGroupInformation;
 
+
 import javax.ws.rs.Consumes;
 import javax.ws.rs.DELETE;
 import javax.ws.rs.DefaultValue;
@@ -73,29 +73,14 @@ public class KMS

[04/24] git commit: HADOOP-10956. Fix create-release script to include docs and necessary txt files. (kasha)

2014-09-05 Thread todd
HADOOP-10956. Fix create-release script to include docs and necessary txt 
files. (kasha)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ce046212
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ce046212
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ce046212

Branch: refs/heads/MR-2841
Commit: ce0462129fe09898fd9b169cae0564cb6d9bc419
Parents: 3425ae5
Author: Karthik Kambatla ka...@apache.org
Authored: Wed Sep 3 09:39:02 2014 -0700
Committer: Karthik Kambatla ka...@apache.org
Committed: Wed Sep 3 09:39:02 2014 -0700

--
 LICENSE.txt | 290 
 NOTICE.txt  |   2 +
 README.txt  |  31 ++
 dev-support/create-release.sh   |  24 +-
 .../main/resources/assemblies/hadoop-src.xml|   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 hadoop-common-project/hadoop-common/LICENSE.txt | 290 
 hadoop-common-project/hadoop-common/NOTICE.txt  |   2 -
 hadoop-common-project/hadoop-common/README.txt  |  31 --
 hadoop-dist/pom.xml |   3 +
 hadoop-hdfs-project/hadoop-hdfs/LICENSE.txt | 271 ---
 hadoop-hdfs-project/hadoop-hdfs/NOTICE.txt  |   2 -
 hadoop-mapreduce-project/LICENSE.txt| 341 ---
 hadoop-mapreduce-project/NOTICE.txt |   2 -
 hadoop-yarn-project/LICENSE.txt | 341 ---
 hadoop-yarn-project/NOTICE.txt  |  17 -
 16 files changed, 356 insertions(+), 1302 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ce046212/LICENSE.txt
--
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 000..946a6df
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,290 @@
+
+ Apache License
+   Version 2.0, January 2004
+http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+  "License" shall mean the terms and conditions for use, reproduction,
+  and distribution as defined by Sections 1 through 9 of this document.
+
+  "Licensor" shall mean the copyright owner or entity authorized by
+  the copyright owner that is granting the License.
+
+  "Legal Entity" shall mean the union of the acting entity and all
+  other entities that control, are controlled by, or are under common
+  control with that entity. For the purposes of this definition,
+  "control" means (i) the power, direct or indirect, to cause the
+  direction or management of such entity, whether by contract or
+  otherwise, or (ii) ownership of fifty percent (50%) or more of the
+  outstanding shares, or (iii) beneficial ownership of such entity.
+
+  "You" (or "Your") shall mean an individual or Legal Entity
+  exercising permissions granted by this License.
+
+  "Source" form shall mean the preferred form for making modifications,
+  including but not limited to software source code, documentation
+  source, and configuration files.
+
+  "Object" form shall mean any form resulting from mechanical
+  transformation or translation of a Source form, including but
+  not limited to compiled object code, generated documentation,
+  and conversions to other media types.
+
+  "Work" shall mean the work of authorship, whether in Source or
+  Object form, made available under the License, as indicated by a
+  copyright notice that is included in or attached to the work
+  (an example is provided in the Appendix below).
+
+  "Derivative Works" shall mean any work, whether in Source or Object
+  form, that is based on (or derived from) the Work and for which the
+  editorial revisions, annotations, elaborations, or other modifications
+  represent, as a whole, an original work of authorship. For the purposes
+  of this License, Derivative Works shall not include works that remain
+  separable from, or merely link (or bind by name) to the interfaces of,
+  the Work and Derivative Works thereof.
+
+  "Contribution" shall mean any work of authorship, including
+  the original version of the Work and any modifications or additions
+  to that Work or Derivative Works thereof, that is intentionally
+  submitted to Licensor for inclusion in the Work by the copyright owner
+  or by an individual or Legal Entity authorized to submit on behalf of
+  the copyright owner. For the purposes of this definition, "submitted"
+  means any form of electronic, verbal, or written communication sent
+  to the Licensor 

[10/24] git commit: HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors. Contributed by Yongjun Zhang.

2014-09-05 Thread todd
HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors.  
Contributed by Yongjun Zhang.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8f1a6685
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8f1a6685
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8f1a6685

Branch: refs/heads/MR-2841
Commit: 8f1a668575d35bee11f4cd8173335be5352ec620
Parents: f4caedf
Author: Colin Patrick Mccabe cmcc...@cloudera.com
Authored: Wed Sep 3 19:35:39 2014 -0700
Committer: Colin Patrick Mccabe cmcc...@cloudera.com
Committed: Wed Sep 3 19:35:39 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../hadoop/crypto/random/OsSecureRandom.java| 21 +---
 .../crypto/random/TestOsSecureRandom.java   | 15 ++
 3 files changed, 36 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f1a6685/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 0b9cfdc..e8d0f52 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -684,6 +684,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-8815. RandomDatum needs to override hashCode().
 (Brandon Li via suresh)
 
+HADOOP-11056. OsSecureRandom.setConf() might leak file descriptors (yzhang
+via cmccabe)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HADOOP-10734. Implement high-performance secure random number sources.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f1a6685/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
index c6cb0a8..fee4186 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/random/OsSecureRandom.java
@@ -23,6 +23,8 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.util.Random;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +39,8 @@ import static 
org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY
  */
 @InterfaceAudience.Private
 public class OsSecureRandom extends Random implements Closeable, Configurable {
+  public static final Log LOG = LogFactory.getLog(OsSecureRandom.class);
+  
   private static final long serialVersionUID = 6391500337172057900L;
 
   private transient Configuration conf;
@@ -72,12 +76,20 @@ public class OsSecureRandom extends Random implements 
Closeable, Configurable {
 HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY,
 HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT);
 File randomDevFile = new File(randomDevPath);
+
 try {
+  close();
   this.stream = new FileInputStream(randomDevFile);
-  fillReservoir(0);
 } catch (IOException e) {
   throw new RuntimeException(e);
 }
+
+try {
+  fillReservoir(0);
+} catch (RuntimeException e) {
+  close();
+  throw e;
+}
   }
 
   @Override
@@ -109,7 +121,10 @@ public class OsSecureRandom extends Random implements 
Closeable, Configurable {
   }
 
   @Override
-  synchronized public void close() throws IOException {
-stream.close();
+  synchronized public void close() {
+if (stream != null) {
+  IOUtils.cleanup(LOG, stream);
+  stream = null;
+}
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8f1a6685/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
index 8fc5c70..50a0031 100644
--- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
+++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/random/TestOsSecureRandom.java
@@ -22,6 +22,7 @@ 

[14/24] git commit: YARN-2509. Enable Cross Origin Filter for timeline server only and not all Yarn servers (Mit Desai via jeagles)

2014-09-05 Thread todd
YARN-2509. Enable Cross Origin Filter for timeline server only and not all Yarn 
servers (Mit Desai via jeagles)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/91d45f0f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/91d45f0f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/91d45f0f

Branch: refs/heads/MR-2841
Commit: 91d45f0f017281c8ab7ac858aa5d618accb9ea44
Parents: b69a48c
Author: Jonathan Eagles jeag...@gmail.com
Authored: Thu Sep 4 11:53:52 2014 -0500
Committer: Jonathan Eagles jeag...@gmail.com
Committed: Thu Sep 4 11:53:52 2014 -0500

--
 hadoop-yarn-project/CHANGES.txt  |  3 +++
 .../hadoop/yarn/conf/YarnConfiguration.java  |  8 
 .../ApplicationHistoryServer.java| 19 +++
 3 files changed, 26 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/91d45f0f/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 64ccd28..b0e4a01 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -175,6 +175,9 @@ Release 2.6.0 - UNRELEASED
 YARN-1506. Changed RMNode/SchedulerNode to update resource with event
 notification. (Junping Du via jianhe)
 
+YARN-2509. Enable Cross Origin Filter for timeline server only and not all
+Yarn servers (Mit Desai via jeagles)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/91d45f0f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 034ec4f..7b7511d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1239,6 +1239,14 @@ public class YarnConfiguration extends Configuration {
   public static final String TIMELINE_SERVICE_KEYTAB =
   TIMELINE_SERVICE_PREFIX + "keytab";
 
+  /** Enables cross origin support for timeline server.*/
+  public static final String TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED =
+  TIMELINE_SERVICE_PREFIX + "http-cross-origin.enabled";
+
+  /** Default value for cross origin support for timeline server.*/
+  public static final boolean
+  TIMELINE_SERVICE_HTTP_CROSS_ORIGIN_ENABLED_DEFAULT = false;
+
   
   // Other Configs
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/91d45f0f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 6ec0d42..158f2e6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
 import 
org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
 import 
org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService;
+import 
org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitializer;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
@@ -197,17 +198,27 @@ public class ApplicationHistoryServer extends 
CompositeService {
 // the customized filter will be loaded by 

[17/24] git commit: YARN-2431. NM restart: cgroup is not removed for reacquired containers. Contributed by Jason Lowe

2014-09-05 Thread todd
YARN-2431. NM restart: cgroup is not removed for reacquired containers. 
Contributed by Jason Lowe


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3fa5f728
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3fa5f728
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3fa5f728

Branch: refs/heads/MR-2841
Commit: 3fa5f728c4d6944302621965d6a0376827af5b51
Parents: b44b2ee
Author: Jason Lowe jl...@apache.org
Authored: Thu Sep 4 21:11:27 2014 +
Committer: Jason Lowe jl...@apache.org
Committed: Thu Sep 4 21:11:27 2014 +

--
 hadoop-yarn-project/CHANGES.txt |  3 +
 .../nodemanager/LinuxContainerExecutor.java | 11 
 .../nodemanager/TestLinuxContainerExecutor.java | 60 
 3 files changed, 74 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fa5f728/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index b0e4a01..f6f5c4b 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -278,6 +278,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2462. 
TestNodeManagerResync#testBlockNewContainerRequestsOnStartAndResync
 should have a test timeout (Eric Payne via jlowe)
 
+YARN-2431. NM restart: cgroup is not removed for reacquired containers
+(jlowe)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fa5f728/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
index 804864e..6b8dd28 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.server.nodemanager;
 
 import com.google.common.base.Optional;
+
 import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
@@ -342,6 +343,16 @@ public class LinuxContainerExecutor extends 
ContainerExecutor {
   }
 
   @Override
+  public int reacquireContainer(String user, ContainerId containerId)
+  throws IOException {
+try {
+  return super.reacquireContainer(user, containerId);
+} finally {
+  resourcesHandler.postExecute(containerId);
+}
+  }
+
+  @Override
   public boolean signalContainer(String user, String pid, Signal signal)
   throws IOException {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fa5f728/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
index a5ec43b..c02212e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutor.java
@@ -30,6 +30,8 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Set;
 
 import org.junit.Assert;
 import org.apache.commons.logging.Log;
@@ -42,11 +44,15 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import 

[23/24] git commit: HDFS-6376. Distcp data between two HA clusters requires another configuration. Contributed by Dave Marion and Haohui Mai.

2014-09-05 Thread todd
HDFS-6376. Distcp data between two HA clusters requires another configuration. 
Contributed by Dave Marion and Haohui Mai.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c6107f56
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c6107f56
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c6107f56

Branch: refs/heads/MR-2841
Commit: c6107f566ff01e9bfee9052f86f6e5b21d5e89f3
Parents: 45efc96
Author: Jing Zhao j...@hortonworks.com
Authored: Fri Sep 5 10:40:02 2014 -0700
Committer: Jing Zhao j...@hortonworks.com
Committed: Fri Sep 5 10:40:02 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |  1 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java| 80 ++--
 .../hdfs/server/datanode/BlockPoolManager.java  | 12 +--
 .../org/apache/hadoop/hdfs/tools/GetConf.java   |  4 +-
 .../src/main/resources/hdfs-default.xml | 10 +++
 .../org/apache/hadoop/hdfs/TestDFSUtil.java | 26 +++
 .../server/datanode/TestBlockPoolManager.java   | 22 ++
 .../apache/hadoop/hdfs/tools/TestGetConf.java   | 26 ++-
 9 files changed, 164 insertions(+), 20 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6107f56/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 6a4cf28..d4059de 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -441,6 +441,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6886. Use single editlog record for creating file + overwrite. (Yi Liu
 via jing9)
 
+HDFS-6376. Distcp data between two HA clusters requires another 
configuration.
+(Dave Marion and Haohui Mai via jing9)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6107f56/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index b5b4f3c..2f86ed6 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -530,6 +530,7 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   
   public static final String  DFS_NAMESERVICES = "dfs.nameservices";
   public static final String  DFS_NAMESERVICE_ID = "dfs.nameservice.id";
+  public static final String  DFS_INTERNAL_NAMESERVICES_KEY = 
"dfs.internal.nameservices";
   public static final String  DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_KEY = 
"dfs.namenode.resource.check.interval";
   public static final int DFS_NAMENODE_RESOURCE_CHECK_INTERVAL_DEFAULT = 
5000;
   public static final String  DFS_NAMENODE_DU_RESERVED_KEY = 
"dfs.namenode.resource.du.reserved";

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c6107f56/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 5559e0d..021890b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -60,6 +60,7 @@ import java.util.Set;
 
 import javax.net.SocketFactory;
 
+import com.google.common.collect.Sets;
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.Option;
@@ -612,7 +613,7 @@ public class DFSUtil {
 String keySuffix = concatSuffixes(suffixes);
 return addSuffix(key, keySuffix);
   }
-  
+
   /**
* Returns the configured address for all NameNodes in the cluster.
* @param conf configuration
@@ -621,14 +622,25 @@ public class DFSUtil {
* @return a map(nameserviceId to map(namenodeId to InetSocketAddress))
*/
   private static MapString, MapString, InetSocketAddress
-getAddresses(Configuration conf,
-  String defaultAddress, String... keys) {
+getAddresses(Configuration conf, String defaultAddress, String... keys) {
 CollectionString nameserviceIds = getNameServiceIds(conf);
-
+return getAddressesForNsIds(conf, nameserviceIds, defaultAddress, 

[12/24] git commit: HADOOP-11015. Http server/client utils to propagate and recreate Exceptions from server to client. (tucu)

2014-09-05 Thread todd
HADOOP-11015. Http server/client utils to propagate and recreate Exceptions 
from server to client. (tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/70b21874
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/70b21874
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/70b21874

Branch: refs/heads/MR-2841
Commit: 70b218748badf079c859c3af2b468a0b7b49c333
Parents: 41f1662
Author: Alejandro Abdelnur t...@apache.org
Authored: Thu Sep 4 09:11:10 2014 -0700
Committer: Alejandro Abdelnur t...@apache.org
Committed: Thu Sep 4 09:11:10 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../dev-support/findbugsExcludeFile.xml |   2 +-
 .../crypto/key/kms/KMSClientProvider.java   |  57 +-
 .../DelegationTokenAuthenticationFilter.java|  15 +-
 .../DelegationTokenAuthenticationHandler.java   |   6 +-
 .../web/DelegationTokenAuthenticator.java   |  20 +-
 .../apache/hadoop/util/HttpExceptionUtils.java  | 185 +++
 ...tionTokenAuthenticationHandlerWithMocks.java |  35 ++--
 .../hadoop/util/TestHttpExceptionUtils.java | 167 +
 .../key/kms/server/KMSExceptionsProvider.java   |  12 +-
 .../hadoop/fs/http/client/HttpFSFileSystem.java |  70 ---
 .../hadoop/fs/http/client/HttpFSUtils.java  |  50 -
 .../hadoop/lib/wsrs/ExceptionProvider.java  |  14 +-
 .../fs/http/client/BaseTestHttpFSWith.java  |   4 +-
 .../fs/http/server/TestHttpFSServerNoACLs.java  |  10 +-
 15 files changed, 423 insertions(+), 227 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/70b21874/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 2e04917..9645cba 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -498,6 +498,9 @@ Release 2.6.0 - UNRELEASED
 
 HADOOP-11054. Add a KeyProvider instantiation based on a URI. (tucu)
 
+HADOOP-11015. Http server/client utils to propagate and recreate 
+Exceptions from server to client. (tucu)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/70b21874/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml 
b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index 1469034..204e6ab 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -367,7 +367,7 @@
  /Match
 
   Match
-Class name=org.apache.hadoop.crypto.key.kms.KMSClientProvider/
+Class name=org.apache.hadoop.util.HttpExceptionUtils/
 Method name=validateResponse/
 Bug pattern=REC_CATCH_EXCEPTION/
   /Match

http://git-wip-us.apache.org/repos/asf/hadoop/blob/70b21874/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index dc9e6cb..a4e336c 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -34,6 +34,7 @@ import 
org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.token.Token;
 import 
org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL;
+import org.apache.hadoop.util.HttpExceptionUtils;
 import org.apache.http.client.utils.URIBuilder;
 import org.codehaus.jackson.map.ObjectMapper;
 
@@ -44,7 +45,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.lang.reflect.Constructor;
 import java.net.HttpURLConnection;
 import java.net.SocketTimeoutException;
 import java.net.URI;
@@ -54,7 +54,6 @@ import java.net.URLEncoder;
 import java.security.GeneralSecurityException;
 import java.security.NoSuchAlgorithmException;
 import java.security.PrivilegedExceptionAction;
-import java.text.MessageFormat;
 import

[24/24] git commit: Merge remote-tracking branch 'apache/trunk' into MR-2841

2014-09-05 Thread todd
Merge remote-tracking branch 'apache/trunk' into MR-2841


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4b3f1e2c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4b3f1e2c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4b3f1e2c

Branch: refs/heads/MR-2841
Commit: 4b3f1e2ce48124b7406d77cc2ae1d0914311b0d4
Parents: 683987b c6107f5
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 10:47:27 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 10:47:27 2014 -0700

--
 LICENSE.txt | 290 
 NOTICE.txt  |   2 +
 README.txt  |  31 ++
 dev-support/create-release.sh   |  24 +-
 .../main/resources/assemblies/hadoop-src.xml|   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |  20 ++
 hadoop-common-project/hadoop-common/LICENSE.txt | 290 
 hadoop-common-project/hadoop-common/NOTICE.txt  |   2 -
 hadoop-common-project/hadoop-common/README.txt  |  31 --
 .../dev-support/findbugsExcludeFile.xml |   2 +-
 .../hadoop/crypto/key/KeyProviderFactory.java   |  36 +-
 .../crypto/key/kms/KMSClientProvider.java   |  57 +---
 .../hadoop/crypto/random/OsSecureRandom.java|  21 +-
 .../security/authorize/AccessControlList.java   |  12 +-
 .../DelegationTokenAuthenticationFilter.java|  15 +-
 .../DelegationTokenAuthenticationHandler.java   |   6 +-
 .../web/DelegationTokenAuthenticator.java   |  20 +-
 .../apache/hadoop/util/HttpExceptionUtils.java  | 185 ++
 .../apache/hadoop/crypto/TestCryptoCodec.java   |  69 +++-
 .../apache/hadoop/crypto/TestCryptoStreams.java |   2 +-
 .../crypto/key/TestKeyProviderFactory.java  |  13 +
 .../crypto/random/TestOsSecureRandom.java   |  15 +
 ...tionTokenAuthenticationHandlerWithMocks.java |  35 +-
 .../hadoop/util/TestHttpExceptionUtils.java | 167 +
 hadoop-common-project/hadoop-kms/pom.xml|   1 +
 .../hadoop/crypto/key/kms/server/KMS.java   |  27 +-
 .../hadoop/crypto/key/kms/server/KMSACLs.java   |  55 ++-
 .../key/kms/server/KMSExceptionsProvider.java   |  12 +-
 .../hadoop-kms/src/site/apt/index.apt.vm|  88 -
 .../hadoop/crypto/key/kms/server/TestKMS.java   | 100 +-
 .../crypto/key/kms/server/TestKMSACLs.java  |   2 +-
 hadoop-dist/pom.xml |   3 +
 .../hadoop/fs/http/client/HttpFSFileSystem.java |  70 ++--
 .../hadoop/fs/http/client/HttpFSUtils.java  |  50 ---
 .../hadoop/lib/wsrs/ExceptionProvider.java  |  14 +-
 .../fs/http/client/BaseTestHttpFSWith.java  |   4 +-
 .../fs/http/server/TestHttpFSServerNoACLs.java  |  10 +-
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java   |   6 +-
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java|   2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  25 ++
 hadoop-hdfs-project/hadoop-hdfs/LICENSE.txt | 271 ---
 hadoop-hdfs-project/hadoop-hdfs/NOTICE.txt  |   2 -
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   3 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java|  80 -
 .../hadoop/hdfs/DistributedFileSystem.java  |  11 +-
 .../org/apache/hadoop/hdfs/inotify/Event.java   |  12 +
 .../apache/hadoop/hdfs/protocolPB/PBHelper.java |   6 +-
 .../hdfs/server/datanode/BlockPoolManager.java  |  12 +-
 .../hadoop/hdfs/server/datanode/DataNode.java   |   2 +-
 .../hdfs/server/datanode/DataStorage.java   |   2 +-
 .../hdfs/server/namenode/FSDirectory.java   |  44 ++-
 .../hadoop/hdfs/server/namenode/FSEditLog.java  |   6 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   8 +-
 .../hdfs/server/namenode/FSEditLogOp.java   |  18 +
 .../server/namenode/FSImageSerialization.java   |  17 +
 .../hdfs/server/namenode/FSNamesystem.java  |  60 ++--
 .../hdfs/server/namenode/INodeReference.java|   4 +-
 .../namenode/InotifyFSEditLogOpTranslator.java  |   1 +
 .../server/namenode/NameNodeLayoutVersion.java  |   4 +-
 .../snapshot/DirectorySnapshottableFeature.java |   2 +-
 .../org/apache/hadoop/hdfs/tools/GetConf.java   |   4 +-
 .../hadoop-hdfs/src/main/proto/inotify.proto|   1 +
 .../src/main/resources/hdfs-default.xml |  18 +
 .../hadoop/hdfs/TestBlocksScheduledCounter.java |  18 +-
 .../hdfs/TestDFSInotifyEventInputStream.java|   8 +-
 .../org/apache/hadoop/hdfs/TestDFSRename.java   |   6 +
 .../org/apache/hadoop/hdfs/TestDFSUtil.java |  26 ++
 .../org/apache/hadoop/hdfs/TestFileAppend4.java |   2 +-
 .../apache/hadoop/hdfs/TestFileCreation.java| 119 +++
 .../org/apache/hadoop/hdfs/TestLocalDFS.java|  29 +-
 .../server/datanode/TestBlockPoolManager.java   |  22 ++
 .../hdfs/server/namenode/CreateEditsLog.java|   2 +-
 .../hdfs/server/namenode/TestEditLog.java   |   2 +-
 .../hdfs/server/namenode/TestStartup.java

[21/24] git commit: HDFS-6714. TestBlocksScheduledCounter#testBlocksScheduledCounter should shutdown cluster (vinayakumarb)

2014-09-05 Thread todd
HDFS-6714. TestBlocksScheduledCounter#testBlocksScheduledCounter should 
shutdown cluster (vinayakumarb)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/772d1fb0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/772d1fb0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/772d1fb0

Branch: refs/heads/MR-2841
Commit: 772d1fb01e65ca42ef4365d2a9307da6c2448c3f
Parents: 6104520
Author: Vinayakumar B vinayakum...@apache.org
Authored: Fri Sep 5 10:25:09 2014 +0530
Committer: Vinayakumar B vinayakum...@apache.org
Committed: Fri Sep 5 10:25:09 2014 +0530

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../hadoop/hdfs/TestBlocksScheduledCounter.java   | 18 +++---
 2 files changed, 18 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/772d1fb0/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8964d2d..6a4cf28 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -697,6 +697,9 @@ Release 2.6.0 - UNRELEASED
 
   HDFS-6905. fs-encryption merge triggered release audit failures. (clamb 
via tucu)
 
+  HDFS-6714. TestBlocksScheduledCounter#testBlocksScheduledCounter should
+  shutdown cluster (vinayakumarb)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/772d1fb0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
index 59dbb30..a66cd1d 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestBlocksScheduledCounter.java
@@ -27,6 +27,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager;
+import org.junit.After;
 import org.junit.Test;
 
 /**
@@ -35,14 +36,25 @@ import org.junit.Test;
  * scheduled to a datanode.
  */
 public class TestBlocksScheduledCounter {
+  MiniDFSCluster cluster = null;
+  FileSystem fs = null;
+
+  @After
+  public void tearDown() throws IOException {
+if (fs != null) {
+  fs.close();
+}
+if(cluster!=null){
+  cluster.shutdown();
+}
+  }
 
   @Test
   public void testBlocksScheduledCounter() throws IOException {
-MiniDFSCluster cluster = new MiniDFSCluster.Builder(new 
HdfsConfiguration())
-   .build();
+cluster = new MiniDFSCluster.Builder(new HdfsConfiguration()).build();
 
 cluster.waitActive();
-FileSystem fs = cluster.getFileSystem();
+fs = cluster.getFileSystem();
 
 //open a file an write a few bytes:
 FSDataOutputStream out = fs.create(new Path(/testBlockScheduledCounter));



git commit: MAPREDUCE-6074. native-task: Fix release audit warnings

2014-09-05 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/MR-2841 4b3f1e2ce - 7c91f9b14


MAPREDUCE-6074. native-task: Fix release audit warnings


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7c91f9b1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7c91f9b1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7c91f9b1

Branch: refs/heads/MR-2841
Commit: 7c91f9b1484d487e792dca051fbd418697049422
Parents: 4b3f1e2
Author: Todd Lipcon t...@apache.org
Authored: Fri Sep 5 13:41:18 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Fri Sep 5 19:58:41 2014 -0700

--
 LICENSE.txt |  33 +++
 .../CHANGES.MAPREDUCE-2841.txt  |   1 +
 .../hadoop-mapreduce-client-nativetask/LICENSE  | 266 ---
 .../hadoop-mapreduce-client-nativetask/pom.xml  |  12 +-
 .../mapred/nativetask/INativeComparable.java|  27 +-
 .../src/main/native/COPYING |  87 --
 ...oop_mapred_nativetask_NativeBatchProcessor.h |  54 
 ...che_hadoop_mapred_nativetask_NativeRuntime.h |  66 -
 .../src/main/native/test.sh |  11 +
 .../nativetask/buffer/TestBufferPushPull.java   |  13 +-
 .../buffer/TestByteBufferReadWrite.java |  16 +-
 .../nativetask/combinertest/CombinerTest.java   |   4 +-
 .../combinertest/OldAPICombinerTest.java|   5 +-
 .../nativetask/combinertest/WordCount.java  |   3 +-
 .../nativetask/compresstest/CompressMapper.java |   4 +-
 .../nativetask/compresstest/CompressTest.java   |   2 +-
 .../nativetask/handlers/TestCombineHandler.java |  13 +-
 .../TestNativeCollectorOnlyHandler.java |  17 +-
 .../hadoop/mapred/nativetask/kvtest/KVJob.java  |   3 +-
 .../mapred/nativetask/kvtest/TestInputFile.java |   7 +-
 .../nativetask/nonsorttest/NonSortTest.java |   2 +-
 .../serde/TestNativeSerialization.java  |  10 +-
 .../nativetask/testutil/BytesFactory.java   |   2 +-
 .../mapred/nativetask/utils/TestBytesUtil.java  |  20 +-
 hadoop-mapreduce-project/pom.xml|   2 +-
 25 files changed, 143 insertions(+), 537 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/LICENSE.txt
--
diff --git a/LICENSE.txt b/LICENSE.txt
index 946a6df..99989f1 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -288,3 +288,36 @@ lz4_encoder.h,lz4hc.h,lz4hc.c,lz4hc_encoder.h},
- LZ4 homepage : http://fastcompression.blogspot.com/p/lz4.html
- LZ4 source repository : http://code.google.com/p/lz4/
 */
+
+
+For 
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/native/gtest
+-
+Copyright 2008, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+* Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+* Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+* Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+AS IS AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7c91f9b1/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index ea17907..539e7be 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -22,3 +22,4 @@ MAPREDUCE-6056. native-task: move system test working dir

[2/2] git commit: MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-03 Thread todd
MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc 
cleanup


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/1081d9ce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/1081d9ce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/1081d9ce

Branch: refs/heads/MR-2841
Commit: 1081d9cee23aa661d7c9165bc9855865a38b528e
Parents: cce7d1e
Author: Todd Lipcon t...@apache.org
Authored: Wed Sep 3 12:02:47 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Wed Sep 3 12:22:38 2014 -0700

--
 .../CHANGES.MAPREDUCE-2841.txt  |  1 +
 .../hadoop/mapred/nativetask/Command.java   |  8 +
 .../mapred/nativetask/CommandDispatcher.java| 11 ++-
 .../hadoop/mapred/nativetask/Constants.java | 18 ++-
 .../hadoop/mapred/nativetask/DataChannel.java   |  3 ++
 .../hadoop/mapred/nativetask/DataReceiver.java  |  3 ++
 .../mapred/nativetask/HadoopPlatform.java   |  6 ++--
 .../mapred/nativetask/ICombineHandler.java  |  3 ++
 .../mapred/nativetask/INativeComparable.java|  5 +++
 .../mapred/nativetask/INativeHandler.java   |  2 ++
 .../mapred/nativetask/NativeBatchProcessor.java |  3 +-
 .../mapred/nativetask/NativeDataSource.java | 10 ++
 .../mapred/nativetask/NativeDataTarget.java | 14 +++--
 .../NativeMapOutputCollectorDelegator.java  | 11 ---
 .../hadoop/mapred/nativetask/NativeRuntime.java | 10 +++---
 .../hadoop/mapred/nativetask/Platform.java  | 15 -
 .../hadoop/mapred/nativetask/Platforms.java |  7 +++--
 .../mapred/nativetask/StatusReportChecker.java  | 33 ++--
 .../hadoop/mapred/nativetask/TaskContext.java   | 29 +
 .../mapred/nativetask/buffer/BufferType.java|  5 +--
 .../nativetask/buffer/ByteBufferDataReader.java | 10 ++
 .../nativetask/buffer/ByteBufferDataWriter.java | 10 +++---
 .../nativetask/buffer/DataInputStream.java  |  2 ++
 .../nativetask/buffer/DataOutputStream.java | 11 +++
 .../mapred/nativetask/buffer/InputBuffer.java   |  2 ++
 .../mapred/nativetask/buffer/OutputBuffer.java  |  2 ++
 .../nativetask/handlers/BufferPullee.java   |  4 ++-
 .../nativetask/handlers/BufferPuller.java   | 26 ---
 .../nativetask/handlers/BufferPushee.java   |  2 ++
 .../nativetask/handlers/BufferPusher.java   |  3 +-
 .../nativetask/handlers/CombinerHandler.java| 19 +--
 .../mapred/nativetask/handlers/IDataLoader.java |  3 ++
 .../handlers/NativeCollectorOnlyHandler.java| 22 -
 .../serde/BoolWritableSerializer.java   |  2 ++
 .../serde/ByteWritableSerializer.java   |  2 ++
 .../serde/BytesWritableSerializer.java  |  2 ++
 .../nativetask/serde/DefaultSerializer.java |  4 +++
 .../serde/DoubleWritableSerializer.java |  2 ++
 .../serde/FloatWritableSerializer.java  |  3 ++
 .../mapred/nativetask/serde/IKVSerializer.java  | 12 ---
 .../nativetask/serde/INativeSerializer.java |  4 +++
 .../nativetask/serde/IntWritableSerializer.java |  2 ++
 .../mapred/nativetask/serde/KVSerializer.java   | 17 ++
 .../serde/LongWritableSerializer.java   |  2 ++
 .../nativetask/serde/NativeSerialization.java   |  2 ++
 .../serde/NullWritableSerializer.java   |  2 ++
 .../serde/SerializationFramework.java   |  3 ++
 .../mapred/nativetask/serde/TextSerializer.java |  2 ++
 .../serde/VIntWritableSerializer.java   |  3 +-
 .../serde/VLongWritableSerializer.java  |  3 +-
 .../mapred/nativetask/util/BytesUtil.java   |  2 ++
 .../mapred/nativetask/util/ConfigUtil.java  |  5 ++-
 .../nativetask/util/LocalJobOutputFiles.java|  2 ++
 .../nativetask/util/NativeTaskOutput.java   |  2 ++
 .../nativetask/util/NativeTaskOutputFiles.java  |  3 +-
 .../mapred/nativetask/util/OutputUtil.java  |  5 +--
 .../mapred/nativetask/util/ReadWriteBuffer.java |  5 +--
 .../mapred/nativetask/util/SizedWritable.java   |  4 ++-
 .../mapred/nativetask/TestTaskContext.java  |  4 +--
 59 files changed, 264 insertions(+), 148 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
--
diff --git a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt 
b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
index 7c9558e..269a2f6 100644
--- a/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
+++ b/hadoop-mapreduce-project/CHANGES.MAPREDUCE-2841.txt
@@ -19,3 +19,4 @@ MAPREDUCE-5977. Fix or suppress native-task gcc warnings 
(Manu Zhang via todd)
 MAPREDUCE-6054. native-task: Speed up tests (todd)
 MAPREDUCE-6058. native-task: KVTest and LargeKVTest should check mr job is 
sucessful

[1/2] MAPREDUCE-6055. native-task: findbugs, interface annotations, and other misc cleanup

2014-09-03 Thread todd
Repository: hadoop
Updated Branches:
  refs/heads/MR-2841 cce7d1e2f - 1081d9cee


http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
index ec326ca..d8a6595 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/LongWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class LongWritableSerializer extends DefaultSerializer implements
 INativeComparable {
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
index f5a033d..5881a46 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NativeSerialization.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.mapred.nativetask.serde;
 import java.io.IOException;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 
+@InterfaceAudience.Private
 public class NativeSerialization {
 
   private final ConcurrentHashMapString, Class? map = new 
ConcurrentHashMapString, Class?();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
index afa4e8e..f6e7cf5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/NullWritableSerializer.java
@@ -20,9 +20,11 @@ package org.apache.hadoop.mapred.nativetask.serde;
 
 import java.io.IOException;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.nativetask.INativeComparable;
 
+@InterfaceAudience.Private
 public class NullWritableSerializer extends DefaultSerializer implements
 INativeComparable {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/1081d9ce/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/src/main/java/org/apache/hadoop/mapred/nativetask/serde/SerializationFramework.java
 

[14/50] [abbrv] git commit: HADOOP-11005. Fix HTTP content type for ReconfigurationServlet. Contributed by Lei Xu.

2014-09-02 Thread todd
HADOOP-11005. Fix HTTP content type for ReconfigurationServlet. Contributed by 
Lei Xu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7119bd49
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7119bd49
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7119bd49

Branch: refs/heads/MR-2841
Commit: 7119bd49c870cf1e6b8c091d87025b439b9468df
Parents: ab638e7
Author: Andrew Wang andrew.w...@cloudera.com
Authored: Thu Aug 28 17:39:50 2014 -0700
Committer: Andrew Wang andrew.w...@cloudera.com
Committed: Thu Aug 28 17:40:55 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt   | 3 +++
 .../main/java/org/apache/hadoop/conf/ReconfigurationServlet.java  | 2 ++
 2 files changed, 5 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7119bd49/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9fb0cd3..05eb383 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -468,6 +468,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-10880. Move HTTP delegation tokens out of URL querystring to 
 a header. (tucu)
 
+HADOOP-11005. Fix HTTP content type for ReconfigurationServlet.
+(Lei Xu via wang)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7119bd49/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
index 3fa162b..eb1fb6b 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationServlet.java
@@ -200,6 +200,7 @@ public class ReconfigurationServlet extends HttpServlet {
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
 throws ServletException, IOException {
 LOG.info(GET);
+resp.setContentType(text/html);
 PrintWriter out = resp.getWriter();
 
 Reconfigurable reconf = getReconfigurable(req);
@@ -214,6 +215,7 @@ public class ReconfigurationServlet extends HttpServlet {
   protected void doPost(HttpServletRequest req, HttpServletResponse resp)
 throws ServletException, IOException {
 LOG.info(POST);
+resp.setContentType(text/html);
 PrintWriter out = resp.getWriter();
 
 Reconfigurable reconf = getReconfigurable(req);



[40/50] [abbrv] git commit: HADOOP-10990. Add missed NFSv3 request and response classes. Contributed by Brandon Li

2014-09-02 Thread todd
HADOOP-10990. Add missed NFSv3 request and response classes. Contributed by 
Brandon Li


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bad5f38d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bad5f38d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bad5f38d

Branch: refs/heads/MR-2841
Commit: bad5f38d47f5e93c21641931ac92595c71b05bd7
Parents: e1109fb
Author: Brandon Li brando...@apache.org
Authored: Tue Sep 2 11:27:28 2014 -0700
Committer: Brandon Li brando...@apache.org
Committed: Tue Sep 2 11:27:28 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |  2 +
 .../hadoop/nfs/nfs3/Nfs3FileAttributes.java | 14 ++-
 .../hadoop/nfs/nfs3/request/LINK3Request.java   | 61 ++
 .../hadoop/nfs/nfs3/request/MKNOD3Request.java  | 89 
 .../hadoop/nfs/nfs3/response/LINK3Response.java | 54 
 .../nfs/nfs3/response/MKNOD3Response.java   | 68 +++
 6 files changed, 286 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bad5f38d/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 3f735f8..32de088 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -485,6 +485,8 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11030. Define a variable jackson.version instead of using constant 
 at multiple places. (Juan Yu via kasha)
 
+HADOOP-10990. Add missed NFSv3 request and response classes (brandonli)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bad5f38d/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
--
diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
index 9936b8d..47126d6 100644
--- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
+++ 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/Nfs3FileAttributes.java
@@ -53,9 +53,19 @@ public class Nfs3FileAttributes {
* For Hadoop, currently this field is always zero.
*/
   public static class Specdata3 {
-final static int specdata1 = 0;
-final static int specdata2 = 0;
+final int specdata1;
+final int specdata2;
 
+public Specdata3() {
+  specdata1 = 0;
+  specdata2 = 0;
+}
+
+public Specdata3(int specdata1, int specdata2) {
+  this.specdata1 = specdata1;
+  this.specdata2 = specdata2;
+}
+
 public int getSpecdata1() {
   return specdata1;
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bad5f38d/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
--
diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
new file mode 100644
index 000..2e959f5
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/request/LINK3Request.java
@@ -0,0 +1,61 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.nfs.nfs3.FileHandle;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * LINK3 Request
+ */
+public class LINK3Request extends RequestWithHandle {
+  private final FileHandle fromDirHandle;
+  private final String

[13/50] [abbrv] git commit: HDFS-6865. Byte array native checksumming on client side. Contributed by James Thomas.

2014-09-02 Thread todd
HDFS-6865. Byte array native checksumming on client side. Contributed by James 
Thomas.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ab638e77
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ab638e77
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ab638e77

Branch: refs/heads/MR-2841
Commit: ab638e77b811d9592470f7d342cd11a66efbbf0d
Parents: 48aa3b7
Author: Todd Lipcon t...@apache.org
Authored: Thu Aug 28 16:44:09 2014 -0700
Committer: Todd Lipcon t...@apache.org
Committed: Thu Aug 28 16:44:09 2014 -0700

--
 .../apache/hadoop/fs/ChecksumFileSystem.java|   8 +-
 .../java/org/apache/hadoop/fs/ChecksumFs.java   |   8 +-
 .../org/apache/hadoop/fs/FSOutputSummer.java| 107 ---
 .../org/apache/hadoop/util/DataChecksum.java|   2 +
 .../org/apache/hadoop/util/NativeCrc32.java |   2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../org/apache/hadoop/hdfs/DFSOutputStream.java |  38 ++-
 .../org/apache/hadoop/hdfs/TestFileAppend.java  |   4 +-
 .../security/token/block/TestBlockToken.java|   2 +
 .../namenode/TestBlockUnderConstruction.java|   3 +
 .../namenode/TestDecommissioningStatus.java |   3 +
 11 files changed, 108 insertions(+), 72 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ab638e77/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 511ca7f..c8d1b69 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -381,7 +381,8 @@ public abstract class ChecksumFileSystem extends 
FilterFileSystem {
   long blockSize,
   Progressable progress)
   throws IOException {
-  super(DataChecksum.newCrc32(), fs.getBytesPerSum(), 4);
+  super(DataChecksum.newDataChecksum(DataChecksum.Type.CRC32,
+  fs.getBytesPerSum()));
   int bytesPerSum = fs.getBytesPerSum();
   this.datas = fs.getRawFileSystem().create(file, overwrite, bufferSize, 
  replication, blockSize, progress);
@@ -405,10 +406,11 @@ public abstract class ChecksumFileSystem extends 
FilterFileSystem {
 }
 
 @Override
-protected void writeChunk(byte[] b, int offset, int len, byte[] checksum)
+protected void writeChunk(byte[] b, int offset, int len, byte[] checksum,
+int ckoff, int cklen)
 throws IOException {
   datas.write(b, offset, len);
-  sums.write(checksum);
+  sums.write(checksum, ckoff, cklen);
 }
 
 @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ab638e77/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index 4be3b29..ab5cd13 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -337,7 +337,8 @@ public abstract class ChecksumFs extends FilterFs {
   final short replication, final long blockSize, 
   final Progressable progress, final ChecksumOpt checksumOpt,
   final boolean createParent) throws IOException {
-  super(DataChecksum.newCrc32(), fs.getBytesPerSum(), 4);
+  super(DataChecksum.newDataChecksum(DataChecksum.Type.CRC32,
+  fs.getBytesPerSum()));
 
   // checksumOpt is passed down to the raw fs. Unless it implements
   // checksum impelemts internally, checksumOpt will be ignored.
@@ -370,10 +371,11 @@ public abstract class ChecksumFs extends FilterFs {
 }
 
 @Override
-protected void writeChunk(byte[] b, int offset, int len, byte[] checksum)
+protected void writeChunk(byte[] b, int offset, int len, byte[] checksum,
+int ckoff, int cklen)
   throws IOException {
   datas.write(b, offset, len);
-  sums.write(checksum);
+  sums.write(checksum, ckoff, cklen);
 }
 
 @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ab638e77/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java

  1   2   3   4   5   6   >