git commit: HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.

2014-09-07 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/trunk d1fa58292 -> a23144fd8


HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a23144fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a23144fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a23144fd

Branch: refs/heads/trunk
Commit: a23144fd8a1e399e431f2f272388ec109df37ab1
Parents: d1fa582
Author: cnauroth 
Authored: Sun Sep 7 08:39:20 2014 -0700
Committer: cnauroth 
Committed: Sun Sep 7 08:39:20 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/cli/TestCryptoAdminCLI.java  |  4 +++-
 .../test/java/org/apache/hadoop/hdfs/TestDFSUtil.java   |  4 +++-
 .../org/apache/hadoop/hdfs/TestEncryptionZones.java | 12 +++-
 .../org/apache/hadoop/hdfs/TestReservedRawPaths.java|  3 ++-
 5 files changed, 18 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3d43171..5a30d0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -615,6 +615,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6898. DN must reserve space for a full block when an RBW block is
 created. (Arpit Agarwal)
 
+HDFS-7025. HDFS Credential Provider related Unit Test Failure.
+(Xiaoyu Yao via cnauroth)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c83829..adeabfe 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
@@ -64,8 +65,9 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
 
 tmpDir = new File(System.getProperty("test.build.data", "target"),
 UUID.randomUUID().toString()).getAbsoluteFile();
+final Path jksPath = new Path(tmpDir.toString(), "test.jks");
 conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
-JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks");
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
 
 dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
 dfsCluster.waitClusterUp();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 5ffd3b5..046265f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -809,8 +810,9 @@ public class TestDFSUtil {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();

git commit: HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.

2014-09-07 Thread cnauroth
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 eba06e61c -> b87d1d7d0


HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.

(cherry picked from commit a23144fd8a1e399e431f2f272388ec109df37ab1)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b87d1d7d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b87d1d7d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b87d1d7d

Branch: refs/heads/branch-2
Commit: b87d1d7d0abd73da2a2bf548404a517a53ac9eb0
Parents: eba06e6
Author: cnauroth 
Authored: Sun Sep 7 08:39:20 2014 -0700
Committer: cnauroth 
Committed: Sun Sep 7 08:41:08 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/cli/TestCryptoAdminCLI.java  |  4 +++-
 .../test/java/org/apache/hadoop/hdfs/TestDFSUtil.java   |  4 +++-
 .../org/apache/hadoop/hdfs/TestEncryptionZones.java | 12 +++-
 .../org/apache/hadoop/hdfs/TestReservedRawPaths.java|  3 ++-
 5 files changed, 18 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87d1d7d/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0393164..d31cb26 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -354,6 +354,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6898. DN must reserve space for a full block when an RBW block is
 created. (Arpit Agarwal)
 
+HDFS-7025. HDFS Credential Provider related Unit Test Failure.
+(Xiaoyu Yao via cnauroth)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87d1d7d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c83829..adeabfe 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
@@ -64,8 +65,9 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
 
 tmpDir = new File(System.getProperty("test.build.data", "target"),
 UUID.randomUUID().toString()).getAbsoluteFile();
+final Path jksPath = new Path(tmpDir.toString(), "test.jks");
 conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
-JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks");
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
 
 dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
 dfsCluster.waitClusterUp();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87d1d7d/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 5ffd3b5..046265f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -809,8 +810,9 @@ public class TestDFSUtil {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";

[2/4] git commit: HDFS-6898. DN must reserve space for a full block when an RBW block is created. (Contributed by Arpit Agarwal)

2014-09-07 Thread arp
HDFS-6898. DN must reserve space for a full block when an RBW block is created. 
(Contributed by Arpit Agarwal)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d1fa5829
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d1fa5829
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d1fa5829

Branch: refs/heads/HDFS-6581
Commit: d1fa58292e87bc29b4ef1278368c2be938a0afc4
Parents: cbea1b1
Author: arp 
Authored: Sat Sep 6 20:02:40 2014 -0700
Committer: arp 
Committed: Sat Sep 6 21:04:29 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../hadoop/hdfs/protocol/HdfsConstants.java |   2 +-
 .../server/datanode/ReplicaBeingWritten.java|  12 +-
 .../hdfs/server/datanode/ReplicaInPipeline.java |  33 ++-
 .../hdfs/server/datanode/ReplicaInfo.java   |   7 +
 .../server/datanode/fsdataset/FsVolumeSpi.java  |  11 +
 .../datanode/fsdataset/impl/BlockPoolSlice.java |   6 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  15 +-
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |  58 +++-
 .../server/datanode/TestDirectoryScanner.java   |   8 +
 .../fsdataset/impl/TestRbwSpaceReservation.java | 288 +++
 .../fsdataset/impl/TestWriteToReplica.java  |   2 +-
 12 files changed, 423 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 4412b30..3d43171 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -612,6 +612,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6862. Add missing timeout annotations to tests. (Xiaoyu Yao via
 Arpit Agarwal)
 
+HDFS-6898. DN must reserve space for a full block when an RBW block is
+created. (Arpit Agarwal)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
index 77fe543..240dcd0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
@@ -48,7 +48,7 @@ public class HdfsConstants {
   "org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol";
   
   
-  public static final int MIN_BLOCKS_FOR_WRITE = 5;
+  public static final int MIN_BLOCKS_FOR_WRITE = 1;
 
   // Long that indicates "leave current quota unchanged"
   public static final long QUOTA_DONT_SET = Long.MAX_VALUE;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
index 728dd38..4a89493 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
@@ -34,10 +34,12 @@ public class ReplicaBeingWritten extends ReplicaInPipeline {
* @param genStamp replica generation stamp
* @param vol volume where replica is located
* @param dir directory path where block and meta files are located
+   * @param bytesToReserve disk space to reserve for this replica, based on
+   *   the estimated maximum block length.
*/
   public ReplicaBeingWritten(long blockId, long genStamp, 
-FsVolumeSpi vol, File dir) {
-super( blockId, genStamp, vol, dir);
+FsVolumeSpi vol, File dir, long bytesToReserve) {
+super(blockId, genStamp, vol, dir, bytesToReserve);
   }
   
   /**
@@ -60,10 +62,12 @@ public class ReplicaBeingWritten extends ReplicaInPipeline {
* @param vol volume where replica is located
* @param dir directory path where block and meta files are located
* @param writer a thread that is writing to this replica
+   *

[4/4] git commit: Merge trunk into HDFS-6581

2014-09-07 Thread arp
Merge trunk into HDFS-6581

Conflicts:

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDirectoryScanner.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/eb8284d5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/eb8284d5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/eb8284d5

Branch: refs/heads/HDFS-6581
Commit: eb8284d50e1aa9f196556ed20b4b5e3f330e65fe
Parents: 31bbeaf a23144f
Author: arp 
Authored: Sun Sep 7 14:46:46 2014 -0700
Committer: arp 
Committed: Sun Sep 7 14:46:46 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   6 +
 .../hadoop/hdfs/protocol/HdfsConstants.java |   2 +-
 .../server/datanode/ReplicaBeingWritten.java|  12 +-
 .../hdfs/server/datanode/ReplicaInPipeline.java |  33 ++-
 .../hdfs/server/datanode/ReplicaInfo.java   |   7 +
 .../server/datanode/fsdataset/FsVolumeSpi.java  |  13 +-
 .../datanode/fsdataset/impl/BlockPoolSlice.java |   6 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  15 +-
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |  58 +++-
 .../apache/hadoop/cli/TestCryptoAdminCLI.java   |   4 +-
 .../org/apache/hadoop/hdfs/TestDFSUtil.java |   4 +-
 .../apache/hadoop/hdfs/TestEncryptionZones.java |  12 +-
 .../hadoop/hdfs/TestReservedRawPaths.java   |   3 +-
 .../server/datanode/TestDirectoryScanner.java   |   8 +
 .../fsdataset/impl/TestRbwSpaceReservation.java | 288 +++
 .../fsdataset/impl/TestWriteToReplica.java  |   2 +-
 hadoop-yarn-project/CHANGES.txt |   3 +
 .../yarn/webapp/util/TestWebAppUtils.java   |   4 +-
 18 files changed, 448 insertions(+), 32 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb8284d5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInPipeline.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb8284d5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaInfo.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb8284d5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
--
diff --cc 
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
index e141cd7,cba23c3..8ebf2b4
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/FsVolumeSpi.java
@@@ -46,6 -46,14 +46,17 @@@ public interface FsVolumeSpi 

public StorageType getStorageType();
  
 +  /** Returns true if the volume is NOT backed by persistent storage. */
 +  public boolean isTransientStorage();
- }
++
+   /**
+* Reserve disk space for an RBW block so a writer does not run out of
+* space before the block is full.
+*/
+   public void reserveSpaceForRbw(long bytesToReserve);
+ 
+   /**
+* Release disk space previously reserved for RBW block.
+*/
+   public void releaseReservedSpace(long bytesToRelease);
 -}
++}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb8284d5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/eb8284d5/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
--
diff --cc 
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
index 606b067,4511f21..b243f27
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetImpl.java
@@@ -920,30 -872,12 +920,30 @@@ class FsDatasetImpl implements FsDatase
" and thus cannot be created.");
  }
  // create a new block
 -FsVolumeImpl v = volumes.getNextVolume(storageType, b.getNumBytes());
 -// create a rbw file to hold block in the designated volume
 +FsVolumeImpl v;
 +while (true) {
 +  try {
 +if (allowLazyPersist) {
 + 

[3/4] git commit: HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.

2014-09-07 Thread arp
HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a23144fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a23144fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a23144fd

Branch: refs/heads/HDFS-6581
Commit: a23144fd8a1e399e431f2f272388ec109df37ab1
Parents: d1fa582
Author: cnauroth 
Authored: Sun Sep 7 08:39:20 2014 -0700
Committer: cnauroth 
Committed: Sun Sep 7 08:39:20 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/cli/TestCryptoAdminCLI.java  |  4 +++-
 .../test/java/org/apache/hadoop/hdfs/TestDFSUtil.java   |  4 +++-
 .../org/apache/hadoop/hdfs/TestEncryptionZones.java | 12 +++-
 .../org/apache/hadoop/hdfs/TestReservedRawPaths.java|  3 ++-
 5 files changed, 18 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3d43171..5a30d0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -615,6 +615,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6898. DN must reserve space for a full block when an RBW block is
 created. (Arpit Agarwal)
 
+HDFS-7025. HDFS Credential Provider related Unit Test Failure.
+(Xiaoyu Yao via cnauroth)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c83829..adeabfe 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
@@ -64,8 +65,9 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
 
 tmpDir = new File(System.getProperty("test.build.data", "target"),
 UUID.randomUUID().toString()).getAbsoluteFile();
+final Path jksPath = new Path(tmpDir.toString(), "test.jks");
 conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
-JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks");
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
 
 dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
 dfsCluster.waitClusterUp();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 5ffd3b5..046265f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -809,8 +810,9 @@ public class TestDFSUtil {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
 
File file = new File(testDir, "test.jks");

[1/4] git commit: YARN-2519. Credential Provider related unit tests failed on Windows. Contributed by Xiaoyu Yao.

2014-09-07 Thread arp
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6581 31bbeaf38 -> eb8284d50


YARN-2519. Credential Provider related unit tests failed on Windows. 
Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cbea1b10
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cbea1b10
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cbea1b10

Branch: refs/heads/HDFS-6581
Commit: cbea1b10efd871d04c648af18449dc724685db74
Parents: 88209ce
Author: cnauroth 
Authored: Sat Sep 6 20:05:07 2014 -0700
Committer: cnauroth 
Committed: Sat Sep 6 20:05:07 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java | 4 +++-
 2 files changed, 6 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbea1b10/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 34a206a..beafc22 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -287,6 +287,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2431. NM restart: cgroup is not removed for reacquired containers
 (jlowe)
 
+YARN-2519. Credential Provider related unit tests failed on Windows.
+(Xiaoyu Yao via cnauroth)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbea1b10/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
index 18600fd..2bd91b4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.File;
 import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.http.HttpServer2.Builder;
 import org.apache.hadoop.security.alias.CredentialProvider;
@@ -74,8 +75,9 @@ public class TestWebAppUtils {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
 
 File file = new File(testDir, "test.jks");
 file.delete();



git commit: YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. Contributed by Jonathan Eagles.

2014-09-07 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/trunk a23144fd8 -> a092cdf32


YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. 
Contributed by Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a092cdf3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a092cdf3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a092cdf3

Branch: refs/heads/trunk
Commit: a092cdf32de4d752456286a9f4dda533d8a62bca
Parents: a23144f
Author: Zhijie Shen 
Authored: Sun Sep 7 17:49:06 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 17:49:06 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../timeline/webapp/CrossOriginFilter.java  | 20 ++-
 .../timeline/webapp/TestCrossOriginFilter.java  | 21 +++-
 3 files changed, 42 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index beafc22..ed31479 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -184,6 +184,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2508. Cross Origin configuration parameters prefix are not honored
 (Mit Desai via jeagles)
 
+YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
+(Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index d71175f..5a0703d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -24,6 +24,8 @@ import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -204,7 +206,23 @@ public class CrossOriginFilter implements Filter {
 
   @VisibleForTesting
   boolean isOriginAllowed(String origin) {
-return allowAllOrigins || allowedOrigins.contains(origin);
+if (allowAllOrigins) {
+  return true;
+}
+
+for (String allowedOrigin : allowedOrigins) {
+  if (allowedOrigin.contains("*")) {
+String regex = allowedOrigin.replace(".", "\\.").replace("*", ".*");
+Pattern p = Pattern.compile(regex);
+Matcher m = p.matcher(origin);
+if (m.matches()) {
+  return true;
+}
+  } else if (allowedOrigin.equals(origin)) {
+return true;
+  }
+}
+return false;
   }
 
   private boolean areHeadersAllowed(String accessControlRequestHeaders) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
index f666c21..ccc9bbf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
@@ -77,7 +77,26 @@ public class TestCrossOriginFilter {
   

git commit: YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. Contributed by Jonathan Eagles.

2014-09-07 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 b87d1d7d0 -> f2a5a5d4a


YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. 
Contributed by Jonathan Eagles.

(cherry picked from commit a092cdf32de4d752456286a9f4dda533d8a62bca)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f2a5a5d4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f2a5a5d4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f2a5a5d4

Branch: refs/heads/branch-2
Commit: f2a5a5d4aada5b0a3660e5db281f505e2cd3193a
Parents: b87d1d7
Author: Zhijie Shen 
Authored: Sun Sep 7 17:49:06 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 17:52:38 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../timeline/webapp/CrossOriginFilter.java  | 20 ++-
 .../timeline/webapp/TestCrossOriginFilter.java  | 21 +++-
 3 files changed, 42 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2a5a5d4/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 83274b6..a149028 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -161,6 +161,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2508. Cross Origin configuration parameters prefix are not honored
 (Mit Desai via jeagles)
 
+YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
+(Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2a5a5d4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index d71175f..5a0703d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -24,6 +24,8 @@ import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -204,7 +206,23 @@ public class CrossOriginFilter implements Filter {
 
   @VisibleForTesting
   boolean isOriginAllowed(String origin) {
-return allowAllOrigins || allowedOrigins.contains(origin);
+if (allowAllOrigins) {
+  return true;
+}
+
+for (String allowedOrigin : allowedOrigins) {
+  if (allowedOrigin.contains("*")) {
+String regex = allowedOrigin.replace(".", "\\.").replace("*", ".*");
+Pattern p = Pattern.compile(regex);
+Matcher m = p.matcher(origin);
+if (m.matches()) {
+  return true;
+}
+  } else if (allowedOrigin.equals(origin)) {
+return true;
+  }
+}
+return false;
   }
 
   private boolean areHeadersAllowed(String accessControlRequestHeaders) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f2a5a5d4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
index f666c21..ccc9bbf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossO

git commit: YARN-2507. Documented CrossOriginFilter configurations for the timeline server. Contributed by Jonathan Eagles.

2014-09-07 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/trunk a092cdf32 -> 56dc496a1


YARN-2507. Documented CrossOriginFilter configurations for the timeline server. 
Contributed by Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/56dc496a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/56dc496a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/56dc496a

Branch: refs/heads/trunk
Commit: 56dc496a1031621d2b701801de4ec29179d75f2e
Parents: a092cdf
Author: Zhijie Shen 
Authored: Sun Sep 7 18:22:40 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 18:22:40 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../src/site/apt/TimelineServer.apt.vm  | 37 
 2 files changed, 40 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/56dc496a/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ed31479..ed9de87 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -187,6 +187,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
 (Jonathan Eagles via zjshen)
 
+YARN-2507. Documented CrossOriginFilter configurations for the timeline
+server. (Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56dc496a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
index c704d37..92c7377 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
@@ -102,6 +102,43 @@ YARN Timeline Server
   yarn.timeline-service.handler-thread-count
   10
 
+
+<property>
+  <description>Enables cross-origin support (CORS) for web services where
+  cross-origin web response headers are needed. For example, javascript making
+  a web services request to the timeline server.</description>
+  <name>yarn.timeline-service.http-cross-origin.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>Comma separated list of origins that are allowed for web
+  services needing cross-origin (CORS) support. Wildcards (*) and patterns
+  allowed</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-origins</name>
+  <value>*</value>
+</property>
+
+<property>
+  <description>Comma separated list of methods that are allowed for web
+  services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-methods</name>
+  <value>GET,POST,HEAD</value>
+</property>
+
+<property>
+  <description>Comma separated list of headers that are allowed for web
+  services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-headers</name>
+  <value>X-Requested-With,Content-Type,Accept,Origin</value>
+</property>
+
+<property>
+  <description>The number of seconds a pre-flighted request can be cached
+  for web services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.max-age</name>
+  <value>1800</value>
+</property>
 +---+
 
 * Generic-data related Configuration



git commit: YARN-2507. Documented CrossOriginFilter configurations for the timeline server. Contributed by Jonathan Eagles.

2014-09-07 Thread zjshen
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 f2a5a5d4a -> 0974400b7


YARN-2507. Documented CrossOriginFilter configurations for the timeline server. 
Contributed by Jonathan Eagles.

(cherry picked from commit 56dc496a1031621d2b701801de4ec29179d75f2e)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0974400b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0974400b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0974400b

Branch: refs/heads/branch-2
Commit: 0974400b7401cebef0e9212d7400d868271876f9
Parents: f2a5a5d
Author: Zhijie Shen 
Authored: Sun Sep 7 18:22:40 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 18:25:13 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../src/site/apt/TimelineServer.apt.vm  | 37 
 2 files changed, 40 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974400b/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index a149028..8b362a5 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -164,6 +164,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
 (Jonathan Eagles via zjshen)
 
+YARN-2507. Documented CrossOriginFilter configurations for the timeline
+server. (Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974400b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
index c704d37..92c7377 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
@@ -102,6 +102,43 @@ YARN Timeline Server
   yarn.timeline-service.handler-thread-count
   10
 
+
+<property>
+  <description>Enables cross-origin support (CORS) for web services where
+  cross-origin web response headers are needed. For example, javascript making
+  a web services request to the timeline server.</description>
+  <name>yarn.timeline-service.http-cross-origin.enabled</name>
+  <value>false</value>
+</property>
+
+<property>
+  <description>Comma separated list of origins that are allowed for web
+  services needing cross-origin (CORS) support. Wildcards (*) and patterns
+  allowed</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-origins</name>
+  <value>*</value>
+</property>
+
+<property>
+  <description>Comma separated list of methods that are allowed for web
+  services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-methods</name>
+  <value>GET,POST,HEAD</value>
+</property>
+
+<property>
+  <description>Comma separated list of headers that are allowed for web
+  services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.allowed-headers</name>
+  <value>X-Requested-With,Content-Type,Accept,Origin</value>
+</property>
+
+<property>
+  <description>The number of seconds a pre-flighted request can be cached
+  for web services needing cross-origin (CORS) support.</description>
+  <name>yarn.timeline-service.http-cross-origin.max-age</name>
+  <value>1800</value>
+</property>
 +---+
 
 * Generic-data related Configuration



git commit: YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by Tsuyoshi OZAWA

2014-09-07 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/trunk 56dc496a1 -> 0974f434c


YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by 
Tsuyoshi OZAWA


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0974f434
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0974f434
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0974f434

Branch: refs/heads/trunk
Commit: 0974f434c47ffbf4b77a8478937fd99106c8ddbd
Parents: 56dc496
Author: Jian He 
Authored: Sun Sep 7 18:25:44 2014 -0700
Committer: Jian He 
Committed: Sun Sep 7 18:29:22 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/api/records/ContainerId.java| 39 +++-
 .../apache/hadoop/yarn/util/ConverterUtils.java | 15 +---
 .../apache/hadoop/yarn/api/TestContainerId.java |  4 ++
 .../hadoop/yarn/util/TestConverterUtils.java|  9 +
 5 files changed, 55 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ed9de87..d54fcd6 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -190,6 +190,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2507. Documented CrossOriginFilter configurations for the timeline
 server. (Jonathan Eagles via zjshen)
 
+YARN-2515. Updated ConverterUtils#toContainerId to parse epoch.
+(Tsuyoshi OZAWA via jianhe)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
index fc7f404..321052b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.yarn.api.records;
 
-import java.text.NumberFormat;
+import com.google.common.base.Splitter;
 
+import java.text.NumberFormat;
+import java.util.Iterator;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.yarn.util.Records;
 @Public
 @Stable
public abstract class ContainerId implements Comparable<ContainerId>{
+  private static final Splitter _SPLITTER = Splitter.on('_').trimResults();
+  private static final String CONTAINER_PREFIX = "container";
 
   @Private
   @Unstable
@@ -163,5 +167,38 @@ public abstract class ContainerId implements 
Comparable<ContainerId>{
 return sb.toString();
   }
 
+  @Public
+  @Unstable
+  public static ContainerId fromString(String containerIdStr) {
+Iterator<String> it = _SPLITTER.split(containerIdStr).iterator();
+if (!it.next().equals(CONTAINER_PREFIX)) {
+  throw new IllegalArgumentException("Invalid ContainerId prefix: "
+  + containerIdStr);
+}
+try {
+  ApplicationAttemptId appAttemptID = toApplicationAttemptId(it);
+  int id = Integer.parseInt(it.next());
+  int epoch = 0;
+  if (it.hasNext()) {
+epoch = Integer.parseInt(it.next());
+  }
+  int cid = (epoch << 22) | id;
+  ContainerId containerId = ContainerId.newInstance(appAttemptID, cid);
+  return containerId;
+} catch (NumberFormatException n) {
+  throw new IllegalArgumentException("Invalid ContainerId: "
+  + containerIdStr, n);
+}
+  }
+
+  private static ApplicationAttemptId toApplicationAttemptId(
+  Iterator<String> it) throws NumberFormatException {
+ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+Integer.parseInt(it.next()));
+ApplicationAttemptId appAttemptId =
+ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next()));
+return appAttemptId;
+  }
+
   protected abstract void build();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hado

git commit: YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by Tsuyoshi OZAWA (cherry picked from commit 0974f434c47ffbf4b77a8478937fd99106c8ddbd)

2014-09-07 Thread jianhe
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 0974400b7 -> c17810a84


YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by 
Tsuyoshi OZAWA
(cherry picked from commit 0974f434c47ffbf4b77a8478937fd99106c8ddbd)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c17810a8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c17810a8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c17810a8

Branch: refs/heads/branch-2
Commit: c17810a84c98c808403fe4caf4532efa1e781221
Parents: 0974400
Author: Jian He 
Authored: Sun Sep 7 18:25:44 2014 -0700
Committer: Jian He 
Committed: Sun Sep 7 18:31:07 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/api/records/ContainerId.java| 39 +++-
 .../apache/hadoop/yarn/util/ConverterUtils.java | 15 +---
 .../apache/hadoop/yarn/api/TestContainerId.java |  4 ++
 .../hadoop/yarn/util/TestConverterUtils.java|  9 +
 5 files changed, 55 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c17810a8/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 8b362a5..9124678 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -167,6 +167,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2507. Documented CrossOriginFilter configurations for the timeline
 server. (Jonathan Eagles via zjshen)
 
+YARN-2515. Updated ConverterUtils#toContainerId to parse epoch.
+(Tsuyoshi OZAWA via jianhe)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c17810a8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
index fc7f404..321052b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.yarn.api.records;
 
-import java.text.NumberFormat;
+import com.google.common.base.Splitter;
 
+import java.text.NumberFormat;
+import java.util.Iterator;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.yarn.util.Records;
 @Public
 @Stable
public abstract class ContainerId implements Comparable<ContainerId>{
+  private static final Splitter _SPLITTER = Splitter.on('_').trimResults();
+  private static final String CONTAINER_PREFIX = "container";
 
   @Private
   @Unstable
@@ -163,5 +167,38 @@ public abstract class ContainerId implements 
Comparable<ContainerId>{
 return sb.toString();
   }
 
+  @Public
+  @Unstable
+  public static ContainerId fromString(String containerIdStr) {
+Iterator<String> it = _SPLITTER.split(containerIdStr).iterator();
+if (!it.next().equals(CONTAINER_PREFIX)) {
+  throw new IllegalArgumentException("Invalid ContainerId prefix: "
+  + containerIdStr);
+}
+try {
+  ApplicationAttemptId appAttemptID = toApplicationAttemptId(it);
+  int id = Integer.parseInt(it.next());
+  int epoch = 0;
+  if (it.hasNext()) {
+epoch = Integer.parseInt(it.next());
+  }
+  int cid = (epoch << 22) | id;
+  ContainerId containerId = ContainerId.newInstance(appAttemptID, cid);
+  return containerId;
+} catch (NumberFormatException n) {
+  throw new IllegalArgumentException("Invalid ContainerId: "
+  + containerIdStr, n);
+}
+  }
+
+  private static ApplicationAttemptId toApplicationAttemptId(
+  Iterator<String> it) throws NumberFormatException {
+ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+Integer.parseInt(it.next()));
+ApplicationAttemptId appAttemptId =
+ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next()));
+return appAttemptId;
+  }
+
   protected abstract void build();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/c17810a8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
--
diff --git 
a/hadoo

[03/19] git commit: HDFS-6862. Add missing timeout annotations to tests. (Contributed by Xiaoyu Yao)

2014-09-07 Thread szetszwo
HDFS-6862. Add missing timeout annotations to tests. (Contributed by Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9609b730
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9609b730
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9609b730

Branch: refs/heads/HDFS-6584
Commit: 9609b7303a98c8eff676c5a086b08b1ca9ab777c
Parents: b051327
Author: arp 
Authored: Fri Sep 5 11:08:03 2014 -0700
Committer: arp 
Committed: Fri Sep 5 11:10:58 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +
 .../apache/hadoop/hdfs/TestHDFSServerPorts.java | 24 
 .../TestValidateConfigurationSettings.java  | 16 ++---
 .../namenode/ha/TestDelegationTokensWithHA.java | 63 
 .../hdfs/server/namenode/ha/TestHAMetrics.java  | 10 ++--
 .../namenode/ha/TestHAStateTransitions.java | 52 +++-
 .../namenode/ha/TestStandbyCheckpoints.java | 54 +++--
 7 files changed, 97 insertions(+), 125 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9609b730/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7b8917b..0772ea6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -607,6 +607,9 @@ Release 2.6.0 - UNRELEASED
 
 HDFS-6979. hdfs.dll does not produce .pdb files. (cnauroth)
 
+HDFS-6862. Add missing timeout annotations to tests. (Xiaoyu Yao via
+Arpit Agarwal)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9609b730/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
index 59d1615..ce8a4e7 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSServerPorts.java
@@ -17,14 +17,6 @@
  */
 package org.apache.hadoop.hdfs;
 
-import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.UnknownHostException;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
@@ -39,6 +31,14 @@ import org.apache.hadoop.net.DNS;
 import org.apache.hadoop.test.PathUtils;
 import org.junit.Test;
 
+import java.io.File;
+import java.io.IOException;
+import java.net.UnknownHostException;
+
+import static org.apache.hadoop.hdfs.server.common.Util.fileAsURI;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
 /**
  * This test checks correctness of port usage by hdfs components:
  * NameNode, DataNode, SecondaryNamenode and BackupNode.
@@ -245,7 +245,7 @@ public class TestHDFSServerPorts {
 return true;
   }
 
-  @Test
+  @Test(timeout = 300000)
   public void testNameNodePorts() throws Exception {
 runTestNameNodePorts(false);
 runTestNameNodePorts(true);
@@ -296,7 +296,7 @@ public class TestHDFSServerPorts {
   /**
* Verify datanode port usage.
*/
-  @Test
+  @Test(timeout = 300000)
   public void testDataNodePorts() throws Exception {
 NameNode nn = null;
 try {
@@ -332,7 +332,7 @@ public class TestHDFSServerPorts {
   /**
* Verify secondary namenode port usage.
*/
-  @Test
+  @Test(timeout = 300000)
   public void testSecondaryNodePorts() throws Exception {
 NameNode nn = null;
 try {
@@ -361,7 +361,7 @@ public class TestHDFSServerPorts {
 /**
  * Verify BackupNode port usage.
  */
-  @Test
+@Test(timeout = 300000)
 public void testBackupNodePorts() throws Exception {
   NameNode nn = null;
   try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9609b730/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestValidateConfigurationSettings.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs

[18/19] git commit: YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by Tsuyoshi OZAWA

2014-09-07 Thread szetszwo
YARN-2515. Updated ConverterUtils#toContainerId to parse epoch. Contributed by 
Tsuyoshi OZAWA


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0974f434
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0974f434
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0974f434

Branch: refs/heads/HDFS-6584
Commit: 0974f434c47ffbf4b77a8478937fd99106c8ddbd
Parents: 56dc496
Author: Jian He 
Authored: Sun Sep 7 18:25:44 2014 -0700
Committer: Jian He 
Committed: Sun Sep 7 18:29:22 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../hadoop/yarn/api/records/ContainerId.java| 39 +++-
 .../apache/hadoop/yarn/util/ConverterUtils.java | 15 +---
 .../apache/hadoop/yarn/api/TestContainerId.java |  4 ++
 .../hadoop/yarn/util/TestConverterUtils.java|  9 +
 5 files changed, 55 insertions(+), 15 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ed9de87..d54fcd6 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -190,6 +190,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2507. Documented CrossOriginFilter configurations for the timeline
 server. (Jonathan Eagles via zjshen)
 
+YARN-2515. Updated ConverterUtils#toContainerId to parse epoch.
+(Tsuyoshi OZAWA via jianhe)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
index fc7f404..321052b 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ContainerId.java
@@ -18,8 +18,10 @@
 
 package org.apache.hadoop.yarn.api.records;
 
-import java.text.NumberFormat;
+import com.google.common.base.Splitter;
 
+import java.text.NumberFormat;
+import java.util.Iterator;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Stable;
@@ -33,6 +35,8 @@ import org.apache.hadoop.yarn.util.Records;
 @Public
 @Stable
public abstract class ContainerId implements Comparable<ContainerId>{
+  private static final Splitter _SPLITTER = Splitter.on('_').trimResults();
+  private static final String CONTAINER_PREFIX = "container";
 
   @Private
   @Unstable
@@ -163,5 +167,38 @@ public abstract class ContainerId implements 
Comparable<ContainerId>{
 return sb.toString();
   }
 
+  @Public
+  @Unstable
+  public static ContainerId fromString(String containerIdStr) {
+Iterator<String> it = _SPLITTER.split(containerIdStr).iterator();
+if (!it.next().equals(CONTAINER_PREFIX)) {
+  throw new IllegalArgumentException("Invalid ContainerId prefix: "
+  + containerIdStr);
+}
+try {
+  ApplicationAttemptId appAttemptID = toApplicationAttemptId(it);
+  int id = Integer.parseInt(it.next());
+  int epoch = 0;
+  if (it.hasNext()) {
+epoch = Integer.parseInt(it.next());
+  }
+  int cid = (epoch << 22) | id;
+  ContainerId containerId = ContainerId.newInstance(appAttemptID, cid);
+  return containerId;
+} catch (NumberFormatException n) {
+  throw new IllegalArgumentException("Invalid ContainerId: "
+  + containerIdStr, n);
+}
+  }
+
+  private static ApplicationAttemptId toApplicationAttemptId(
+  Iterator<String> it) throws NumberFormatException {
+ApplicationId appId = ApplicationId.newInstance(Long.parseLong(it.next()),
+Integer.parseInt(it.next()));
+ApplicationAttemptId appAttemptId =
+ApplicationAttemptId.newInstance(appId, Integer.parseInt(it.next()));
+return appAttemptId;
+  }
+
   protected abstract void build();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0974f434/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-y

[02/19] git commit: HDFS-6979. Fix minor error in CHANGES.txt. Contributed by Chris Nauroth.

2014-09-07 Thread szetszwo
HDFS-6979. Fix minor error in CHANGES.txt. Contributed by Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b051327a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b051327a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b051327a

Branch: refs/heads/HDFS-6584
Commit: b051327ab6a01774e1dad59e1e547dd16f603789
Parents: fab9bc5
Author: cnauroth 
Authored: Fri Sep 5 11:07:41 2014 -0700
Committer: cnauroth 
Committed: Fri Sep 5 11:07:41 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b051327a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 680af55..7b8917b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -605,7 +605,7 @@ Release 2.6.0 - UNRELEASED
 HDFS-6831. Inconsistency between 'hdfs dfsadmin' and 'hdfs dfsadmin -help'.
 (Xiaoyu Yao via Arpit Agarwal)
 
-HDFS-6979. hdfs.dll not produce .pdb files. (cnauroth)
+HDFS-6979. hdfs.dll does not produce .pdb files. (cnauroth)
 
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   



[06/19] git commit: HADOOP-11065. Rat check should exclude **/build/**. (kasha)

2014-09-07 Thread szetszwo
HADOOP-11065. Rat check should exclude **/build/**. (kasha)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0571b456
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0571b456
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0571b456

Branch: refs/heads/HDFS-6584
Commit: 0571b4561bad7e0230920e52d3758a3658fcf20d
Parents: 7a62515
Author: Karthik Kambatla 
Authored: Fri Sep 5 13:08:59 2014 -0700
Committer: Karthik Kambatla 
Committed: Fri Sep 5 13:09:10 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 2 ++
 pom.xml | 1 +
 2 files changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0571b456/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index afd1cc7..d20bf08 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -777,6 +777,8 @@ Release 2.5.1 - UNRELEASED
 
 HADOOP-11001. Fix test-patch to work with the git repo. (kasha)
 
+HADOOP-11065. Rat check should exclude "**/build/**". (kasha)
+
 Release 2.5.0 - 2014-08-11
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0571b456/pom.xml
--
diff --git a/pom.xml b/pom.xml
index a4f8241..5cc30c2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -324,6 +324,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xs
 .gitignore
 .git/**
 .idea/**
+   **/build/**
  

   



[11/19] git commit: HDFS-6986. DistributedFileSystem must get delegation tokens from configured KeyProvider. (zhz via tucu)

2014-09-07 Thread szetszwo
HDFS-6986. DistributedFileSystem must get delegation tokens from configured 
KeyProvider. (zhz via tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3b35f816
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3b35f816
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3b35f816

Branch: refs/heads/HDFS-6584
Commit: 3b35f81603bbfae119762b50bcb46de70a421368
Parents: 0f3c19c
Author: Alejandro Abdelnur 
Authored: Fri Sep 5 22:33:48 2014 -0700
Committer: Alejandro Abdelnur 
Committed: Fri Sep 5 22:33:48 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 ++
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |  4 ++
 .../hadoop/hdfs/DistributedFileSystem.java  | 24 +++
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 43 
 4 files changed, 74 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3b35f816/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0772ea6..333bdce 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -711,6 +711,9 @@ Release 2.6.0 - UNRELEASED
   HDFS-6714. TestBlocksScheduledCounter#testBlocksScheduledCounter should
   shutdown cluster (vinayakumarb)
 
+  HDFS-6986. DistributedFileSystem must get delegation tokens from 
configured 
+  KeyProvider. (zhz via tucu)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3b35f816/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
index 8daf912..e4215f0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
@@ -3084,4 +3084,8 @@ public class DFSClient implements java.io.Closeable, 
RemotePeerFactory,
   DFSHedgedReadMetrics getHedgedReadMetrics() {
 return HEDGED_READ_METRIC;
   }
+
+  public KeyProviderCryptoExtension getKeyProvider() {
+return provider;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3b35f816/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index bf7d62e..dbdf5c1 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -84,8 +84,10 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.crypto.key.KeyProviderDelegationTokenExtension;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
@@ -1946,6 +1948,28 @@ public class DistributedFileSystem extends FileSystem {
 }.resolve(this, absF);
   }
 
+  @Override
+  public Token[] addDelegationTokens(
+  final String renewer, Credentials credentials) throws IOException {
+Token[] tokens = super.addDelegationTokens(renewer, credentials);
+if (dfs.getKeyProvider() != null) {
+  KeyProviderDelegationTokenExtension keyProviderDelegationTokenExtension =
+  KeyProviderDelegationTokenExtension.
+  createKeyProviderDelegationTokenExtension(dfs.getKeyProvider());
+  Token[] kpTokens = keyProviderDelegationTokenExtension.
+  addDelegationTokens(renewer, credentials);
+  if (tokens != null && kpTokens != null) {
+Token[] all = new Token[tokens.length + kpTokens.length];
+System.arraycopy(tokens, 0, all, 0, tokens.length);
+System.arraycopy(kpTokens, 0, all, tokens.length, kpTokens.length);
+tokens = all;
+  } else {
+tokens = (tokens != null) ? tokens : kpTokens;
+  }
+}
+return tokens;
+  }
+
   public DFSInotifyEventInputStream getInotifyEve

[13/19] git commit: YARN-2519. Credential Provider related unit tests failed on Windows. Contributed by Xiaoyu Yao.

2014-09-07 Thread szetszwo
YARN-2519. Credential Provider related unit tests failed on Windows. 
Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cbea1b10
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cbea1b10
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cbea1b10

Branch: refs/heads/HDFS-6584
Commit: cbea1b10efd871d04c648af18449dc724685db74
Parents: 88209ce
Author: cnauroth 
Authored: Sat Sep 6 20:05:07 2014 -0700
Committer: cnauroth 
Committed: Sat Sep 6 20:05:07 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt  | 3 +++
 .../java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java | 4 +++-
 2 files changed, 6 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbea1b10/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 34a206a..beafc22 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -287,6 +287,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2431. NM restart: cgroup is not removed for reacquired containers
 (jlowe)
 
+YARN-2519. Credential Provider related unit tests failed on Windows.
+(Xiaoyu Yao via cnauroth)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cbea1b10/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
index 18600fd..2bd91b4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/util/TestWebAppUtils.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertEquals;
 import java.io.File;
 import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.http.HttpServer2.Builder;
 import org.apache.hadoop.security.alias.CredentialProvider;
@@ -74,8 +75,9 @@ public class TestWebAppUtils {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
 
 File file = new File(testDir, "test.jks");
 file.delete();



[19/19] git commit: Merge branch 'trunk' into HDFS-6584

2014-09-07 Thread szetszwo
Merge branch 'trunk' into HDFS-6584

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f1432e24
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f1432e24
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f1432e24

Branch: refs/heads/HDFS-6584
Commit: f1432e24244034f2009fc91e4bae390c6fcc78b0
Parents: 22a41dc 0974f43
Author: Tsz-Wo Nicholas Sze 
Authored: Mon Sep 8 10:54:48 2014 +0800
Committer: Tsz-Wo Nicholas Sze 
Committed: Mon Sep 8 10:54:48 2014 +0800

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   8 +
 .../crypto/key/kms/KMSClientProvider.java   |   6 +-
 hadoop-common-project/hadoop-kms/pom.xml|   4 +-
 .../hadoop/crypto/key/kms/server/MiniKMS.java   | 197 +
 .../hadoop/crypto/key/kms/server/TestKMS.java   |  88 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  12 +-
 .../java/org/apache/hadoop/hdfs/DFSClient.java  |   4 +
 .../hadoop/hdfs/DistributedFileSystem.java  |  24 ++
 .../hadoop/hdfs/protocol/HdfsConstants.java |   2 +-
 .../server/blockmanagement/BlockManager.java|  21 +-
 .../server/blockmanagement/DatanodeManager.java |   6 +-
 .../server/blockmanagement/HostFileManager.java |   4 +
 .../server/datanode/ReplicaBeingWritten.java|  12 +-
 .../hdfs/server/datanode/ReplicaInPipeline.java |  33 ++-
 .../hdfs/server/datanode/ReplicaInfo.java   |   7 +
 .../server/datanode/fsdataset/FsVolumeSpi.java  |  11 +
 .../datanode/fsdataset/impl/BlockPoolSlice.java |   6 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  15 +-
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |  58 +++-
 .../hdfs/server/namenode/FSNamesystem.java  |  46 +--
 .../apache/hadoop/cli/TestCryptoAdminCLI.java   |   4 +-
 .../org/apache/hadoop/hdfs/TestDFSUtil.java |   4 +-
 .../apache/hadoop/hdfs/TestEncryptionZones.java |  55 +++-
 .../hadoop/hdfs/TestReservedRawPaths.java   |   3 +-
 .../server/datanode/TestDirectoryScanner.java   |   8 +
 .../fsdataset/impl/TestRbwSpaceReservation.java | 288 +++
 .../fsdataset/impl/TestWriteToReplica.java  |   2 +-
 .../hdfs/server/namenode/NameNodeAdapter.java   |   2 +-
 hadoop-yarn-project/CHANGES.txt |  15 +
 .../hadoop/yarn/api/records/ContainerId.java|  39 ++-
 .../apache/hadoop/yarn/util/ConverterUtils.java |  15 +-
 .../apache/hadoop/yarn/api/TestContainerId.java |   4 +
 .../hadoop/yarn/util/TestConverterUtils.java|   9 +
 .../yarn/webapp/util/TestWebAppUtils.java   |   4 +-
 .../timeline/webapp/CrossOriginFilter.java  |  20 +-
 .../webapp/CrossOriginFilterInitializer.java|  12 +-
 .../timeline/webapp/TestCrossOriginFilter.java  |  21 +-
 .../TestCrossOriginFilterInitializer.java   |   7 +-
 .../src/site/apt/TimelineServer.apt.vm  |  37 +++
 39 files changed, 952 insertions(+), 161 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f1432e24/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f1432e24/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f1432e24/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f1432e24/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
--

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f1432e24/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --cc 
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 956900d,6176188..9b030e7
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@@ -398,10 -396,22 +399,26 @@@ public class BlockManager 
  }
}
  
 +  public BlockStoragePolicy getStoragePolicy(final String policyName) {
 +return storagePolicySuite.getPolicy(policyName);
 +  }
 +
+   public long getReplicationRecheckInterval() {
+ return replicationRecheckInterv

[08/19] git commit: YARN-2508. Cross Origin configuration parameters prefix are not honored (Mit Desai via jeagles)

2014-09-07 Thread szetszwo
YARN-2508. Cross Origin configuration parameters prefix are not honored (Mit 
Desai via jeagles)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e6420fec
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e6420fec
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e6420fec

Branch: refs/heads/HDFS-6584
Commit: e6420fec0af9b8d4f424098688ae4926ff527fcf
Parents: 21c0cde
Author: Jonathan Eagles 
Authored: Fri Sep 5 19:42:40 2014 -0500
Committer: Jonathan Eagles 
Committed: Fri Sep 5 19:42:40 2014 -0500

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../timeline/webapp/CrossOriginFilterInitializer.java   | 12 +++-
 .../webapp/TestCrossOriginFilterInitializer.java|  7 ++-
 3 files changed, 16 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6420fec/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 1a5ea07..34a206a 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -181,6 +181,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2511. Allowed all origins by default when CrossOriginFilter is
 enabled. (Jonathan Eagles via zjshen)
 
+YARN-2508. Cross Origin configuration parameters prefix are not honored
+(Mit Desai via jeagles)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6420fec/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilterInitializer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilterInitializer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilterInitializer.java
index 69e0188..148cc63 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilterInitializer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilterInitializer.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.yarn.server.timeline.webapp;
 
+import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
@@ -37,6 +38,15 @@ public class CrossOriginFilterInitializer extends 
FilterInitializer {
   }
 
   static Map getFilterParameters(Configuration conf) {
-return conf.getValByRegex(PREFIX);
+Map filterParams =
+new HashMap();
+for (Map.Entry entry : conf.getValByRegex(PREFIX)
+.entrySet()) {
+  String name = entry.getKey();
+  String value = entry.getValue();
+  name = name.substring(PREFIX.length());
+  filterParams.put(name, value);
+}
+return filterParams;
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e6420fec/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilterInitializer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilterInitializer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilterInitializer.java
index 3199aac..cf26368 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilterInitializer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilterInitializer.java
@@ -42,11 +42,8 @@ public class TestCrossOriginFilterInitializer {
 CrossOriginFilterInitializer.getFilterParameters(conf);
 
 // retrieve values
-String rootvalue =
-filterParameters.get(CrossOriginFilterInitializer.PREFIX + 
"rootparam");
-String nestedvalue =
-filterParameters.get(CrossOriginFilterIni

[15/19] git commit: HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.

2014-09-07 Thread szetszwo
HDFS Credential Provider related Unit Test Failure. Contributed by Xiaoyu Yao.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a23144fd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a23144fd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a23144fd

Branch: refs/heads/HDFS-6584
Commit: a23144fd8a1e399e431f2f272388ec109df37ab1
Parents: d1fa582
Author: cnauroth 
Authored: Sun Sep 7 08:39:20 2014 -0700
Committer: cnauroth 
Committed: Sun Sep 7 08:39:20 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  3 +++
 .../java/org/apache/hadoop/cli/TestCryptoAdminCLI.java  |  4 +++-
 .../test/java/org/apache/hadoop/hdfs/TestDFSUtil.java   |  4 +++-
 .../org/apache/hadoop/hdfs/TestEncryptionZones.java | 12 +++-
 .../org/apache/hadoop/hdfs/TestReservedRawPaths.java|  3 ++-
 5 files changed, 18 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3d43171..5a30d0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -615,6 +615,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6898. DN must reserve space for a full block when an RBW block is
 created. (Arpit Agarwal)
 
+HDFS-7025. HDFS Credential Provider related Unit Test Failure.
+(Xiaoyu Yao via cnauroth)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
index 1c83829..adeabfe 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.crypto.key.JavaKeyStoreProvider;
 import org.apache.hadoop.crypto.key.KeyProvider;
 import org.apache.hadoop.crypto.key.KeyProviderFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HDFSPolicyProvider;
@@ -64,8 +65,9 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
 
 tmpDir = new File(System.getProperty("test.build.data", "target"),
 UUID.randomUUID().toString()).getAbsoluteFile();
+final Path jksPath = new Path(tmpDir.toString(), "test.jks");
 conf.set(KeyProviderFactory.KEY_PROVIDER_PATH,
-JavaKeyStoreProvider.SCHEME_NAME + "://file" + tmpDir + "/test.jks");
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());
 
 dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
 dfsCluster.waitClusterUp();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a23144fd/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
index 5ffd3b5..046265f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
@@ -59,6 +59,7 @@ import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
@@ -809,8 +810,9 @@ public class TestDFSUtil {
 "target/test-dir"));
 
 Configuration conf = new Configuration();
+final Path jksPath = new Path(testDir.toString(), "test.jks");
 final String ourUrl =
-JavaKeyStoreProvider.SCHEME_NAME + "://file/" + testDir + "/test.jks";
+JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
 
 File file = new File(testDi

[04/19] git commit: HDFS-6998. warning message 'ssl.client.truststore.location has not been set' gets printed for hftp command. (Contributed by Xiaoyu Yao)

2014-09-07 Thread szetszwo
HDFS-6998. warning message 'ssl.client.truststore.location has not been set' 
gets printed for hftp command. (Contributed by Xiaoyu Yao)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/71269f70
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/71269f70
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/71269f70

Branch: refs/heads/HDFS-6584
Commit: 71269f70971dc7aa7bcb5e78b19cb3f04fdaa2f4
Parents: 9609b73
Author: arp 
Authored: Fri Sep 5 11:14:10 2014 -0700
Committer: arp 
Committed: Fri Sep 5 11:14:10 2014 -0700

--
 .../org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java | 2 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   | 3 +++
 2 files changed, 4 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/71269f70/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
index aabb815..4b81e17 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/FileBasedKeyStoresFactory.java
@@ -212,7 +212,7 @@ public class FileBasedKeyStoresFactory implements 
KeyStoresFactory {
   LOG.debug(mode.toString() + " Loaded TrustStore: " + truststoreLocation);
   trustManagers = new TrustManager[]{trustManager};
 } else {
-  LOG.warn("The property '" + locationProperty + "' has not been set, " +
+  LOG.debug("The property '" + locationProperty + "' has not been set, " +
   "no TrustStore will be loaded");
   trustManagers = null;
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71269f70/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0772ea6..5c4aeea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -610,6 +610,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6862. Add missing timeout annotations to tests. (Xiaoyu Yao via
 Arpit Agarwal)
 
+HDFS-6998. warning message 'ssl.client.truststore.location has not been
+set' gets printed for hftp command. (Xiaoyu Yao via Arpit Agarwal)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an



[05/19] git commit: HADOOP-11052. hadoop_verify_secure_prereq's results aren't checked in bin/hdfs (aw)

2014-09-07 Thread szetszwo
HADOOP-11052. hadoop_verify_secure_prereq's results aren't checked in bin/hdfs 
(aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7a62515c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7a62515c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7a62515c

Branch: refs/heads/HDFS-6584
Commit: 7a62515c8628430a163415e42c9526a123db213c
Parents: 71269f7
Author: Allen Wittenauer 
Authored: Fri Sep 5 11:31:49 2014 -0700
Committer: Allen Wittenauer 
Committed: Fri Sep 5 11:31:49 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt  |  3 +++
 .../hadoop-common/src/main/bin/hadoop-functions.sh   |  4 ++--
 hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs| 15 +++
 3 files changed, 12 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a62515c/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index d38fae9..afd1cc7 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -330,6 +330,9 @@ Trunk (Unreleased)
 
 HADOOP-11033. shell scripts ignore JAVA_HOME on OS X. (aw)
 
+HADOOP-11052. hadoop_verify_secure_prereq's results aren't checked 
+in bin/hdfs (aw)
+
   OPTIMIZATIONS
 
 HADOOP-7761. Improve the performance of raw comparisons. (todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a62515c/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh 
b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index d430188..1677cc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -644,9 +644,9 @@ function hadoop_verify_secure_prereq
   # this.
   
   # ${EUID} comes from the shell itself!
-  if [[ "${EUID}" -ne 0 ]] || [[ -n "${HADOOP_SECURE_COMMAND}" ]]; then
+  if [[ "${EUID}" -ne 0 ]] && [[ -z "${HADOOP_SECURE_COMMAND}" ]]; then
hadoop_error "ERROR: You must be privileged in order to run a secure 
service."
-return 1
+exit 1
   else
 return 0
   fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a62515c/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 6872a0e..2300dbf 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -225,14 +225,13 @@ esac
 
 if [[ -n "${secure_service}" ]]; then
   HADOOP_SECURE_USER="${secure_user}"
-  if hadoop_verify_secure_prereq; then
-hadoop_setup_secure_service
-
priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.out"
-
priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.err"
-
priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.pid"
-
daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.out"
-
daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${COMMAND}.pid"
-  fi
+  hadoop_verify_secure_prereq
+  hadoop_setup_secure_service
+  
priv_outfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.out"
+  
priv_errfile="${HADOOP_LOG_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.err"
+  
priv_pidfile="${HADOOP_PID_DIR}/privileged-${HADOOP_IDENT_STRING}-${COMMAND-$HOSTNAME}.pid"
+  
daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.out"
+  
daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_SECURE_USER}-${HADOOP_IDENT_STRING}-${COMMAND}.pid"
 else
   
daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.out"
   
daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${COMMAND}.pid"



[10/19] git commit: HADOOP-11070. Create MiniKMS for testing. (tucu)

2014-09-07 Thread szetszwo
HADOOP-11070. Create MiniKMS for testing. (tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/71c8d735
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/71c8d735
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/71c8d735

Branch: refs/heads/HDFS-6584
Commit: 71c8d735f5038e3b516947f12180d7568b6979dc
Parents: e6420fe
Author: Alejandro Abdelnur 
Authored: Fri Sep 5 14:09:22 2014 -0700
Committer: Alejandro Abdelnur 
Committed: Fri Sep 5 21:59:12 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt |   2 +
 hadoop-common-project/hadoop-kms/pom.xml|   4 +-
 .../hadoop/crypto/key/kms/server/MiniKMS.java   | 197 +++
 .../hadoop/crypto/key/kms/server/TestKMS.java   |  82 +---
 4 files changed, 211 insertions(+), 74 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/71c8d735/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 88804cd..9aef131 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -507,6 +507,8 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11060. Create a CryptoCodec test that verifies interoperability 
 between the JCE and OpenSSL implementations. (hitliuyi via tucu)
 
+HADOOP-11070. Create MiniKMS for testing. (tucu)
+
   OPTIMIZATIONS
 
 HADOOP-10838. Byte array native checksumming. (James Thomas via todd)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71c8d735/hadoop-common-project/hadoop-kms/pom.xml
--
diff --git a/hadoop-common-project/hadoop-kms/pom.xml 
b/hadoop-common-project/hadoop-kms/pom.xml
index 3bb97c5..629ffda 100644
--- a/hadoop-common-project/hadoop-kms/pom.xml
+++ b/hadoop-common-project/hadoop-kms/pom.xml
@@ -222,9 +222,9 @@
 
 
   
-
+
 
-
+
   
 
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/71c8d735/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
--
diff --git 
a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
new file mode 100644
index 000..5a6d4c5
--- /dev/null
+++ 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/MiniKMS.java
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.crypto.key.kms.server;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.mortbay.jetty.Connector;
+import org.mortbay.jetty.Server;
+import org.mortbay.jetty.security.SslSocketConnector;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.Writer;
+import java.net.InetAddress;
+import java.net.MalformedURLException;
+import java.net.ServerSocket;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+public class MiniKMS {
+
+  private static Server createJettyServer(String keyStore, String password) {
+try {
+  boolean ssl = keyStore != null;
+  InetAddress localhost = InetAddress.getByName("localhost");
+  String host = "localhost";
+  ServerSocket ss = new ServerSocket(0, 50, localhost);
+  int port = ss.getLocalPort();
+  ss.close();
+  Server server = new Server(0);
+  if (!ssl) {
+server.getConnectors()[0].setHost(host);
+server.getConnectors()[0].setPort(port);
+  } else {
+SslSocketConnector c = new SslSocketConnector(

[12/19] git commit: HDFS-6940. Refactoring to allow ConsensusNode implementation. Contributed by Konstantin Shvachko.

2014-09-07 Thread szetszwo
HDFS-6940. Refactoring to allow ConsensusNode implementation.
Contributed by Konstantin Shvachko.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/88209ce1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/88209ce1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/88209ce1

Branch: refs/heads/HDFS-6584
Commit: 88209ce181b5ecc55c0ae2bceff4893ab4817e88
Parents: 3b35f81
Author: Konstantin V Shvachko 
Authored: Sat Sep 6 12:07:52 2014 -0700
Committer: Konstantin V Shvachko 
Committed: Sat Sep 6 12:07:52 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |  2 +
 .../server/blockmanagement/BlockManager.java| 23 --
 .../server/blockmanagement/DatanodeManager.java |  6 ++-
 .../server/blockmanagement/HostFileManager.java |  4 ++
 .../hdfs/server/namenode/FSNamesystem.java  | 46 +++-
 .../hdfs/server/namenode/NameNodeAdapter.java   |  2 +-
 6 files changed, 57 insertions(+), 26 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/88209ce1/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 333bdce..4412b30 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -444,6 +444,8 @@ Release 2.6.0 - UNRELEASED
 HDFS-6376. Distcp data between two HA clusters requires another 
configuration.
 (Dave Marion and Haohui Mai via jing9)
 
+HDFS-6940. Refactoring to allow ConsensusNode implementation. (shv)
+
   OPTIMIZATIONS
 
 HDFS-6690. Deduplicate xattr names in memory. (wang)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88209ce1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 8470680..6176188 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -164,7 +164,7 @@ public class BlockManager {
   final BlocksMap blocksMap;
 
   /** Replication thread. */
-  final Daemon replicationThread = new Daemon(new ReplicationMonitor());
+  Daemon replicationThread;
   
   /** Store blocks -> datanodedescriptor(s) map of corrupt replicas */
   final CorruptReplicasMap corruptReplicas = new CorruptReplicasMap();
@@ -263,6 +263,7 @@ public class BlockManager {
 this.namesystem = namesystem;
 datanodeManager = new DatanodeManager(this, namesystem, conf);
 heartbeatManager = datanodeManager.getHeartbeatManager();
+setReplicationMonitor(new ReplicationMonitor());
 
 final long pendingPeriod = conf.getLong(
 DFSConfigKeys.DFS_NAMENODE_STARTUP_DELAY_BLOCK_DELETION_SEC_KEY,
@@ -394,7 +395,23 @@ public class BlockManager {
   lifetimeMin*60*1000L, 0, null, encryptionAlgorithm);
 }
   }
-  
+
+  public long getReplicationRecheckInterval() {
+return replicationRecheckInterval;
+  }
+
+  public AtomicLong excessBlocksCount() {
+return excessBlocksCount;
+  }
+
+  public void clearInvalidateBlocks() {
+invalidateBlocks.clear();
+  }
+
+  void setReplicationMonitor(Runnable replicationMonitor) {
+replicationThread = new Daemon(replicationMonitor);
+  }
+
   public void setBlockPoolId(String blockPoolId) {
 if (isBlockTokenEnabled()) {
   blockTokenSecretManager.setBlockPoolId(blockPoolId);
@@ -1616,7 +1633,7 @@ public class BlockManager {
* If there were any replication requests that timed out, reap them
* and put them back into the neededReplication queue
*/
-  private void processPendingReplications() {
+  void processPendingReplications() {
 Block[] timedOutItems = pendingReplications.getTimedOutBlocks();
 if (timedOutItems != null) {
   namesystem.writeLock();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/88209ce1/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeManager.java
index 709f060..55d616f 100644
--- 
a/hadoop-

[09/19] git commit: HADOOP-11069. KMSClientProvider should use getAuthenticationMethod() to determine if in proxyuser mode or not. (tucu)

2014-09-07 Thread szetszwo
HADOOP-11069. KMSClientProvider should use getAuthenticationMethod() to 
determine if in proxyuser mode or not. (tucu)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0f3c19c1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0f3c19c1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0f3c19c1

Branch: refs/heads/HDFS-6584
Commit: 0f3c19c1bb9e341d8aed132ba3eb9e7fc7588306
Parents: 71c8d73
Author: Alejandro Abdelnur 
Authored: Fri Sep 5 10:04:07 2014 -0700
Committer: Alejandro Abdelnur 
Committed: Fri Sep 5 21:59:12 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt| 3 +++
 .../org/apache/hadoop/crypto/key/kms/KMSClientProvider.java| 6 +++---
 .../java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java  | 6 +++---
 3 files changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f3c19c1/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index 9aef131..c77fddc 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -765,6 +765,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11067. warning message 'ssl.client.truststore.location has not
 been set' gets printed for hftp command. (Xiaoyu Yao via Arpit Agarwal)
 
+HADOOP-11069. KMSClientProvider should use getAuthenticationMethod() to
+determine if in proxyuser mode or not. (tucu)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f3c19c1/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index a4e336c..acbe096 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -385,9 +385,9 @@ public class KMSClientProvider extends KeyProvider 
implements CryptoExtension,
   // if current UGI is different from UGI at constructor time, behave as
   // proxyuser
   UserGroupInformation currentUgi = UserGroupInformation.getCurrentUser();
-  final String doAsUser =
-  (loginUgi.getShortUserName().equals(currentUgi.getShortUserName()))
-  ? null : currentUgi.getShortUserName();
+  final String doAsUser = (currentUgi.getAuthenticationMethod() ==
+  UserGroupInformation.AuthenticationMethod.PROXY)
+  ? currentUgi.getShortUserName() : null;
 
   // creating the HTTP connection using the current UGI at constructor time
   conn = loginUgi.doAs(new PrivilegedExceptionAction() {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0f3c19c1/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
--
diff --git 
a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
index f381fa0..b921c84 100644
--- 
a/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
+++ 
b/hadoop-common-project/hadoop-kms/src/test/java/org/apache/hadoop/crypto/key/kms/server/TestKMS.java
@@ -1157,7 +1157,7 @@ public class TestKMS {
 final URI uri = createKMSUri(getKMSUrl());
 
 // proxyuser client using kerberos credentials
-UserGroupInformation clientUgi = UserGroupInformation.
+final UserGroupInformation clientUgi = UserGroupInformation.
 loginUserFromKeytabAndReturnUGI("client", 
keytab.getAbsolutePath());
 clientUgi.doAs(new PrivilegedExceptionAction() {
   @Override
@@ -1167,7 +1167,7 @@ public class TestKMS {
 
 // authorized proxyuser
 UserGroupInformation fooUgi =
-UserGroupInformation.createRemoteUser("foo");
+UserGroupInformation.createProxyUser("foo", clientUgi);
 fooUgi.doAs(new PrivilegedExceptionAction() {
   @Override
   public Void run() throws Exception {
@@ -1179,7 +1179,7 @@ public class TestKMS {
 
 // unauthorized proxyuser

[16/19] git commit: YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. Contributed by Jonathan Eagles.

2014-09-07 Thread szetszwo
YARN-2512. Allowed pattern matching for origins in CrossOriginFilter. 
Contributed by Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a092cdf3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a092cdf3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a092cdf3

Branch: refs/heads/HDFS-6584
Commit: a092cdf32de4d752456286a9f4dda533d8a62bca
Parents: a23144f
Author: Zhijie Shen 
Authored: Sun Sep 7 17:49:06 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 17:49:06 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 +++
 .../timeline/webapp/CrossOriginFilter.java  | 20 ++-
 .../timeline/webapp/TestCrossOriginFilter.java  | 21 +++-
 3 files changed, 42 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index beafc22..ed31479 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -184,6 +184,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2508. Cross Origin configuration parameters prefix are not honored
 (Mit Desai via jeagles)
 
+YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
+(Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
index d71175f..5a0703d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/timeline/webapp/CrossOriginFilter.java
@@ -24,6 +24,8 @@ import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
@@ -204,7 +206,23 @@ public class CrossOriginFilter implements Filter {
 
   @VisibleForTesting
   boolean isOriginAllowed(String origin) {
-return allowAllOrigins || allowedOrigins.contains(origin);
+if (allowAllOrigins) {
+  return true;
+}
+
+for (String allowedOrigin : allowedOrigins) {
+  if (allowedOrigin.contains("*")) {
+String regex = allowedOrigin.replace(".", "\\.").replace("*", ".*");
+Pattern p = Pattern.compile(regex);
+Matcher m = p.matcher(origin);
+if (m.matches()) {
+  return true;
+}
+  } else if (allowedOrigin.equals(origin)) {
+return true;
+  }
+}
+return false;
   }
 
   private boolean areHeadersAllowed(String accessControlRequestHeaders) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a092cdf3/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
index f666c21..ccc9bbf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/test/java/org/apache/hadoop/yarn/server/timeline/webapp/TestCrossOriginFilter.java
@@ -77,7 +77,26 @@ public class TestCrossOriginFilter {
 // Object under test
 CrossOriginFilter filter = new CrossOriginFilter(

[07/19] git commit: HADOOP-11067 [HDFS-6998]. Fix CHANGES.txt

2014-09-07 Thread szetszwo
HADOOP-11067 [HDFS-6998]. Fix CHANGES.txt


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/21c0cdee
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/21c0cdee
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/21c0cdee

Branch: refs/heads/HDFS-6584
Commit: 21c0cdeec1034b18ad3a2d5b71941a84bcea5ebe
Parents: 0571b45
Author: arp 
Authored: Fri Sep 5 11:18:20 2014 -0700
Committer: arp 
Committed: Fri Sep 5 14:29:57 2014 -0700

--
 hadoop-common-project/hadoop-common/CHANGES.txt | 3 +++
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ---
 2 files changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/21c0cdee/hadoop-common-project/hadoop-common/CHANGES.txt
--
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt 
b/hadoop-common-project/hadoop-common/CHANGES.txt
index d20bf08..88804cd 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -760,6 +760,9 @@ Release 2.6.0 - UNRELEASED
 HADOOP-11063. KMS cannot deploy on Windows, because class names are too 
long.
 (cnauroth)
 
+HADOOP-11067. warning message 'ssl.client.truststore.location has not
+been set' gets printed for hftp command. (Xiaoyu Yao via Arpit Agarwal)
+
 Release 2.5.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/21c0cdee/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5c4aeea..0772ea6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -610,9 +610,6 @@ Release 2.6.0 - UNRELEASED
 HDFS-6862. Add missing timeout annotations to tests. (Xiaoyu Yao via
 Arpit Agarwal)
 
-HDFS-6998. warning message 'ssl.client.truststore.location has not been
-set' gets printed for hftp command. (Xiaoyu Yao via Arpit Agarwal)
-
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an



[17/19] git commit: YARN-2507. Documented CrossOriginFilter configurations for the timeline server. Contributed by Jonathan Eagles.

2014-09-07 Thread szetszwo
YARN-2507. Documented CrossOriginFilter configurations for the timeline server. 
Contributed by Jonathan Eagles.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/56dc496a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/56dc496a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/56dc496a

Branch: refs/heads/HDFS-6584
Commit: 56dc496a1031621d2b701801de4ec29179d75f2e
Parents: a092cdf
Author: Zhijie Shen 
Authored: Sun Sep 7 18:22:40 2014 -0700
Committer: Zhijie Shen 
Committed: Sun Sep 7 18:22:40 2014 -0700

--
 hadoop-yarn-project/CHANGES.txt |  3 ++
 .../src/site/apt/TimelineServer.apt.vm  | 37 
 2 files changed, 40 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/56dc496a/hadoop-yarn-project/CHANGES.txt
--
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index ed31479..ed9de87 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -187,6 +187,9 @@ Release 2.6.0 - UNRELEASED
 YARN-2512. Allowed pattern matching for origins in CrossOriginFilter.
 (Jonathan Eagles via zjshen)
 
+YARN-2507. Documented CrossOriginFilter configurations for the timeline
+server. (Jonathan Eagles via zjshen)
+
   OPTIMIZATIONS
 
   BUG FIXES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/56dc496a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
index c704d37..92c7377 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/TimelineServer.apt.vm
@@ -102,6 +102,43 @@ YARN Timeline Server
   yarn.timeline-service.handler-thread-count
   10
 
+
+
+  Enables cross-origin support (CORS) for web services where
+  cross-origin web response headers are needed. For example, javascript making
+  a web services request to the timeline server.
+  yarn.timeline-service.http-cross-origin.enabled
+  false
+
+
+
+  Comma separated list of origins that are allowed for web
+  services needing cross-origin (CORS) support. Wildcards (*) and patterns
+  allowed
+  yarn.timeline-service.http-cross-origin.allowed-origins
+  *
+
+
+
+  Comma separated list of methods that are allowed for web
+  services needing cross-origin (CORS) support.
+  yarn.timeline-service.http-cross-origin.allowed-methods
+  GET,POST,HEAD
+
+
+
+  Comma separated list of headers that are allowed for web
+  services needing cross-origin (CORS) support.
+  yarn.timeline-service.http-cross-origin.allowed-headers
+  X-Requested-With,Content-Type,Accept,Origin
+
+
+
+  The number of seconds a pre-flighted request can be cached
+  for web services needing cross-origin (CORS) support.
+  yarn.timeline-service.http-cross-origin.max-age
+  1800
+
 +---+
 
 * Generic-data related Configuration



[14/19] git commit: HDFS-6898. DN must reserve space for a full block when an RBW block is created. (Contributed by Arpit Agarwal)

2014-09-07 Thread szetszwo
HDFS-6898. DN must reserve space for a full block when an RBW block is created. 
(Contributed by Arpit Agarwal)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d1fa5829
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d1fa5829
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d1fa5829

Branch: refs/heads/HDFS-6584
Commit: d1fa58292e87bc29b4ef1278368c2be938a0afc4
Parents: cbea1b1
Author: arp 
Authored: Sat Sep 6 20:02:40 2014 -0700
Committer: arp 
Committed: Sat Sep 6 21:04:29 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt |   3 +
 .../hadoop/hdfs/protocol/HdfsConstants.java |   2 +-
 .../server/datanode/ReplicaBeingWritten.java|  12 +-
 .../hdfs/server/datanode/ReplicaInPipeline.java |  33 ++-
 .../hdfs/server/datanode/ReplicaInfo.java   |   7 +
 .../server/datanode/fsdataset/FsVolumeSpi.java  |  11 +
 .../datanode/fsdataset/impl/BlockPoolSlice.java |   6 +-
 .../datanode/fsdataset/impl/FsDatasetImpl.java  |  15 +-
 .../datanode/fsdataset/impl/FsVolumeImpl.java   |  58 +++-
 .../server/datanode/TestDirectoryScanner.java   |   8 +
 .../fsdataset/impl/TestRbwSpaceReservation.java | 288 +++
 .../fsdataset/impl/TestWriteToReplica.java  |   2 +-
 12 files changed, 423 insertions(+), 22 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 4412b30..3d43171 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -612,6 +612,9 @@ Release 2.6.0 - UNRELEASED
 HDFS-6862. Add missing timeout annotations to tests. (Xiaoyu Yao via
 Arpit Agarwal)
 
+HDFS-6898. DN must reserve space for a full block when an RBW block is
+created. (Arpit Agarwal)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
index 77fe543..240dcd0 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsConstants.java
@@ -48,7 +48,7 @@ public class HdfsConstants {
   "org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol";
   
   
-  public static final int MIN_BLOCKS_FOR_WRITE = 5;
+  public static final int MIN_BLOCKS_FOR_WRITE = 1;
 
   // Long that indicates "leave current quota unchanged"
   public static final long QUOTA_DONT_SET = Long.MAX_VALUE;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d1fa5829/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
index 728dd38..4a89493 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/ReplicaBeingWritten.java
@@ -34,10 +34,12 @@ public class ReplicaBeingWritten extends ReplicaInPipeline {
* @param genStamp replica generation stamp
* @param vol volume where replica is located
* @param dir directory path where block and meta files are located
+   * @param bytesToReserve disk space to reserve for this replica, based on
+   *   the estimated maximum block length.
*/
   public ReplicaBeingWritten(long blockId, long genStamp, 
-FsVolumeSpi vol, File dir) {
-super( blockId, genStamp, vol, dir);
+FsVolumeSpi vol, File dir, long bytesToReserve) {
+super(blockId, genStamp, vol, dir, bytesToReserve);
   }
   
   /**
@@ -60,10 +62,12 @@ public class ReplicaBeingWritten extends ReplicaInPipeline {
* @param vol volume where replica is located
* @param dir directory path where block and meta files are located
* @param writer a thread that is writing to this replica
+   *

[01/19] git commit: HDFS-6979. hdfs.dll not produce .pdb files. Contributed by Chris Nauroth.

2014-09-07 Thread szetszwo
Repository: hadoop
Updated Branches:
  refs/heads/HDFS-6584 22a41dce4 -> f1432e242


HDFS-6979. hdfs.dll not produce .pdb files. Contributed by Chris Nauroth.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/fab9bc58
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/fab9bc58
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/fab9bc58

Branch: refs/heads/HDFS-6584
Commit: fab9bc58ec03ea81cd5ce8a8746a4ee588f7bb08
Parents: 9e941d9
Author: cnauroth 
Authored: Fri Sep 5 11:03:58 2014 -0700
Committer: cnauroth 
Committed: Fri Sep 5 11:03:58 2014 -0700

--
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 2 ++
 hadoop-hdfs-project/hadoop-hdfs/pom.xml | 6 +++---
 2 files changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/fab9bc58/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 5a087d2..680af55 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -605,6 +605,8 @@ Release 2.6.0 - UNRELEASED
 HDFS-6831. Inconsistency between 'hdfs dfsadmin' and 'hdfs dfsadmin -help'.
 (Xiaoyu Yao via Arpit Agarwal)
 
+HDFS-6979. hdfs.dll not produce .pdb files. (cnauroth)
+
 BREAKDOWN OF HDFS-6134 AND HADOOP-10150 SUBTASKS AND RELATED JIRAS
   
   HDFS-6387. HDFS CLI admin tool for creating & deleting an

http://git-wip-us.apache.org/repos/asf/hadoop/blob/fab9bc58/hadoop-hdfs-project/hadoop-hdfs/pom.xml
--
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml 
b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index 2c4ddf6..ecdd1ae 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -415,11 +415,11 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd";>
 
 
-  
+  
 
 
 
-  
+  
 
   
 
@@ -437,7 +437,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd";>
   
   
 
-
+