Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestAclWithSnapshot.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestAclWithSnapshot.java?rev=1594906&r1=1594905&r2=1594906&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestAclWithSnapshot.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestAclWithSnapshot.java Thu May 15 13:03:22 2014
@@ -119,14 +119,14 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, path);
+    assertPermission((short)010750, path);
 
     s = hdfs.getAclStatus(snapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, snapshotPath);
+    assertPermission((short)010750, snapshotPath);
 
     assertDirPermissionGranted(fsAsBruce, BRUCE, snapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, snapshotPath);
@@ -153,14 +153,14 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "diana", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0550, path);
+    assertPermission((short)010550, path);
 
     s = hdfs.getAclStatus(snapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, snapshotPath);
+    assertPermission((short)010750, snapshotPath);
 
     assertDirPermissionDenied(fsAsBruce, BRUCE, path);
     assertDirPermissionGranted(fsAsDiana, DIANA, path);
@@ -202,24 +202,24 @@ public class TestAclWithSnapshot {
     AclStatus s = hdfs.getAclStatus(filePath);
     AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, filePath);
+    assertPermission((short)010550, filePath);
 
     s = hdfs.getAclStatus(subdirPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirPath);
+    assertPermission((short)010550, subdirPath);
 
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, fileSnapshotPath);
+    assertPermission((short)010550, fileSnapshotPath);
     assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
     assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);
 
     s = hdfs.getAclStatus(subdirSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirSnapshotPath);
+    assertPermission((short)010550, subdirSnapshotPath);
     assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
@@ -251,14 +251,14 @@ public class TestAclWithSnapshot {
     AclStatus s = hdfs.getAclStatus(filePath);
     AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0570, filePath);
+    assertPermission((short)010570, filePath);
     assertFilePermissionDenied(fsAsBruce, BRUCE, filePath);
     assertFilePermissionGranted(fsAsDiana, DIANA, filePath);
 
     s = hdfs.getAclStatus(subdirPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0570, subdirPath);
+    assertPermission((short)010570, subdirPath);
     assertDirPermissionDenied(fsAsBruce, BRUCE, subdirPath);
     assertDirPermissionGranted(fsAsDiana, DIANA, subdirPath);
@@ -268,14 +268,14 @@ public class TestAclWithSnapshot {
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, fileSnapshotPath);
+    assertPermission((short)010550, fileSnapshotPath);
     assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
     assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);
 
     s = hdfs.getAclStatus(subdirSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirSnapshotPath);
+    assertPermission((short)010550, subdirSnapshotPath);
     assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
   }
@@ -302,14 +302,14 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, path);
+    assertPermission((short)010750, path);
 
     s = hdfs.getAclStatus(snapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, snapshotPath);
+    assertPermission((short)010750, snapshotPath);
 
     assertDirPermissionGranted(fsAsBruce, BRUCE, snapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, snapshotPath);
@@ -336,7 +336,7 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0750, snapshotPath);
+    assertPermission((short)010750, snapshotPath);
 
     assertDirPermissionDenied(fsAsBruce, BRUCE, path);
     assertDirPermissionDenied(fsAsDiana, DIANA, path);
@@ -378,24 +378,24 @@ public class TestAclWithSnapshot {
     AclStatus s = hdfs.getAclStatus(filePath);
     AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, filePath);
+    assertPermission((short)010550, filePath);
 
     s = hdfs.getAclStatus(subdirPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirPath);
+    assertPermission((short)010550, subdirPath);
 
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, fileSnapshotPath);
+    assertPermission((short)010550, fileSnapshotPath);
     assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
     assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);
 
     s = hdfs.getAclStatus(subdirSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirSnapshotPath);
+    assertPermission((short)010550, subdirSnapshotPath);
     assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
@@ -437,14 +437,14 @@ public class TestAclWithSnapshot {
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, fileSnapshotPath);
+    assertPermission((short)010550, fileSnapshotPath);
     assertFilePermissionGranted(fsAsBruce, BRUCE, fileSnapshotPath);
     assertFilePermissionDenied(fsAsDiana, DIANA, fileSnapshotPath);
 
     s = hdfs.getAclStatus(subdirSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0550, subdirSnapshotPath);
+    assertPermission((short)010550, subdirSnapshotPath);
     assertDirPermissionGranted(fsAsBruce, BRUCE, subdirSnapshotPath);
     assertDirPermissionDenied(fsAsDiana, DIANA, subdirSnapshotPath);
   }
@@ -470,7 +470,7 @@ public class TestAclWithSnapshot {
     AclStatus s = hdfs.getAclStatus(path);
     AclEntry[] returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(expected, returned);
-    assertPermission((short)0770, path);
+    assertPermission((short)010770, path);
     assertDirPermissionGranted(fsAsBruce, BRUCE, path);
     assertDirPermissionGranted(fsAsDiana, DIANA, path);
   }
@@ -514,7 +514,7 @@ public class TestAclWithSnapshot {
       aclEntry(DEFAULT, GROUP, NONE),
       aclEntry(DEFAULT, MASK, READ_EXECUTE),
       aclEntry(DEFAULT, OTHER, NONE) }, returned);
-    assertPermission((short)0700, path);
+    assertPermission((short)010700, path);
 
     s = hdfs.getAclStatus(snapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
@@ -524,7 +524,7 @@ public class TestAclWithSnapshot {
       aclEntry(DEFAULT, GROUP, NONE),
       aclEntry(DEFAULT, MASK, READ_EXECUTE),
       aclEntry(DEFAULT, OTHER, NONE) }, returned);
-    assertPermission((short)0700, snapshotPath);
+    assertPermission((short)010700, snapshotPath);
 
     assertDirPermissionDenied(fsAsBruce, BRUCE, snapshotPath);
   }
@@ -596,14 +596,14 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_WRITE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0660, filePath);
+    assertPermission((short)010660, filePath);
 
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_WRITE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0660, filePath);
+    assertPermission((short)010660, filePath);
 
     aclSpec = Lists.newArrayList(
       aclEntry(ACCESS, USER, "bruce", READ));
@@ -632,14 +632,14 @@ public class TestAclWithSnapshot {
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_WRITE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0660, filePath);
+    assertPermission((short)010660, filePath);
 
     s = hdfs.getAclStatus(fileSnapshotPath);
     returned = s.getEntries().toArray(new AclEntry[0]);
     assertArrayEquals(new AclEntry[] {
       aclEntry(ACCESS, USER, "bruce", READ_WRITE),
       aclEntry(ACCESS, GROUP, NONE) }, returned);
-    assertPermission((short)0660, filePath);
+    assertPermission((short)010660, filePath);
 
     aclSpec = Lists.newArrayList(
       aclEntry(ACCESS, USER, "bruce", READ));
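Note on the hunks above: the expected permission values change from plain mode bits (e.g. (short)0750) to values with a leading 01 octal digit (e.g. (short)010750). That extra bit appears to be an ACL indicator flag that assertPermission now expects alongside the ordinary rwx bits; the actual mask is not shown in this diff. A minimal sketch of that interpretation follows, assuming a hypothetical ACL flag of 010000 (octal); the class and helper names are illustrative only.

// Hedged sketch: ACL_BIT and withAclBit are assumptions used only to
// illustrate why 0750 becomes 010750 in the assertions above.
public class PermissionBitSketch {
  private static final short ACL_BIT = (short) 010000; // assumed ACL flag bit

  static short withAclBit(short perm) {
    // OR the assumed ACL flag into the classic 9-bit rwx permission value
    return (short) (perm | ACL_BIT);
  }

  public static void main(String[] args) {
    short plain = (short) 0750;
    // Prints 10750: 0750 with the assumed ACL flag set
    System.out.println(Integer.toOctalString(withAclBit(plain) & 0xffff));
  }
}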
Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java?rev=1594906&r1=1594905&r2=1594906&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdmin.java Thu May 15 13:03:22 2014
@@ -176,6 +176,7 @@ public class TestDFSHAAdmin {
 
   @Test
   public void testTransitionToActive() throws Exception {
+    Mockito.doReturn(STANDBY_READY_RESULT).when(mockProtocol).getServiceStatus();
     assertEquals(0, runTool("-transitionToActive", "nn1"));
     Mockito.verify(mockProtocol).transitionToActive(
         reqInfoCaptor.capture());

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java?rev=1594906&r1=1594905&r2=1594906&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestDFSHAAdminMiniCluster.java Thu May 15 13:03:22 2014
@@ -31,6 +31,7 @@ import org.apache.commons.logging.LogFac
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.HAAdmin;
+import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.MiniDFSNNTopology;
@@ -204,6 +205,70 @@ public class TestDFSHAAdminMiniCluster {
     assertEquals(0, runTool("-checkHealth", "nn2"));
   }
 
+  /**
+   * Test case to check that both namenodes cannot be active at the same time.
+   * @throws Exception
+   */
+  @Test
+  public void testTransitionToActiveWhenOtherNamenodeisActive()
+      throws Exception {
+    NameNode nn1 = cluster.getNameNode(0);
+    NameNode nn2 = cluster.getNameNode(1);
+    if(nn1.getState() != null && !nn1.getState().
+        equals(HAServiceState.STANDBY.name()) ) {
+      cluster.transitionToStandby(0);
+    }
+    if(nn2.getState() != null && !nn2.getState().
+        equals(HAServiceState.STANDBY.name()) ) {
+      cluster.transitionToStandby(1);
+    }
+    // Making sure both namenodes are in the standby state
+    assertTrue(nn1.isStandbyState());
+    assertTrue(nn2.isStandbyState());
+    // Triggering the transition of both namenodes to Active
+    runTool("-transitionToActive", "nn1");
+    runTool("-transitionToActive", "nn2");
+
+    assertFalse("Both namenodes cannot be active", nn1.isActiveState()
+        && nn2.isActiveState());
+
+    /* This test case doesn't allow nn2 to transition to Active even with
+       the forceActive switch since nn1 is already active */
+    if(nn1.getState() != null && !nn1.getState().
+        equals(HAServiceState.STANDBY.name()) ) {
+      cluster.transitionToStandby(0);
+    }
+    if(nn2.getState() != null && !nn2.getState().
+        equals(HAServiceState.STANDBY.name()) ) {
+      cluster.transitionToStandby(1);
+    }
+    // Making sure both namenodes are in the standby state
+    assertTrue(nn1.isStandbyState());
+    assertTrue(nn2.isStandbyState());
+
+    runTool("-transitionToActive", "nn1");
+    runTool("-transitionToActive", "nn2", "--forceactive");
+
+    assertFalse("Both namenodes cannot be active even though with forceActive",
+        nn1.isActiveState() && nn2.isActiveState());
+
+    /* In this test case, we have deliberately shut down nn1, which causes
+       HAAdmin#isOtherTargetNodeActive to throw an exception, so
+       transitionToActive for nn2 with the forceActive switch will succeed
+       despite that exception */
+    cluster.shutdownNameNode(0);
+    if(nn2.getState() != null && !nn2.getState().
+        equals(HAServiceState.STANDBY.name()) ) {
+      cluster.transitionToStandby(1);
+    }
+    // Making sure the remaining namenode (nn2) is in the standby state
+    assertTrue(nn2.isStandbyState());
+    assertFalse(cluster.isNameNodeUp(0));
+
+    runTool("-transitionToActive", "nn2", "--forceactive");
+    assertTrue("Namenode nn2 should be active", nn2.isActiveState());
+  }
+
   private int runTool(String ... args) throws Exception {
     errOutBytes.reset();
     LOG.info("Running: DFSHAAdmin " + Joiner.on(" ").join(args));

Modified: hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java?rev=1594906&r1=1594905&r2=1594906&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java Thu May 15 13:03:22 2014
@@ -94,10 +94,4 @@ public class WebHdfsTestUtil {
     Assert.assertEquals(expectedResponseCode, conn.getResponseCode());
     return WebHdfsFileSystem.jsonParse(conn, false);
   }
-
-  public static FSDataOutputStream write(final WebHdfsFileSystem webhdfs,
-      final HttpOpParam.Op op, final HttpURLConnection conn,
-      final int bufferSize) throws IOException {
-    return webhdfs.write(op, conn, bufferSize);
-  }
 }
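Note on the TestDFSHAAdmin change above: the added stubbing of getServiceStatus() suggests that the admin tool now queries the target node's service status before attempting -transitionToActive, so the mocked HAServiceProtocol has to report a standby node that is ready to become active. The following standalone sketch shows that Mockito stubbing pattern; the class name and the way the status object is built here are illustrative assumptions, not copied from the test.

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;

import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.ha.HAServiceStatus;

// Illustrative sketch only: mirrors the stubbing added to testTransitionToActive.
public class HAAdminMockSketch {
  public static void main(String[] args) throws Exception {
    HAServiceProtocol mockProtocol = mock(HAServiceProtocol.class);
    // Report a standby service that is ready to become active, so a
    // -transitionToActive run against this mock can proceed.
    HAServiceStatus standbyReady =
        new HAServiceStatus(HAServiceState.STANDBY).setReadyToBecomeActive();
    doReturn(standbyReady).when(mockProtocol).getServiceStatus();

    System.out.println(mockProtocol.getServiceStatus().getState());
  }
}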