Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java Thu Nov 21 20:06:09 2013 @@ -556,7 +556,7 @@ public class TestDFSUtil { Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR); Map<String, Map<String, InetSocketAddress>> map = - DFSUtil.getHaNnHttpAddresses(conf); + DFSUtil.getHaNnWebHdfsAddresses(conf, "webhdfs"); assertEquals(NS1_NN1_ADDR, map.get("ns1").get("nn1").toString()); assertEquals(NS1_NN2_ADDR, map.get("ns1").get("nn2").toString()); @@ -574,7 +574,7 @@ public class TestDFSUtil { Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR); URI uri = new URI("webhdfs://ns1"); assertTrue(HAUtil.isLogicalUri(conf, uri)); - InetSocketAddress[] addrs = DFSUtil.resolve(uri, DEFAULT_PORT, conf); + InetSocketAddress[] addrs = DFSUtil.resolveWebHdfsUri(uri, conf); assertArrayEquals(new InetSocketAddress[] { new InetSocketAddress(NS1_NN1_HOST, DEFAULT_PORT), new InetSocketAddress(NS1_NN2_HOST, DEFAULT_PORT),
Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java Thu Nov 21 20:06:09 2013 @@ -147,7 +147,7 @@ public class TestDelegationTokenForProxy public void testWebHdfsDoAs() throws Exception { WebHdfsTestUtil.LOG.info("START: testWebHdfsDoAs()"); WebHdfsTestUtil.LOG.info("ugi.getShortUserName()=" + ugi.getShortUserName()); - final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config); + final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, config, WebHdfsFileSystem.SCHEME); final Path root = new Path("/"); cluster.getFileSystem().setPermission(root, new FsPermission((short)0777)); Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java (original) +++ 
hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/FSImageTestUtil.java Thu Nov 21 20:06:09 2013 @@ -554,7 +554,7 @@ public abstract class FSImageTestUtil { * get NameSpace quota. */ public static long getNSQuota(FSNamesystem ns) { - return ns.dir.rootDir.getNsQuota(); + return ns.dir.rootDir.getQuotaCounts().get(Quota.NAMESPACE); } public static void assertNNFilesMatch(MiniDFSCluster cluster) throws Exception { Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/OfflineEditsViewerHelper.java Thu Nov 21 20:06:09 2013 @@ -42,7 +42,7 @@ import org.apache.hadoop.hdfs.MiniDFSClu import org.apache.hadoop.hdfs.protocol.CachePoolInfo; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; -import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective; +import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo; import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory; import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; @@ -241,20 +241,20 @@ public class OfflineEditsViewerHelper { .setMode(new FsPermission((short)0700)) .setWeight(1989)); // OP_ADD_PATH_BASED_CACHE_DIRECTIVE 33 - long id = 
dfs.addPathBasedCacheDirective( - new PathBasedCacheDirective.Builder(). + long id = dfs.addCacheDirective( + new CacheDirectiveInfo.Builder(). setPath(new Path("/bar")). setReplication((short)1). setPool(pool). build()); // OP_MODIFY_PATH_BASED_CACHE_DIRECTIVE 38 - dfs.modifyPathBasedCacheDirective( - new PathBasedCacheDirective.Builder(). + dfs.modifyCacheDirective( + new CacheDirectiveInfo.Builder(). setId(id). setPath(new Path("/bar2")). build()); // OP_REMOVE_PATH_BASED_CACHE_DIRECTIVE 34 - dfs.removePathBasedCacheDirective(id); + dfs.removeCacheDirective(id); // OP_REMOVE_CACHE_POOL 37 dfs.removeCachePool(pool); // sync to disk, otherwise we parse partial edits Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogs.java Thu Nov 21 20:06:09 2013 @@ -163,7 +163,7 @@ public class TestAuditLogs { setupAuditLogs(); - WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf); + WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME); InputStream istream = webfs.open(file); int val = istream.read(); istream.close(); @@ -182,7 +182,7 @@ public class TestAuditLogs { setupAuditLogs(); - WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf); + WebHdfsFileSystem webfs = 
WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME); FileStatus st = webfs.getFileStatus(file); verifyAuditLogs(true); @@ -222,7 +222,7 @@ public class TestAuditLogs { setupAuditLogs(); try { - WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf); + WebHdfsFileSystem webfs = WebHdfsTestUtil.getWebHdfsFileSystemAs(userGroupInfo, conf, WebHdfsFileSystem.SCHEME); InputStream istream = webfs.open(file); int val = istream.read(); fail("open+read must not succeed, got " + val); Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestRetryCacheWithHA.java Thu Nov 21 20:06:09 2013 @@ -61,7 +61,8 @@ import org.apache.hadoop.hdfs.protocol.E import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlocks; -import org.apache.hadoop.hdfs.protocol.PathBasedCacheDirective; +import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry; +import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoUnderConstruction; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; import org.apache.hadoop.hdfs.server.namenode.INodeFile; @@ -737,14 +738,14 @@ public class 
TestRetryCacheWithHA { } } - /** addPathBasedCacheDirective */ - class AddPathBasedCacheDirectiveOp extends AtMostOnceOp { - private PathBasedCacheDirective directive; + /** addCacheDirective */ + class AddCacheDirectiveInfoOp extends AtMostOnceOp { + private CacheDirectiveInfo directive; private Long result; - AddPathBasedCacheDirectiveOp(DFSClient client, - PathBasedCacheDirective directive) { - super("addPathBasedCacheDirective", client); + AddCacheDirectiveInfoOp(DFSClient client, + CacheDirectiveInfo directive) { + super("addCacheDirective", client); this.directive = directive; } @@ -755,15 +756,15 @@ public class TestRetryCacheWithHA { @Override void invoke() throws Exception { - result = client.addPathBasedCacheDirective(directive); + result = client.addCacheDirective(directive); } @Override boolean checkNamenodeBeforeReturn() throws Exception { for (int i = 0; i < CHECKTIMES; i++) { - RemoteIterator<PathBasedCacheDirective> iter = - dfs.listPathBasedCacheDirectives( - new PathBasedCacheDirective.Builder(). + RemoteIterator<CacheDirectiveEntry> iter = + dfs.listCacheDirectives( + new CacheDirectiveInfo.Builder(). setPool(directive.getPool()). setPath(directive.getPath()). 
build()); @@ -781,15 +782,15 @@ public class TestRetryCacheWithHA { } } - /** modifyPathBasedCacheDirective */ - class ModifyPathBasedCacheDirectiveOp extends AtMostOnceOp { - private final PathBasedCacheDirective directive; + /** modifyCacheDirective */ + class ModifyCacheDirectiveInfoOp extends AtMostOnceOp { + private final CacheDirectiveInfo directive; private final short newReplication; private long id; - ModifyPathBasedCacheDirectiveOp(DFSClient client, - PathBasedCacheDirective directive, short newReplication) { - super("modifyPathBasedCacheDirective", client); + ModifyCacheDirectiveInfoOp(DFSClient client, + CacheDirectiveInfo directive, short newReplication) { + super("modifyCacheDirective", client); this.directive = directive; this.newReplication = newReplication; } @@ -797,13 +798,13 @@ public class TestRetryCacheWithHA { @Override void prepare() throws Exception { dfs.addCachePool(new CachePoolInfo(directive.getPool())); - id = client.addPathBasedCacheDirective(directive); + id = client.addCacheDirective(directive); } @Override void invoke() throws Exception { - client.modifyPathBasedCacheDirective( - new PathBasedCacheDirective.Builder(). + client.modifyCacheDirective( + new CacheDirectiveInfo.Builder(). setId(id). setReplication(newReplication). build()); @@ -812,14 +813,14 @@ public class TestRetryCacheWithHA { @Override boolean checkNamenodeBeforeReturn() throws Exception { for (int i = 0; i < CHECKTIMES; i++) { - RemoteIterator<PathBasedCacheDirective> iter = - dfs.listPathBasedCacheDirectives( - new PathBasedCacheDirective.Builder(). + RemoteIterator<CacheDirectiveEntry> iter = + dfs.listCacheDirectives( + new CacheDirectiveInfo.Builder(). setPool(directive.getPool()). setPath(directive.getPath()). 
build()); while (iter.hasNext()) { - PathBasedCacheDirective result = iter.next(); + CacheDirectiveInfo result = iter.next().getInfo(); if ((result.getId() == id) && (result.getReplication().shortValue() == newReplication)) { return true; @@ -836,15 +837,15 @@ public class TestRetryCacheWithHA { } } - /** removePathBasedCacheDirective */ - class RemovePathBasedCacheDirectiveOp extends AtMostOnceOp { - private PathBasedCacheDirective directive; + /** removeCacheDirective */ + class RemoveCacheDirectiveInfoOp extends AtMostOnceOp { + private CacheDirectiveInfo directive; private long id; - RemovePathBasedCacheDirectiveOp(DFSClient client, String pool, + RemoveCacheDirectiveInfoOp(DFSClient client, String pool, String path) { - super("removePathBasedCacheDirective", client); - this.directive = new PathBasedCacheDirective.Builder(). + super("removeCacheDirective", client); + this.directive = new CacheDirectiveInfo.Builder(). setPool(pool). setPath(new Path(path)). build(); @@ -853,20 +854,20 @@ public class TestRetryCacheWithHA { @Override void prepare() throws Exception { dfs.addCachePool(new CachePoolInfo(directive.getPool())); - id = dfs.addPathBasedCacheDirective(directive); + id = dfs.addCacheDirective(directive); } @Override void invoke() throws Exception { - client.removePathBasedCacheDirective(id); + client.removeCacheDirective(id); } @Override boolean checkNamenodeBeforeReturn() throws Exception { for (int i = 0; i < CHECKTIMES; i++) { - RemoteIterator<PathBasedCacheDirective> iter = - dfs.listPathBasedCacheDirectives( - new PathBasedCacheDirective.Builder(). + RemoteIterator<CacheDirectiveEntry> iter = + dfs.listCacheDirectives( + new CacheDirectiveInfo.Builder(). setPool(directive.getPool()). setPath(directive.getPath()). 
build()); @@ -1072,10 +1073,10 @@ public class TestRetryCacheWithHA { } @Test (timeout=60000) - public void testAddPathBasedCacheDirective() throws Exception { + public void testAddCacheDirectiveInfo() throws Exception { DFSClient client = genClientWithDummyHandler(); - AtMostOnceOp op = new AddPathBasedCacheDirectiveOp(client, - new PathBasedCacheDirective.Builder(). + AtMostOnceOp op = new AddCacheDirectiveInfoOp(client, + new CacheDirectiveInfo.Builder(). setPool("pool"). setPath(new Path("/path")). build()); @@ -1083,10 +1084,10 @@ public class TestRetryCacheWithHA { } @Test (timeout=60000) - public void testModifyPathBasedCacheDirective() throws Exception { + public void testModifyCacheDirectiveInfo() throws Exception { DFSClient client = genClientWithDummyHandler(); - AtMostOnceOp op = new ModifyPathBasedCacheDirectiveOp(client, - new PathBasedCacheDirective.Builder(). + AtMostOnceOp op = new ModifyCacheDirectiveInfoOp(client, + new CacheDirectiveInfo.Builder(). setPool("pool"). setPath(new Path("/path")). 
setReplication((short)1).build(), @@ -1095,9 +1096,9 @@ public class TestRetryCacheWithHA { } @Test (timeout=60000) - public void testRemovePathBasedCacheDescriptor() throws Exception { + public void testRemoveCacheDescriptor() throws Exception { DFSClient client = genClientWithDummyHandler(); - AtMostOnceOp op = new RemovePathBasedCacheDirectiveOp(client, "pool", + AtMostOnceOp op = new RemoveCacheDirectiveInfoOp(client, "pool", "/path"); testClientRetryWithFailover(op); } Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java Thu Nov 21 20:06:09 2013 @@ -65,6 +65,7 @@ public class TestHttpsFileSystem { cluster.getFileSystem().create(new Path("/test")).close(); InetSocketAddress addr = cluster.getNameNode().getHttpsAddress(); nnAddr = addr.getHostName() + ":" + addr.getPort(); + conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr); } @AfterClass @@ -80,4 +81,15 @@ public class TestHttpsFileSystem { Assert.assertTrue(fs.exists(new Path("/test"))); fs.close(); } + + @Test + public void testSWebHdfsFileSystem() throws Exception { + FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, "swebhdfs"); + final Path f = new Path("/testswebhdfs"); + FSDataOutputStream os = fs.create(f); + os.write(23); + os.close(); + Assert.assertTrue(fs.exists(f)); + fs.close(); + } } Modified: 
hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java Thu Nov 21 20:06:09 2013 @@ -34,6 +34,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSConfigKeys; +import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.TestDFSClientRetries; import org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods; @@ -101,7 +102,7 @@ public class TestWebHDFS { try { cluster.waitActive(); - final FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf); + final FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME); final Path dir = new Path("/test/largeFile"); Assert.assertTrue(fs.mkdirs(dir)); @@ -229,9 +230,9 @@ public class TestWebHDFS { new MiniDFSCluster.Builder(conf).numDataNodes(3).build(); try { cluster.waitActive(); - WebHdfsTestUtil.getWebHdfsFileSystem(conf).setPermission( - new Path("/"), - new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); + WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME) + .setPermission(new Path("/"), + new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL)); // trick the NN into not believing it's not the superuser so we can // tell if the correct user is used by listStatus 
@@ -243,8 +244,9 @@ public class TestWebHDFS { .doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws IOException, URISyntaxException { - FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf); - Path d = new Path("/my-dir"); + FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, + WebHdfsFileSystem.SCHEME); + Path d = new Path("/my-dir"); Assert.assertTrue(fs.mkdirs(d)); for (int i=0; i < listLimit*3; i++) { Path p = new Path(d, "file-"+i); @@ -258,4 +260,16 @@ public class TestWebHDFS { cluster.shutdown(); } } + + /** + * WebHdfs should be enabled by default after HDFS-5532 + * + * @throws Exception + */ + @Test + public void testWebHdfsEnabledByDefault() throws Exception { + Configuration conf = new HdfsConfiguration(); + Assert.assertTrue(conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, + false)); + } } Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java Thu Nov 21 20:06:09 2013 @@ -82,7 +82,7 @@ public class TestWebHdfsFileSystemContra final UserGroupInformation current = UserGroupInformation.getCurrentUser(); ugi = UserGroupInformation.createUserForTesting( current.getShortUserName() + "x", new String[]{"user"}); - fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf); + fs = WebHdfsTestUtil.getWebHdfsFileSystemAs(ugi, conf, 
WebHdfsFileSystem.SCHEME); defaultWorkingDirectory = fs.getWorkingDirectory().toUri().getPath(); } Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTimeouts.java Thu Nov 21 20:06:09 2013 @@ -18,35 +18,32 @@ package org.apache.hadoop.hdfs.web; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; -import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.ServerSocket; import java.net.Socket; -import java.net.SocketAddress; import java.net.SocketTimeoutException; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.List; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; -import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.net.NetUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; /** * This test suite checks that WebHdfsFileSystem sets 
connection timeouts and @@ -77,7 +74,7 @@ public class TestWebHdfsTimeouts { serverSocket = new ServerSocket(0, CONNECTION_BACKLOG); nnHttpAddress = new InetSocketAddress("localhost", serverSocket.getLocalPort()); conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "localhost:" + serverSocket.getLocalPort()); - fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf); + fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME); fs.connectionFactory = connectionFactory; clients = new ArrayList<SocketChannel>(); serverThread = null; Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/WebHdfsTestUtil.java Thu Nov 21 20:06:09 2013 @@ -46,20 +46,36 @@ public class WebHdfsTestUtil { return conf; } - public static WebHdfsFileSystem getWebHdfsFileSystem(final Configuration conf - ) throws IOException, URISyntaxException { - final String uri = WebHdfsFileSystem.SCHEME + "://" - + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY); + public static WebHdfsFileSystem getWebHdfsFileSystem( + final Configuration conf, String scheme) throws IOException, + URISyntaxException { + final String uri; + + if (WebHdfsFileSystem.SCHEME.equals(scheme)) { + uri = WebHdfsFileSystem.SCHEME + "://" + + conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY); + } else if (SWebHdfsFileSystem.SCHEME.equals(scheme)) { + uri = SWebHdfsFileSystem.SCHEME + "://" + + 
conf.get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY); + } else { + throw new IllegalArgumentException("unknown scheme:" + scheme); + } return (WebHdfsFileSystem)FileSystem.get(new URI(uri), conf); } public static WebHdfsFileSystem getWebHdfsFileSystemAs( - final UserGroupInformation ugi, final Configuration conf + final UserGroupInformation ugi, final Configuration conf ) throws IOException, InterruptedException { + return getWebHdfsFileSystemAs(ugi, conf, WebHdfsFileSystem.SCHEME); + } + + public static WebHdfsFileSystem getWebHdfsFileSystemAs( + final UserGroupInformation ugi, final Configuration conf, final String scheme ) throws IOException, InterruptedException { return ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() { @Override public WebHdfsFileSystem run() throws Exception { - return getWebHdfsFileSystem(conf); + return getWebHdfsFileSystem(conf, scheme); } }); } Modified: hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml?rev=1544306&r1=1544305&r2=1544306&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml (original) +++ hadoop/common/branches/HDFS-2832/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/testCacheAdminConf.xml Thu Nov 21 20:06:09 2013 @@ -90,7 +90,7 @@ <comparators> <comparator> <type>SubstringComparator</type> - <expected-output>poolparty bob bobgroup rwxrwxrwx 51</expected-output> + <expected-output>poolparty bob bobgroup rwxrwxrwx 51</expected-output> </comparator> </comparators> </test> @@ -129,11 +129,11 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>bar alice alicegroup rwxr-xr-x 100 </expected-output> + <expected-output>bar alice 
alicegroup rwxr-xr-x 100</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>foo bob bob rw-rw-r-- 100 </expected-output> + <expected-output>foo bob bob rw-rw-r-- 100</expected-output> </comparator> </comparators> </test> @@ -156,7 +156,7 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>foo bob bob rw-rw-r-- 100 </expected-output> + <expected-output>foo bob bob rw-rw-r-- 100</expected-output> </comparator> </comparators> </test> @@ -180,15 +180,15 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>1 pool1 1 /foo</expected-output> + <expected-output> 1 pool1 1 /foo</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>2 pool1 1 /bar</expected-output> + <expected-output> 2 pool1 1 /bar</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>3 pool1 2 /baz</expected-output> + <expected-output> 3 pool1 2 /baz</expected-output> </comparator> </comparators> </test> @@ -234,11 +234,11 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>8 pool2 1 /baz</expected-output> + <expected-output> 8 pool2 1 /baz</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>9 pool2 1 /buz</expected-output> + <expected-output> 9 pool2 1 /buz</expected-output> </comparator> </comparators> </test> @@ -265,11 +265,11 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>10 pool1 1 /foo</expected-output> + <expected-output> 10 pool1 1 /foo</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>12 pool2 1 /foo</expected-output> + <expected-output> 12 pool2 1 /foo</expected-output> </comparator> </comparators> </test> @@ -296,7 +296,7 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>16 pool2 1 /foo</expected-output> + <expected-output> 
16 pool2 1 /foo</expected-output> </comparator> </comparators> </test> @@ -320,7 +320,7 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>19 pool1 1 /bar</expected-output> + <expected-output> 19 pool1 1 /bar</expected-output> </comparator> </comparators> </test> @@ -349,11 +349,11 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>22 pool1 1 /bar</expected-output> + <expected-output> 22 pool1 1 /bar</expected-output> </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>24 pool2 1 /bar</expected-output> + <expected-output> 24 pool2 1 /bar</expected-output> </comparator> </comparators> </test> @@ -379,7 +379,7 @@ </comparator> <comparator> <type>SubstringComparator</type> - <expected-output>25 pool1 1 /bar3</expected-output> + <expected-output> 25 pool1 1 /bar3</expected-output> </comparator> </comparators> </test>