Author: jing9
Date: Thu Jul 17 17:39:54 2014
New Revision: 1611412

URL: http://svn.apache.org/r1611412
Log:
HDFS-6478. Merge r1611410 from trunk.
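The gist of the change: in the non-HA path, the retry proxy used to wrap the raw protobuf stub, one layer below the translator that converts the RPC engine's ServiceException back into a RemoteException. At that level a policy installed through RetryPolicies.retryByRemoteException can never match, because the retry handler only ever sees the ServiceException wrapper. The patch moves RetryProxy.create up to the translator interfaces (NamenodeProtocol, ClientProtocol), where the RemoteException is visible. Below is a minimal, self-contained sketch of that mechanism using Hadoop's real retry classes; the Creator interface, the fake translator, and the attempt counter are hypothetical and not part of the patch.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.ipc.RemoteException;

public class RetryAboveTranslatorSketch {
  /** Toy stand-in for a translator interface such as ClientProtocol. */
  public interface Creator {
    void create(String src) throws IOException;
  }

  public static void main(String[] args) throws Exception {
    // Retry create() up to 5 times when the *remote* exception class is
    // AlreadyBeingCreatedException, mirroring createNNProxyWithClientProtocol.
    RetryPolicy createPolicy = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        5, 100, TimeUnit.MILLISECONDS);
    Map<Class<? extends Exception>, RetryPolicy> remoteExceptionToPolicyMap
        = new HashMap<Class<? extends Exception>, RetryPolicy>();
    remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class,
        createPolicy);
    RetryPolicy methodPolicy = RetryPolicies.retryByRemoteException(
        RetryPolicies.TRY_ONCE_THEN_FAIL, remoteExceptionToPolicyMap);
    Map<String, RetryPolicy> methodNameToPolicyMap
        = new HashMap<String, RetryPolicy>();
    methodNameToPolicyMap.put("create", methodPolicy);

    final int[] attempts = {0};
    // The "translator" layer throws a plain RemoteException, which is the
    // form retryByRemoteException can match by remote class name. One layer
    // down, the same failure is still wrapped in a protobuf ServiceException,
    // and no RemoteException-keyed policy would fire there.
    Creator translator = new Creator() {
      @Override
      public void create(String src) throws IOException {
        if (++attempts[0] < 3) {
          throw new RemoteException(
              AlreadyBeingCreatedException.class.getName(),
              "failed to create " + src + ": lease already held");
        }
      }
    };
    Creator withRetries = (Creator) RetryProxy.create(
        Creator.class, translator, methodNameToPolicyMap);
    withRetries.create("/testfile");  // fails twice, succeeds on attempt 3
    System.out.println("attempts = " + attempts[0]);  // prints: attempts = 3
  }
}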
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Thu Jul 17 17:39:54 2014
@@ -69,6 +69,9 @@ Release 2.6.0 - UNRELEASED
     HDFS-6689. NFS doesn't return correct lookup access for direcories
     (brandonli)
 
+    HDFS-6478. RemoteException can't be retried properly for non-HA scenario.
+    (Ming Ma via jing9)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java Thu Jul 17 17:39:54 2014
@@ -333,19 +333,18 @@ public class NameNodeProxies {
         address, conf, ugi, NamenodeProtocolPB.class);
     if (withRetries) { // create the proxy with retries
       RetryPolicy timeoutPolicy = RetryPolicies.exponentialBackoffRetry(5, 200,
-              TimeUnit.MILLISECONDS);
-      Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap
-          = new HashMap<Class<? extends Exception>, RetryPolicy>();
-      RetryPolicy methodPolicy = RetryPolicies.retryByException(timeoutPolicy,
-          exceptionToPolicyMap);
-      Map<String, RetryPolicy> methodNameToPolicyMap
-          = new HashMap<String, RetryPolicy>();
-      methodNameToPolicyMap.put("getBlocks", methodPolicy);
-      methodNameToPolicyMap.put("getAccessKeys", methodPolicy);
-      proxy = (NamenodeProtocolPB) RetryProxy.create(NamenodeProtocolPB.class,
-          proxy, methodNameToPolicyMap);
+          TimeUnit.MILLISECONDS);
+      Map<String, RetryPolicy> methodNameToPolicyMap
+          = new HashMap<String, RetryPolicy>();
+      methodNameToPolicyMap.put("getBlocks", timeoutPolicy);
+      methodNameToPolicyMap.put("getAccessKeys", timeoutPolicy);
+      NamenodeProtocol translatorProxy =
+          new NamenodeProtocolTranslatorPB(proxy);
+      return (NamenodeProtocol) RetryProxy.create(
+          NamenodeProtocol.class, translatorProxy, methodNameToPolicyMap);
+    } else {
+      return new NamenodeProtocolTranslatorPB(proxy);
     }
-    return new NamenodeProtocolTranslatorPB(proxy);
   }
 
   private static ClientProtocol createNNProxyWithClientProtocol(
@@ -379,29 +378,27 @@ public class NameNodeProxies {
         = new HashMap<Class<? extends Exception>, RetryPolicy>();
     remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class,
         createPolicy);
-
-    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap
-        = new HashMap<Class<? extends Exception>, RetryPolicy>();
-    exceptionToPolicyMap.put(RemoteException.class, RetryPolicies
-        .retryByRemoteException(defaultPolicy,
-            remoteExceptionToPolicyMap));
-    RetryPolicy methodPolicy = RetryPolicies.retryByException(
-        defaultPolicy, exceptionToPolicyMap);
+
+    RetryPolicy methodPolicy = RetryPolicies.retryByRemoteException(
+        defaultPolicy, remoteExceptionToPolicyMap);
     Map<String, RetryPolicy> methodNameToPolicyMap
         = new HashMap<String, RetryPolicy>();
     methodNameToPolicyMap.put("create", methodPolicy);
-
-    proxy = (ClientNamenodeProtocolPB) RetryProxy.create(
-        ClientNamenodeProtocolPB.class,
-        new DefaultFailoverProxyProvider<ClientNamenodeProtocolPB>(
-            ClientNamenodeProtocolPB.class, proxy),
+
+    ClientProtocol translatorProxy =
+        new ClientNamenodeProtocolTranslatorPB(proxy);
+    return (ClientProtocol) RetryProxy.create(
+        ClientProtocol.class,
+        new DefaultFailoverProxyProvider<ClientProtocol>(
+            ClientProtocol.class, translatorProxy),
         methodNameToPolicyMap,
         defaultPolicy);
+    } else {
+      return new ClientNamenodeProtocolTranslatorPB(proxy);
     }
-    return new ClientNamenodeProtocolTranslatorPB(proxy);
   }
-  
+
   private static Object createNameNodeProxy(InetSocketAddress address,
       Configuration conf, UserGroupInformation ugi, Class<?> xface)
       throws IOException {
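Both hunks above follow the same pattern: construct the translator first, hand the translator (not the PB stub) to RetryProxy, and return directly from each branch of if (withRetries). Condensed into one standalone helper for reference; the class and method names and the bare RPC.getProxy setup are illustrative assumptions, not code from this patch.

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolTranslatorPB;
import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;
import org.apache.hadoop.io.retry.RetryProxy;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;

public class NamenodeProtocolRetrySketch {
  static NamenodeProtocol buildNamenodeProtocol(InetSocketAddress addr,
      Configuration conf) throws IOException {
    RPC.setProtocolEngine(conf, NamenodeProtocolPB.class,
        ProtobufRpcEngine.class);
    // Raw protobuf stub: owns the connection, no retry semantics of its own.
    NamenodeProtocolPB stub = RPC.getProxy(NamenodeProtocolPB.class,
        RPC.getProtocolVersion(NamenodeProtocolPB.class), addr, conf);

    // Back off exponentially, up to 5 attempts starting near 200 ms, for the
    // two bulk-read methods (used by the balancer, among others).
    RetryPolicy timeoutPolicy = RetryPolicies.exponentialBackoffRetry(5, 200,
        TimeUnit.MILLISECONDS);
    Map<String, RetryPolicy> methodNameToPolicyMap
        = new HashMap<String, RetryPolicy>();
    methodNameToPolicyMap.put("getBlocks", timeoutPolicy);
    methodNameToPolicyMap.put("getAccessKeys", timeoutPolicy);

    // Wrap the translator, not the stub, so retry policies see server-side
    // failures in their RemoteException form.
    NamenodeProtocol translatorProxy = new NamenodeProtocolTranslatorPB(stub);
    return (NamenodeProtocol) RetryProxy.create(
        NamenodeProtocol.class, translatorProxy, methodNameToPolicyMap);
  }
}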
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java Thu Jul 17 17:39:54 2014
@@ -97,7 +97,7 @@ public class DatanodeProtocolClientSideT
         DatanodeProtocolPB.class, ProtobufRpcEngine.class);
     UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-    rpcProxy = createNamenodeWithRetry(createNamenode(nameNodeAddr, conf, ugi));
+    rpcProxy = createNamenode(nameNodeAddr, conf, ugi);
   }
 
   private static DatanodeProtocolPB createNamenode(
@@ -109,33 +109,6 @@ public class DatanodeProtocolClientSideT
         org.apache.hadoop.ipc.Client.getPingInterval(conf), null).getProxy();
   }
 
-  /** Create a {@link NameNode} proxy */
-  static DatanodeProtocolPB createNamenodeWithRetry(
-      DatanodeProtocolPB rpcNamenode) {
-    RetryPolicy createPolicy = RetryPolicies
-        .retryUpToMaximumCountWithFixedSleep(5,
-            HdfsConstants.LEASE_SOFTLIMIT_PERIOD, TimeUnit.MILLISECONDS);
-
-    Map<Class<? extends Exception>, RetryPolicy> remoteExceptionToPolicyMap =
-        new HashMap<Class<? extends Exception>, RetryPolicy>();
-    remoteExceptionToPolicyMap.put(AlreadyBeingCreatedException.class,
-        createPolicy);
-
-    Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap =
-        new HashMap<Class<? extends Exception>, RetryPolicy>();
-    exceptionToPolicyMap.put(RemoteException.class, RetryPolicies
-        .retryByRemoteException(RetryPolicies.TRY_ONCE_THEN_FAIL,
-            remoteExceptionToPolicyMap));
-    RetryPolicy methodPolicy = RetryPolicies.retryByException(
-        RetryPolicies.TRY_ONCE_THEN_FAIL, exceptionToPolicyMap);
-    Map<String, RetryPolicy> methodNameToPolicyMap = new HashMap<String, RetryPolicy>();
-
-    methodNameToPolicyMap.put("create", methodPolicy);
-
-    return (DatanodeProtocolPB) RetryProxy.create(DatanodeProtocolPB.class,
-        rpcNamenode, methodNameToPolicyMap);
-  }
-
   @Override
   public void close() throws IOException {
     RPC.stopProxy(rpcProxy);
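The deleted createNamenodeWithRetry appears to have been copied from the client-side proxy setup and could not do useful work where it sat: DatanodeProtocol has no method named "create" for the policy map to match, and below the translator a server-side failure is still wrapped in a protobuf ServiceException, so a RemoteException-keyed policy would not fire in any case. A toy illustration of that wrapping; the demo class is hypothetical.

import com.google.protobuf.ServiceException;

import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.ipc.RemoteException;

public class WrappedFailureDemo {
  public static void main(String[] args) {
    // The server-side failure as a translator re-throws it:
    RemoteException asSeenAboveTranslator = new RemoteException(
        AlreadyBeingCreatedException.class.getName(),
        "failed to create file: already being created");
    // The same failure as ProtobufRpcEngine surfaces it below the translator:
    ServiceException asSeenBelowTranslator =
        new ServiceException(asSeenAboveTranslator);

    // A retryByRemoteException policy can only match the first form.
    System.out.println(asSeenBelowTranslator instanceof RemoteException); // false
    System.out.println(asSeenAboveTranslator.getClassName()); // the policy lookup key
  }
}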
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java Thu Jul 17 17:39:54 2014
@@ -47,6 +47,7 @@ import org.apache.hadoop.hdfs.server.pro
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
+import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 
@@ -61,7 +62,7 @@ import com.google.protobuf.ServiceExcept
 @InterfaceAudience.Private
 @InterfaceStability.Stable
 public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
-    ProtocolMetaInterface, Closeable {
+    ProtocolMetaInterface, Closeable, ProtocolTranslator {
   /** RpcController is not used and hence is set to null */
   private final static RpcController NULL_CONTROLLER = null;
 
@@ -89,6 +90,11 @@ public class NamenodeProtocolTranslatorP
   }
 
   @Override
+  public Object getUnderlyingProxyObject() {
+    return rpcProxy;
+  }
+
+  @Override
   public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size)
       throws IOException {
     GetBlocksRequestProto req = GetBlocksRequestProto.newBuilder()
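Since callers now hold the translator (often behind a retry proxy) rather than the PB stub, framework utilities still need a path down to the stub that owns the RPC connection. That is what ProtocolTranslator.getUnderlyingProxyObject provides; RPC.stopProxy, for example, unwraps through it before closing the connection. A sketch of that unwrapping walk; the helper class is illustrative, not Hadoop code.

import org.apache.hadoop.ipc.ProtocolTranslator;

public final class ProxyUnwrapSketch {
  /**
   * Follow getUnderlyingProxyObject() until something that is not a
   * translator is reached, e.g. the NamenodeProtocolPB stub that actually
   * owns the RPC connection.
   */
  static Object unwrap(Object proxy) {
    Object current = proxy;
    while (current instanceof ProtocolTranslator) {
      current = ((ProtocolTranslator) current).getUnderlyingProxyObject();
    }
    return current;
  }

  private ProxyUnwrapSketch() {}
}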
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileCreation.java Thu Jul 17 17:39:54 2014
@@ -30,6 +30,8 @@ import static org.apache.hadoop.hdfs.DFS
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_REPLICATION_KEY;
+import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
+import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -79,6 +81,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Time;
@@ -97,6 +100,8 @@ public class TestFileCreation {
     ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ALL);
     ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
   }
+  private static final String RPC_DETAILED_METRICS =
+      "RpcDetailedActivityForPort";
 
   static final long seed = 0xDEADBEEFL;
   static final int blockSize = 8192;
@@ -371,7 +376,7 @@ public class TestFileCreation {
     conf.setBoolean(DFSConfigKeys.DFS_PERMISSIONS_ENABLED_KEY, false);
     final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
     FileSystem fs = cluster.getFileSystem();
-    
+
     UserGroupInformation otherUgi = UserGroupInformation.createUserForTesting(
         "testuser", new String[]{"testgroup"});
     FileSystem fs2 = otherUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
@@ -380,12 +385,16 @@ public class TestFileCreation {
         return FileSystem.get(cluster.getConfiguration(0));
       }
     });
-    
+
+    String metricsName = RPC_DETAILED_METRICS + cluster.getNameNodePort();
+
     try {
       Path p = new Path("/testfile");
       FSDataOutputStream stm1 = fs.create(p);
       stm1.write(1);
+      assertCounter("CreateNumOps", 1L, getMetrics(metricsName));
+
       // Create file again without overwrite
       try {
         fs2.create(p, false);
@@ -394,7 +403,9 @@ public class TestFileCreation {
         GenericTestUtils.assertExceptionContains("already being created by",
             abce);
       }
-      
+      // NameNodeProxies' createNNProxyWithClientProtocol has 5 retries.
+      assertCounter("AlreadyBeingCreatedExceptionNumOps",
+          6L, getMetrics(metricsName));
       FSDataOutputStream stm2 = fs2.create(p, true);
       stm2.write(2);
       stm2.close();
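The updated test pins down the retry behavior from the server side. The NameNode's RPC detailed metrics record, named RpcDetailedActivityForPort<port>, keeps a <Method>NumOps counter per RPC method and an <Exception>NumOps counter per exception class thrown. One successful create gives CreateNumOps = 1; a create denied while the lease is still held is attempted once and retried five times by createNNProxyWithClientProtocol, so AlreadyBeingCreatedExceptionNumOps ends at 6. The same two checks as standalone helpers, assuming a running MiniDFSCluster; names follow the test.

import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;

import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.metrics2.MetricsRecordBuilder;

public class RetryMetricsChecks {
  // After the first successful fs.create(p): exactly one create RPC served.
  static void checkAfterFirstCreate(MiniDFSCluster cluster) {
    MetricsRecordBuilder rb = getMetrics(
        "RpcDetailedActivityForPort" + cluster.getNameNodePort());
    assertCounter("CreateNumOps", 1L, rb);
  }

  // After fs2.create(p, false) is denied while the lease is held: the client
  // made 1 attempt plus 5 retries, so the exception was recorded 6 times.
  static void checkAfterDeniedCreate(MiniDFSCluster cluster) {
    MetricsRecordBuilder rb = getMetrics(
        "RpcDetailedActivityForPort" + cluster.getNameNodePort());
    assertCounter("AlreadyBeingCreatedExceptionNumOps", 6L, rb);
  }
}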
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java?rev=1611412&r1=1611411&r2=1611412&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestIsMethodSupported.java Thu Jul 17 17:39:54 2014
@@ -25,14 +25,16 @@ import java.net.InetSocketAddress;
 
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocolPB.ClientDatanodeProtocolTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.InterDatanodeProtocolTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.JournalProtocolTranslatorPB;
-import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -76,16 +78,22 @@ public class TestIsMethodSupported {
 
   @Test
   public void testNamenodeProtocol() throws IOException {
-    NamenodeProtocolTranslatorPB translator =
-        (NamenodeProtocolTranslatorPB) NameNodeProxies.createNonHAProxy(conf,
+    NamenodeProtocol np =
+        NameNodeProxies.createNonHAProxy(conf,
            nnAddress, NamenodeProtocol.class,
            UserGroupInformation.getCurrentUser(), true).getProxy();
-    boolean exists = translator.isMethodSupported("rollEditLog");
+
+    boolean exists = RpcClientUtil.isMethodSupported(np,
+        NamenodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+        RPC.getProtocolVersion(NamenodeProtocolPB.class), "rollEditLog");
+
     assertTrue(exists);
-    exists = translator.isMethodSupported("bogusMethod");
+    exists = RpcClientUtil.isMethodSupported(np,
+        NamenodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+        RPC.getProtocolVersion(NamenodeProtocolPB.class), "bogusMethod");
     assertFalse(exists);
   }
-  
+
   @Test
   public void testDatanodeProtocol() throws IOException {
     DatanodeProtocolClientSideTranslatorPB translator =
@@ -107,16 +115,18 @@ public class TestIsMethodSupported {
         NetUtils.getDefaultSocketFactory(conf));
     assertTrue(translator.isMethodSupported("refreshNamenodes"));
   }
-  
+
   @Test
   public void testClientNamenodeProtocol() throws IOException {
-    ClientNamenodeProtocolTranslatorPB translator =
-        (ClientNamenodeProtocolTranslatorPB) NameNodeProxies.createNonHAProxy(
+    ClientProtocol cp =
+        NameNodeProxies.createNonHAProxy(
            conf, nnAddress, ClientProtocol.class,
            UserGroupInformation.getCurrentUser(), true).getProxy();
-    assertTrue(translator.isMethodSupported("mkdirs"));
+    RpcClientUtil.isMethodSupported(cp,
+        ClientNamenodeProtocolPB.class, RPC.RpcKind.RPC_PROTOCOL_BUFFER,
+        RPC.getProtocolVersion(ClientNamenodeProtocolPB.class), "mkdirs");
   }
-  
+
   @Test
   public void tesJournalProtocol() throws IOException {
     JournalProtocolTranslatorPB translator = (JournalProtocolTranslatorPB)