Author: jing9
Date: Wed Nov 27 18:20:14 2013
New Revision: 1546151

URL: http://svn.apache.org/r1546151
Log:
HDFS-5545. Allow specifying endpoints for listeners in HttpServer. Contributed by Haohui Mai.

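In summary, the patch moves callers off the single-listener HttpServer.Builder
calls (setBindAddress/setPort) and onto URI-based endpoints, so one server can
bind several listeners (e.g. HTTP and HTTPS). A minimal before/after sketch of
the pattern, taken from the JournalNode call site in the diff below:

    // Before: one implicit listener, host and port set separately
    httpServer = new HttpServer.Builder().setName("journal")
        .setBindAddress(bindAddr.getHostName()).setPort(tmpInfoPort)
        .build();

    // After: each listener is an explicit URI endpoint; the scheme
    // (http vs. https) determines the kind of connector created
    httpServer = new HttpServer.Builder().setName("journal")
        .addEndpoint(URI.create("http://" + NetUtils.getHostPortString(bindAddr)))
        .build();
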
Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Nov 27 18:20:14 2013
@@ -221,6 +221,9 @@ Trunk (Unreleased)
     HDFS-5556. Add some more NameNode cache statistics, cache pool stats
     (cmccabe)
 
+    HDFS-5545. Allow specifying endpoints for listeners in HttpServer. (Haohui
+    Mai via jing9)
+
   OPTIMIZATIONS
     HDFS-5349. DNA_CACHE and DNA_UNCACHE should be by blockId only. (cmccabe)
 

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Wed Nov 27 18:20:14 2013
@@ -75,6 +75,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
@@ -1410,4 +1411,19 @@ public class DFSUtil {
     return (value == null || value.isEmpty()) ?
         defaultKey : DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_KEYTAB_KEY;
   }
+
+  public static HttpServer.Builder loadSslConfToHttpServerBuilder(
+      HttpServer.Builder builder, Configuration sslConf) {
+    return builder
+        .needsClientAuth(
+            sslConf.getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT))
+        .keyPassword(sslConf.get("ssl.server.keystore.keypassword"))
+        .keyStore(sslConf.get("ssl.server.keystore.location"),
+            sslConf.get("ssl.server.keystore.password"),
+            sslConf.get("ssl.server.keystore.type", "jks"))
+        .trustStore(sslConf.get("ssl.server.truststore.location"),
+            sslConf.get("ssl.server.truststore.password"),
+            sslConf.get("ssl.server.truststore.type", "jks"));
+  }
 }
\ No newline at end of file

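The new DFSUtil.loadSslConfToHttpServerBuilder helper centralizes the
translation from ssl-server.xml settings to builder calls. A sketch of the
calling pattern, mirroring the DataNode and NameNode call sites later in this
commit (the conf and builder variables are whatever the caller already has in
scope):

    // Load the server-side SSL resource (ssl-server.xml by default) and
    // carry the client-auth flag over from the daemon configuration, since
    // the helper reads it from sslConf rather than conf.
    Configuration sslConf = new Configuration(false);
    sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
        conf.getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
            DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT));
    sslConf.addResource(conf.get(
        DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
        DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
    DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
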
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeHttpServer.java Wed Nov 27 18:20:14 2013
@@ -23,6 +23,8 @@ import static org.apache.hadoop.hdfs.DFS
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
 
 import javax.servlet.ServletContext;
 
@@ -69,8 +71,15 @@ public class JournalNodeHttpServer {
         bindAddr.getHostName()));
 
     int tmpInfoPort = bindAddr.getPort();
+    URI httpEndpoint;
+    try {
+      httpEndpoint = new URI("http://" + NetUtils.getHostPortString(bindAddr));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+
     httpServer = new HttpServer.Builder().setName("journal")
-        .setBindAddress(bindAddr.getHostName()).setPort(tmpInfoPort)
+        .addEndpoint(httpEndpoint)
         .setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
@@ -85,7 +94,7 @@ public class JournalNodeHttpServer {
     httpServer.start();
 
     // The web-server port can be ephemeral... ensure we have the correct info
-    infoPort = httpServer.getPort();
+    infoPort = httpServer.getConnectorAddress(0).getPort();
 
     LOG.info("Journal Web-server up at: " + bindAddr + ":" + infoPort);
   }
@@ -104,7 +113,7 @@ public class JournalNodeHttpServer {
    * Return the actual address bound to by the running server.
    */
   public InetSocketAddress getAddress() {
-    InetSocketAddress addr = httpServer.getListenerAddress();
+    InetSocketAddress addr = httpServer.getConnectorAddress(0);
     assert addr.getPort() != 0;
     return addr;
   }

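Because a server may now own several connectors, the single-listener accessors
(getPort, getListenerAddress) give way to an indexed lookup; index 0 is the
first endpoint registered on the builder. A sketch of the lookup pattern used
throughout this commit:

    // After start(), an ephemeral port (0) in the endpoint URI has been
    // resolved to the real bound port; read it back per connector.
    InetSocketAddress httpAddr = httpServer.getConnectorAddress(0);
    int infoPort = httpAddr.getPort();
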
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java Wed Nov 27 18:20:14 2013
@@ -22,6 +22,7 @@ import com.google.common.annotations.Vis
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.protobuf.BlockingService;
+
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -181,6 +182,7 @@ public class DataNode extends Configured
   private volatile boolean heartbeatsDisabledForTests = false;
   private DataStorage storage = null;
   private HttpServer infoServer = null;
+  private int infoPort;
   private int infoSecurePort;
   DataNodeMetrics metrics;
   private InetSocketAddress streamingAddr;
@@ -310,27 +312,33 @@ public class DataNode extends Configured
     String infoHost = infoSocAddr.getHostName();
     int tmpInfoPort = infoSocAddr.getPort();
     HttpServer.Builder builder = new HttpServer.Builder().setName("datanode")
-        .setBindAddress(infoHost).setPort(tmpInfoPort)
+        .addEndpoint(URI.create("http://" + NetUtils.getHostPortString(infoSocAddr)))
         .setFindPort(tmpInfoPort == 0).setConf(conf)
         .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")));
-    this.infoServer = (secureResources == null) ? builder.build() :
-        builder.setConnector(secureResources.getListener()).build();
 
     LOG.info("Opened info server at " + infoHost + ":" + tmpInfoPort);
     if (conf.getBoolean(DFS_HTTPS_ENABLE_KEY, false)) {
-      boolean needClientAuth = conf.getBoolean(DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                                               DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
           DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0));
-      Configuration sslConf = new HdfsConfiguration(false);
-      sslConf.addResource(conf.get(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-          "ssl-server.xml"));
-      this.infoServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      builder.addEndpoint(URI.create("https://";
+          + NetUtils.getHostPortString(secInfoSocAddr)));
+      Configuration sslConf = new Configuration(false);
+      sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf
+          .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+              DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT));
+      sslConf.addResource(conf.get(
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+      DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
+
       if(LOG.isDebugEnabled()) {
         LOG.debug("Datanode listening for SSL on " + secInfoSocAddr);
       }
       infoSecurePort = secInfoSocAddr.getPort();
     }
+
+    this.infoServer = (secureResources == null) ? builder.build() :
+      builder.setConnector(secureResources.getListener()).build();
     this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
     this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
         FileChecksumServlets.GetServlet.class);
@@ -346,6 +354,7 @@ public class DataNode extends Configured
           WebHdfsFileSystem.PATH_PREFIX + "/*");
     }
     this.infoServer.start();
+    this.infoPort = infoServer.getConnectorAddress(0).getPort();
   }
   
   private void startPlugins(Configuration conf) {
@@ -2276,7 +2285,7 @@ public class DataNode extends Configured
    * @return the datanode's http port
    */
   public int getInfoPort() {
-    return infoServer.getPort();
+    return infoPort;
   }
 
   /**

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Wed Nov 27 18:20:14 2013
@@ -41,6 +41,7 @@ import org.apache.hadoop.ha.HealthCheckF
 import org.apache.hadoop.ha.ServiceFailedException;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Trash;
+
 import static org.apache.hadoop.hdfs.DFSConfigKeys.*;
 import static org.apache.hadoop.util.ExitUtil.terminate;
 import static org.apache.hadoop.util.ToolRunner.confirmPrompt;

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java Wed Nov 27 18:20:14 2013
@@ -21,6 +21,7 @@ import static org.apache.hadoop.hdfs.DFS
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -69,25 +70,45 @@ public class NameNodeHttpServer {
     this.bindAddress = bindAddress;
   }
   
-  public void start() throws IOException {
+  void start() throws IOException {
     final String infoHost = bindAddress.getHostName();
     int infoPort = bindAddress.getPort();
-    httpServer = new HttpServer.Builder().setName("hdfs")
-        .setBindAddress(infoHost).setPort(infoPort)
+    HttpServer.Builder builder = new HttpServer.Builder().setName("hdfs")
+        .addEndpoint(URI.create(("http://"; + 
NetUtils.getHostPortString(bindAddress))))
         .setFindPort(infoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
         .setUsernameConfKey(
             DFSConfigKeys.DFS_NAMENODE_INTERNAL_SPNEGO_USER_NAME_KEY)
         .setKeytabConfKey(DFSUtil.getSpnegoKeytabKey(conf,
-            DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY)).build();
+            DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY));
+
+    boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
+    if (certSSL) {
+      httpsAddress = NetUtils.createSocketAddr(conf.get(
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
+
+      builder.addEndpoint(URI.create("https://";
+          + NetUtils.getHostPortString(httpsAddress)));
+      Configuration sslConf = new Configuration(false);
+      sslConf.setBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY, conf
+          .getBoolean(DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+              DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT));
+      sslConf.addResource(conf.get(
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+      DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
+    }
+
+    httpServer = builder.build();
     if (WebHdfsFileSystem.isEnabled(conf, HttpServer.LOG)) {
       //add SPNEGO authentication filter for webhdfs
       final String name = "SPNEGO";
       final String classname = AuthFilter.class.getName();
       final String pathSpec = WebHdfsFileSystem.PATH_PREFIX + "/*";
       Map<String, String> params = getAuthFilterParams(conf);
-      httpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
+      HttpServer.defineFilter(httpServer.getWebAppContext(), name, classname, params,
           new String[]{pathSpec});
       HttpServer.LOG.info("Added filter '" + name + "' (class=" + classname + 
")");
 
@@ -97,34 +118,19 @@ public class NameNodeHttpServer {
           + ";" + Param.class.getPackage().getName(), pathSpec);
       }
 
-    boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
+    httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
+    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    setupServlets(httpServer, conf);
+    httpServer.start();
+    httpAddress = httpServer.getConnectorAddress(0);
     if (certSSL) {
-      boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
-      httpsAddress = NetUtils.createSocketAddr(conf.get(
-          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
-
-      Configuration sslConf = new Configuration(false);
-      sslConf.addResource(conf.get(
-          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-      httpServer.addSslListener(httpsAddress, sslConf, needClientAuth);
+      httpsAddress = httpServer.getConnectorAddress(1);
       // assume same ssl port for all datanodes
       InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
         DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 50475));
       httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, datanodeSslPort
         .getPort());
     }
-    httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
-    httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-    setupServlets(httpServer, conf);
-    httpServer.start();
-    httpAddress = new InetSocketAddress(bindAddress.getAddress(),
-        httpServer.getPort());
-    if (certSSL) {
-      httpsAddress = new InetSocketAddress(bindAddress.getAddress(),
-          httpServer.getConnectorPort(1));
-    }
   }
   
   private Map<String, String> getAuthFilterParams(Configuration conf)

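Note the reordering in NameNodeHttpServer.start(): both endpoints must be
registered on the builder before build(), and the actual bound addresses are
only read back after start(). A condensed sketch of the resulting flow, with
names from the diff above:

    // Endpoint order on the builder fixes the connector indices:
    // index 0 = HTTP (always added), index 1 = HTTPS (only when certSSL).
    httpServer = builder.build();
    // (filters, attributes, and servlets are registered here)
    httpServer.start();
    httpAddress = httpServer.getConnectorAddress(0);
    if (certSSL) {
      httpsAddress = httpServer.getConnectorAddress(1);
    }
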
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java?rev=1546151&r1=1546150&r2=1546151&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java Wed Nov 27 18:20:14 2013
@@ -30,6 +30,7 @@ import java.io.FilenameFilter;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.security.PrivilegedAction;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
@@ -214,7 +215,7 @@ public class SecondaryNameNode implement
   
   /**
    * Initialize SecondaryNameNode.
-   * @param commandLineOpts 
+   * @param commandLineOpts
    */
   private void initialize(final Configuration conf,
       CommandLineOpts commandLineOpts) throws IOException {
@@ -256,8 +257,15 @@ public class SecondaryNameNode implement
 
     // initialize the webserver for uploading files.
     int tmpInfoPort = infoSocAddr.getPort();
+    URI httpEndpoint;
+    try {
+      httpEndpoint = new URI("http://" + NetUtils.getHostPortString(infoSocAddr));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+
     infoServer = new HttpServer.Builder().setName("secondary")
-        .setBindAddress(infoBindAddress).setPort(tmpInfoPort)
+        .addEndpoint(httpEndpoint)
         .setFindPort(tmpInfoPort == 0).setConf(conf).setACL(
             new AccessControlList(conf.get(DFS_ADMIN, " ")))
         .setSecurityEnabled(UserGroupInformation.isSecurityEnabled())
@@ -275,7 +283,7 @@ public class SecondaryNameNode implement
     LOG.info("Web server init done");
 
     // The web-server port can be ephemeral... ensure we have the correct info
-    infoPort = infoServer.getPort();
+    infoPort = infoServer.getConnectorAddress(0).getPort();
 
     conf.set(DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY, infoBindAddress + ":" + infoPort);
     LOG.info("Secondary Web-server up at: " + infoBindAddress + ":" + 
infoPort);

