Author: jing9
Date: Sat Nov 16 01:06:27 2013
New Revision: 1542439

URL: http://svn.apache.org/r1542439
Log:
HDFS-5502. Merge change r1542438 from trunk.

Added:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
      - copied unchanged from r1542438, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
Removed:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestNameNodeHttpServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/ssl-client.xml
Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpDelegationToken.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Sat Nov 16 01:06:27 2013
@@ -186,6 +186,8 @@ Release 2.3.0 - UNRELEASED
 
     HDFS-5438. Flaws in block report processing can cause data loss. (kihwal)
 
+    HDFS-5502. Fix HTTPS support in HsftpFileSystem. (Haohui Mai via jing9)
+
 Release 2.2.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java Sat Nov 16 01:06:27 2013
@@ -793,6 +793,10 @@ public class NameNode implements NameNod
     return httpServer.getHttpAddress();
   }
 
+  public InetSocketAddress getHttpsAddress() {
+    return httpServer.getHttpsAddress();
+  }
+
   /**
    * Verify that configured directories exist, then
    * Interactively confirm that formatting is desired 

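The added accessor mirrors getHttpAddress() and exposes the HTTPS endpoint the server actually bound. A minimal sketch of a hypothetical caller, e.g. a test that needs a URI for the secure port (names are illustrative, not part of this commit):

    import java.net.InetSocketAddress;
    import java.net.URI;

    class HttpsUriExample {
      // Build an https:// URI from the NameNode's bound address rather than
      // re-reading the configured port, which may have been 0 (ephemeral).
      static URI toHttpsUri(InetSocketAddress addr) {
        return URI.create("https://" + addr.getHostName() + ":" + addr.getPort());
      }
    }
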
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java Sat Nov 16 01:06:27 2013
@@ -119,7 +119,12 @@ public class NameNodeHttpServer {
     httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
     setupServlets(httpServer, conf);
     httpServer.start();
-    httpAddress = new InetSocketAddress(bindAddress.getAddress(), httpServer.getPort());
+    httpAddress = new InetSocketAddress(bindAddress.getAddress(),
+        httpServer.getPort());
+    if (certSSL) {
+      httpsAddress = new InetSocketAddress(bindAddress.getAddress(),
+          httpServer.getConnectorPort(1));
+    }
   }
   
   private Map<String, String> getAuthFilterParams(Configuration conf)

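The getConnectorPort(1) call encodes an assumption about this branch's HttpServer: connector 0 is the plain-HTTP listener, and the SSL listener added when certSSL is set is connector 1. Reduced to a sketch (HttpServerLike is an illustrative stand-in, not Hadoop's real HttpServer API):

    import java.net.InetAddress;
    import java.net.InetSocketAddress;

    final class BoundAddresses {
      interface HttpServerLike {
        int getPort();               // port of connector 0 (HTTP)
        int getConnectorPort(int i); // port of connector i
      }

      // Only meaningful when the SSL connector (index 1) has been started,
      // i.e. the certSSL branch above.
      static InetSocketAddress httpsAddress(InetAddress bindAddr,
          HttpServerLike server) {
        return new InetSocketAddress(bindAddr, server.getConnectorPort(1));
      }
    }
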
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java Sat Nov 16 01:06:27 2013
@@ -218,6 +218,8 @@ public class DelegationTokenFetcher {
           .append(renewer);
     }
 
+    boolean isHttps = nnUri.getScheme().equals("https");
+
     HttpURLConnection conn = null;
     DataInputStream dis = null;
     InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
@@ -234,7 +236,7 @@ public class DelegationTokenFetcher {
       dis = new DataInputStream(in);
       ts.readFields(dis);
       for (Token<?> token : ts.getAllTokens()) {
-        token.setKind(HftpFileSystem.TOKEN_KIND);
+        token.setKind(isHttps ? HsftpFileSystem.TOKEN_KIND : HftpFileSystem.TOKEN_KIND);
         SecurityUtil.setTokenService(token, serviceAddr);
       }
       return ts;

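The net effect is that a token fetched over https is tagged with the HSFTP kind, so later renewal and cancellation go back over the same protocol. The mapping as a hedged sketch (the helper class is illustrative; the Text values match the TOKEN_KIND constants defined in HftpFileSystem and HsftpFileSystem below):

    import org.apache.hadoop.io.Text;

    final class TokenKindForScheme {
      static final Text HFTP_KIND = new Text("HFTP delegation");
      static final Text HSFTP_KIND = new Text("HSFTP delegation");

      // "https" selects the HSFTP kind; any other scheme falls back to HFTP.
      static Text kindFor(String scheme) {
        return "https".equals(scheme) ? HSFTP_KIND : HFTP_KIND;
      }
    }
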
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java Sat Nov 16 01:06:27 2013
@@ -86,7 +86,7 @@ public class HftpFileSystem extends File
     HttpURLConnection.setFollowRedirects(true);
   }
 
-  URLConnectionFactory connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
+  URLConnectionFactory connectionFactory;
 
   public static final Text TOKEN_KIND = new Text("HFTP delegation");
 
@@ -98,7 +98,7 @@ public class HftpFileSystem extends File
   public static final String HFTP_TIMEZONE = "UTC";
   public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
 
-  private TokenAspect<HftpFileSystem> tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
+  protected TokenAspect<HftpFileSystem> tokenAspect;
   private Token<?> delegationToken;
   private Token<?> renewToken;
 
@@ -172,6 +172,16 @@ public class HftpFileSystem extends File
     return SCHEME;
   }
 
+  /**
+   * Initialize connectionFactory and tokenAspect. This function is intended to
+   * be overridden by HsFtpFileSystem.
+   */
+  protected void initConnectionFactoryAndTokenAspect(Configuration conf)
+      throws IOException {
+    tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
+    connectionFactory = URLConnectionFactory.DEFAULT_CONNECTION_FACTORY;
+  }
+
   @Override
   public void initialize(final URI name, final Configuration conf)
   throws IOException {
@@ -179,6 +189,7 @@ public class HftpFileSystem extends File
     setConf(conf);
     this.ugi = UserGroupInformation.getCurrentUser();
     this.nnUri = getNamenodeUri(name);
+
     try {
       this.hftpURI = new URI(name.getScheme(), name.getAuthority(),
                              null, null, null);
@@ -186,6 +197,7 @@ public class HftpFileSystem extends File
       throw new IllegalArgumentException(e);
     }
 
+    initConnectionFactoryAndTokenAspect(conf);
     if (UserGroupInformation.isSecurityEnabled()) {
       tokenAspect.initDelegationToken(ugi);
     }
@@ -212,8 +224,8 @@ public class HftpFileSystem extends File
      *
      * For other operations, however, the client has to send a
      * HDFS_DELEGATION_KIND token over the wire so that it can talk to Hadoop
-     * 0.20.3 clusters. Later releases fix this problem. See HDFS-5440 for more
-     * details.
+     * 0.20.203 clusters. Later releases fix this problem. See HDFS-5440 for
+     * more details.
      */
     renewToken = token;
     delegationToken = new Token<T>(token);
@@ -229,13 +241,12 @@ public class HftpFileSystem extends File
       return ugi.doAs(new PrivilegedExceptionAction<Token<?>>() {
         @Override
         public Token<?> run() throws IOException {
-          final String nnHttpUrl = nnUri.toString();
           Credentials c;
           try {
             c = DelegationTokenFetcher.getDTfromRemote(connectionFactory, nnUri, renewer);
           } catch (IOException e) {
             if (e.getCause() instanceof ConnectException) {
-              LOG.warn("Couldn't connect to " + nnHttpUrl +
+              LOG.warn("Couldn't connect to " + nnUri +
                   ", assuming security is disabled");
               return null;
             }

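Taken together, these hunks turn initialize() into a template method: connectionFactory and tokenAspect are no longer field initializers but are installed by an overridable hook before any delegation-token work runs. The shape of the pattern, stripped to an illustrative skeleton (not the real class hierarchy):

    abstract class TemplateFsSketch {
      protected Object connectionFactory; // installed by the hook
      protected Object tokenAspect;       // installed by the hook

      // Subclasses (hsftp) override this to install an SSL-aware factory
      // and their own token kind.
      protected void initConnectionFactoryAndTokenAspect() {
        // default: plain-HTTP factory, HFTP token kind
      }

      public final void initialize() {
        initConnectionFactoryAndTokenAspect(); // hook runs first
        // ...token selection can now rely on both fields being set
      }
    }
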
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java Sat Nov 16 01:06:27 2013
@@ -18,31 +18,14 @@
 
 package org.apache.hadoop.hdfs.web;
 
-import java.io.FileInputStream;
 import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URL;
-import java.security.KeyStore;
-import java.security.cert.X509Certificate;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.KeyManager;
-import javax.net.ssl.KeyManagerFactory;
-import javax.net.ssl.SSLContext;
-import javax.net.ssl.SSLSession;
-import javax.net.ssl.TrustManager;
-import javax.net.ssl.TrustManagerFactory;
-import javax.net.ssl.X509TrustManager;
+import java.security.GeneralSecurityException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.HdfsConfiguration;
-import org.apache.hadoop.util.Time;
+import org.apache.hadoop.io.Text;
 
 /**
  * An implementation of a protocol for accessing filesystems over HTTPS. The
@@ -55,9 +38,8 @@ import org.apache.hadoop.util.Time;
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class HsftpFileSystem extends HftpFileSystem {
-
-  private static final long MM_SECONDS_PER_DAY = 1000 * 60 * 60 * 24;
-  private volatile int ExpWarnDays = 0;
+  public static final Text TOKEN_KIND = new Text("HSFTP delegation");
+  public static final String SCHEME = "hsftp";
 
   /**
    * Return the protocol scheme for the FileSystem.
@@ -67,7 +49,7 @@ public class HsftpFileSystem extends Hft
    */
   @Override
   public String getScheme() {
-    return "hsftp";
+    return SCHEME;
   }
 
   /**
@@ -79,66 +61,17 @@ public class HsftpFileSystem extends Hft
   }
 
   @Override
-  public void initialize(URI name, Configuration conf) throws IOException {
-    super.initialize(name, conf);
-    setupSsl(conf);
-    ExpWarnDays = conf.getInt("ssl.expiration.warn.days", 30);
-  }
+  protected void initConnectionFactoryAndTokenAspect(Configuration conf) throws IOException {
+    tokenAspect = new TokenAspect<HftpFileSystem>(this, TOKEN_KIND);
 
-  /**
-   * Set up SSL resources
-   *
-   * @throws IOException
-   */
-  private static void setupSsl(Configuration conf) throws IOException {
-    Configuration sslConf = new HdfsConfiguration(false);
-    sslConf.addResource(conf.get(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
-                             DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
-    FileInputStream fis = null;
+    connectionFactory = new URLConnectionFactory(
+        URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT);
     try {
-      SSLContext sc = SSLContext.getInstance("SSL");
-      KeyManager[] kms = null;
-      TrustManager[] tms = null;
-      if (sslConf.get("ssl.client.keystore.location") != null) {
-        // initialize default key manager with keystore file and pass
-        KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509");
-        KeyStore ks = KeyStore.getInstance(sslConf.get(
-            "ssl.client.keystore.type", "JKS"));
-        char[] ksPass = sslConf.get("ssl.client.keystore.password", "changeit")
-            .toCharArray();
-        fis = new FileInputStream(sslConf.get("ssl.client.keystore.location",
-            "keystore.jks"));
-        ks.load(fis, ksPass);
-        kmf.init(ks, sslConf.get("ssl.client.keystore.keypassword", "changeit")
-            .toCharArray());
-        kms = kmf.getKeyManagers();
-        fis.close();
-        fis = null;
-      }
-      // initialize default trust manager with truststore file and pass
-      if (sslConf.getBoolean("ssl.client.do.not.authenticate.server", false)) {
-        // by pass trustmanager validation
-        tms = new DummyTrustManager[] { new DummyTrustManager() };
-      } else {
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance("PKIX");
-        KeyStore ts = KeyStore.getInstance(sslConf.get(
-            "ssl.client.truststore.type", "JKS"));
-        char[] tsPass = sslConf.get("ssl.client.truststore.password",
-            "changeit").toCharArray();
-        fis = new FileInputStream(sslConf.get("ssl.client.truststore.location",
-            "truststore.jks"));
-        ts.load(fis, tsPass);
-        tmf.init(ts);
-        tms = tmf.getTrustManagers();
-      }
-      sc.init(kms, tms, new java.security.SecureRandom());
-      HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
-    } catch (Exception e) {
-      throw new IOException("Could not initialize SSLContext", e);
-    } finally {
-      if (fis != null) {
-        fis.close();
-      }
+      connectionFactory.setConnConfigurator(URLConnectionFactory
+          .newSslConnConfigurator(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+              conf));
+    } catch (GeneralSecurityException e) {
+      throw new IOException(e);
     }
   }
 
@@ -147,70 +80,4 @@ public class HsftpFileSystem extends Hft
     return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
                             DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
   }
-
-  @Override
-  protected HttpURLConnection openConnection(String path, String query)
-      throws IOException {
-    query = addDelegationTokenParam(query);
-    final URL url = new URL(getUnderlyingProtocol(), nnUri.getHost(),
-        nnUri.getPort(), path + '?' + query);
-    HttpsURLConnection conn;
-    conn = (HttpsURLConnection)connectionFactory.openConnection(url);
-    // bypass hostname verification
-    conn.setHostnameVerifier(new DummyHostnameVerifier());
-    conn.setRequestMethod("GET");
-    conn.connect();
-
-    // check cert expiration date
-    final int warnDays = ExpWarnDays;
-    if (warnDays > 0) { // make sure only check once
-      ExpWarnDays = 0;
-      long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY + Time.now();
-      X509Certificate[] clientCerts = (X509Certificate[]) conn
-          .getLocalCertificates();
-      if (clientCerts != null) {
-        for (X509Certificate cert : clientCerts) {
-          long expTime = cert.getNotAfter().getTime();
-          if (expTime < expTimeThreshold) {
-            StringBuilder sb = new StringBuilder();
-            sb.append("\n Client certificate "
-                + cert.getSubjectX500Principal().getName());
-            int dayOffSet = (int) ((expTime - Time.now()) / MM_SECONDS_PER_DAY);
-            sb.append(" have " + dayOffSet + " days to expire");
-            LOG.warn(sb.toString());
-          }
-        }
-      }
-    }
-    return (HttpURLConnection) conn;
-  }
-
-  /**
-   * Dummy hostname verifier that is used to bypass hostname checking
-   */
-  protected static class DummyHostnameVerifier implements HostnameVerifier {
-    @Override
-    public boolean verify(String hostname, SSLSession session) {
-      return true;
-    }
-  }
-
-  /**
-   * Dummy trustmanager that is used to trust all server certificates
-   */
-  protected static class DummyTrustManager implements X509TrustManager {
-    @Override
-    public void checkClientTrusted(X509Certificate[] chain, String authType) {
-    }
-
-    @Override
-    public void checkServerTrusted(X509Certificate[] chain, String authType) {
-    }
-
-    @Override
-    public X509Certificate[] getAcceptedIssuers() {
-      return null;
-    }
-  }
-
 }

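With the hand-rolled keystore plumbing (and the trust-all DummyTrustManager and DummyHostnameVerifier) removed, an hsftp client now obtains its SSL material through SSLFactory, i.e. from the standard ssl-client.xml on the classpath. A hypothetical client under that assumption (host, port, and path are placeholders):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class HsftpClientExample {
      public static void main(String[] args) throws Exception {
        // Requires a valid ssl-client.xml (truststore location/password)
        // on the classpath; server certificates are now actually verified.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(
            URI.create("hsftp://nn.example.com:50470"), conf);
        System.out.println(fs.getFileStatus(new Path("/")));
      }
    }
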
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/TokenAspect.java Sat Nov 16 01:06:27 2013
@@ -57,6 +57,7 @@ final class TokenAspect<T extends FileSy
     @Override
     public boolean handleKind(Text kind) {
       return kind.equals(HftpFileSystem.TOKEN_KIND)
+          || kind.equals(HsftpFileSystem.TOKEN_KIND)
           || kind.equals(WebHdfsFileSystem.TOKEN_KIND);
     }
 
@@ -75,8 +76,11 @@ final class TokenAspect<T extends FileSy
       final InetSocketAddress address = SecurityUtil.getTokenServiceAddr(token);
       Text kind = token.getKind();
       final URI uri;
+
       if (kind.equals(HftpFileSystem.TOKEN_KIND)) {
         uri = DFSUtil.createUri(HftpFileSystem.SCHEME, address);
+      } else if (kind.equals(HsftpFileSystem.TOKEN_KIND)) {
+        uri = DFSUtil.createUri(HsftpFileSystem.SCHEME, address);
       } else if (kind.equals(WebHdfsFileSystem.TOKEN_KIND)) {
         uri = DFSUtil.createUri(WebHdfsFileSystem.SCHEME, address);
       } else {

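For the new branch to round-trip, DFSUtil.createUri only needs to rebuild a scheme://host:port URI from the token's service address, roughly as below (a sketch of the behavior relied on here, not DFSUtil's exact code):

    import java.net.InetSocketAddress;
    import java.net.URI;

    final class CreateUriSketch {
      // An HSFTP-kind token therefore reconnects via hsftp://, which speaks
      // HTTPS, matching the scheme the token was issued over.
      static URI createUri(String scheme, InetSocketAddress addr) {
        return URI.create(scheme + "://" + addr.getHostName() + ":" + addr.getPort());
      }
    }
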
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java Sat Nov 16 01:06:27 2013
@@ -22,6 +22,11 @@ import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
+import java.security.GeneralSecurityException;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -32,6 +37,7 @@ import org.apache.hadoop.security.UserGr
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.ssl.SSLFactory;
 
 /**
  * Utilities for handling URLs
@@ -64,6 +70,35 @@ public class URLConnectionFactory {
     }
   };
 
+  /**
+   * Create a new ConnectionConfigurator for SSL connections
+   */
+  static ConnectionConfigurator newSslConnConfigurator(final int timeout,
+      Configuration conf) throws IOException, GeneralSecurityException {
+    final SSLFactory factory;
+    final SSLSocketFactory sf;
+    final HostnameVerifier hv;
+
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+
+    return new ConnectionConfigurator() {
+      @Override
+      public HttpURLConnection configure(HttpURLConnection conn)
+          throws IOException {
+        if (conn instanceof HttpsURLConnection) {
+          HttpsURLConnection c = (HttpsURLConnection) conn;
+          c.setSSLSocketFactory(sf);
+          c.setHostnameVerifier(hv);
+        }
+        URLConnectionFactory.setTimeouts(conn, timeout);
+        return conn;
+      }
+    };
+  }
+
   public URLConnectionFactory(int socketTimeout) {
     this.socketTimeout = socketTimeout;
   }

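newSslConnConfigurator is package-private, so its intended callers live in org.apache.hadoop.hdfs.web, as HsftpFileSystem does above. A hedged usage sketch under that assumption (the URL is a placeholder):

    package org.apache.hadoop.hdfs.web; // needed: the method is package-private

    import java.net.HttpURLConnection;
    import java.net.URL;

    import org.apache.hadoop.conf.Configuration;

    public class SslConnConfiguratorExample {
      public static void main(String[] args) throws Exception {
        URLConnectionFactory factory = new URLConnectionFactory(
            URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT);
        factory.setConnConfigurator(URLConnectionFactory.newSslConnConfigurator(
            URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, new Configuration()));
        // Every connection the factory opens now gets the SSLSocketFactory,
        // hostname verifier, and timeouts applied before being returned.
        HttpURLConnection conn = (HttpURLConnection) factory.openConnection(
            new URL("https://nn.example.com:50470/"));
        System.out.println(conn.getConnectTimeout());
      }
    }
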
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpDelegationToken.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpDelegationToken.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpDelegationToken.java Sat Nov 16 01:06:27 2013
@@ -18,233 +18,77 @@
 
 package org.apache.hadoop.hdfs.web;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
-import static org.junit.Assert.*;
-
+import static org.mockito.Matchers.anyBoolean;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutputStream;
 import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.Socket;
+import java.net.HttpURLConnection;
 import java.net.URI;
-import java.security.PrivilegedExceptionAction;
+import java.net.URISyntaxException;
+import java.net.URL;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.web.HftpFileSystem;
-import org.apache.hadoop.hdfs.web.HsftpFileSystem;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.SecurityUtilTestHelper;
+import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
+import org.junit.Assert;
 import org.junit.Test;
+import org.mockito.Mockito;
 import org.mockito.internal.util.reflection.Whitebox;
 
 public class TestHftpDelegationToken {
 
+  /**
+   * Test whether HftpFileSystem maintain wire-compatibility for 0.20.203 when
+   * obtaining delegation token. See HDFS-5440 for more details.
+   */
   @Test
-  public void testHdfsDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    final Configuration conf = new Configuration();
-    conf.set(HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    UserGroupInformation.setConfiguration(conf);
-    UserGroupInformation user =
-      UserGroupInformation.createUserForTesting("oom",
-                                                new String[]{"memory"});
-    Token<?> token = new Token<TokenIdentifier>
-      (new byte[0], new byte[0],
-       DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-       new Text("127.0.0.1:8020"));
-    user.addToken(token);
-    Token<?> token2 = new Token<TokenIdentifier>
-      (null, null, new Text("other token"), new Text("127.0.0.1:8021"));
-    user.addToken(token2);
-    assertEquals("wrong tokens in user", 2, user.getTokens().size());
-    FileSystem fs =
-      user.doAs(new PrivilegedExceptionAction<FileSystem>() {
-         @Override
-    public FileSystem run() throws Exception {
-            return FileSystem.get(new URI("hftp://localhost:50470/"), conf);
-         }
-       });
-    assertSame("wrong kind of file system", HftpFileSystem.class,
-                 fs.getClass());
-    assertSame("wrong token", token,
-        Whitebox.getInternalState(fs, "renewToken"));
-  }
-
-  @Test
-  public void testSelectHftpDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    Configuration conf = new Configuration();
-    conf.setClass("fs.hftp.impl", HftpFileSystem.class, FileSystem.class);
-
-    int httpPort = 80;
-    int httpsPort = 443;
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, httpPort);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, httpsPort);
-
-    // test with implicit default port
-    URI fsUri = URI.create("hftp://localhost");
-    HftpFileSystem fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort, conf);
-
-    // test with explicit default port
-    // Make sure it uses the port from the hftp URI.
-    fsUri = URI.create("hftp://localhost:"+httpPort);
-    fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort, conf);
-
-    // test with non-default port
-    // Make sure it uses the port from the hftp URI.
-    fsUri = URI.create("hftp://localhost:"+(httpPort+1));
-    fs = (HftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpPort+1, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpPort + 1, conf);
-
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5);
-  }
-
-  @Test
-  public void testSelectHsftpDelegationToken() throws Exception {
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    Configuration conf = new Configuration();
-    conf.setClass("fs.hsftp.impl", HsftpFileSystem.class, FileSystem.class);
-
-    int httpPort = 80;
-    int httpsPort = 443;
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, httpPort);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, httpsPort);
-
-    // test with implicit default port
-    URI fsUri = URI.create("hsftp://localhost");
-    HsftpFileSystem fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf);
-
-    // test with explicit default port
-    fsUri = URI.create("hsftp://localhost:"+httpsPort);
-    fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort, conf);
-
-    // test with non-default port
-    fsUri = URI.create("hsftp://localhost:"+(httpsPort+1));
-    fs = (HsftpFileSystem) FileSystem.newInstance(fsUri, conf);
-    assertEquals(httpsPort+1, fs.getCanonicalUri().getPort());
-    checkTokenSelection(fs, httpsPort+1, conf);
-
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 5);
-  }
-
-
-  @Test
-  public void testInsecureRemoteCluster()  throws Exception {
-    final ServerSocket socket = new ServerSocket(0); // just reserve a port
-    socket.close();
+  public void testTokenCompatibilityFor203() throws IOException,
+      URISyntaxException, AuthenticationException {
     Configuration conf = new Configuration();
-    URI fsUri = URI.create("hsftp://localhost:"+socket.getLocalPort());
-    assertNull(FileSystem.newInstance(fsUri, conf).getDelegationToken(null));
-  }
-
-  @Test
-  public void testSecureClusterError()  throws Exception {
-    final ServerSocket socket = new ServerSocket(0);
-    Thread t = new Thread() {
-      @Override
-      public void run() {
-        while (true) { // fetching does a few retries
-          try {
-            Socket s = socket.accept();
-            s.getOutputStream().write(1234);
-            s.shutdownOutput();
-          } catch (Exception e) {
-            break;
-          }
-        }
-      }
-    };
-    t.start();
-
-    try {
-      Configuration conf = new Configuration();
-      URI fsUri = URI.create("hsftp://localhost:"+socket.getLocalPort());
-      Exception ex = null;
-      try {
-        FileSystem.newInstance(fsUri, conf).getDelegationToken(null);
-      } catch (Exception e) {
-        ex = e;
-      }
-      assertNotNull(ex);
-      assertNotNull(ex.getCause());
-      assertEquals("Remote host closed connection during handshake",
-                   ex.getCause().getMessage());
-    } finally {
-      t.interrupt();
-    }
-  }
+    HftpFileSystem fs = new HftpFileSystem();
 
-  private void checkTokenSelection(HftpFileSystem fs,
-                                   int port,
-                                   Configuration conf) throws IOException {
-    UserGroupInformation ugi =
-        UserGroupInformation.createUserForTesting(fs.getUri().getAuthority(), new String[]{});
-
-    @SuppressWarnings("unchecked")
-    TokenAspect<HftpFileSystem> aspect = (TokenAspect<HftpFileSystem>) Whitebox.getInternalState(fs, "tokenAspect");
-
-    // use ip-based tokens
-    SecurityUtilTestHelper.setTokenServiceUseIp(true);
-
-    // test fallback to hdfs token
-    Token<?> hdfsToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-        new Text("127.0.0.1:8020"));
-    ugi.addToken(hdfsToken);
-
-    // test fallback to hdfs token
-    Token<?> token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hdfsToken, token);
-
-    // test hftp is favored over hdfs
-    Token<?> hftpToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        HftpFileSystem.TOKEN_KIND, new Text("127.0.0.1:"+port));
-    ugi.addToken(hftpToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hftpToken, token);
-
-    // switch to using host-based tokens, no token should match
-    SecurityUtilTestHelper.setTokenServiceUseIp(false);
-    token = aspect.selectDelegationToken(ugi);
-    assertNull(token);
-
-    // test fallback to hdfs token
-    hdfsToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        DelegationTokenIdentifier.HDFS_DELEGATION_KIND,
-        new Text("localhost:8020"));
-    ugi.addToken(hdfsToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hdfsToken, token);
-
-    // test hftp is favored over hdfs
-    hftpToken = new Token<TokenIdentifier>(
-        new byte[0], new byte[0],
-        HftpFileSystem.TOKEN_KIND, new Text("localhost:"+port));
-    ugi.addToken(hftpToken);
-    token = aspect.selectDelegationToken(ugi);
-    assertNotNull(token);
-    assertEquals(hftpToken, token);
+    Token<?> token = new Token<TokenIdentifier>(new byte[0], new byte[0],
+        DelegationTokenIdentifier.HDFS_DELEGATION_KIND, new Text(
+            "127.0.0.1:8020"));
+    Credentials cred = new Credentials();
+    cred.addToken(HftpFileSystem.TOKEN_KIND, token);
+    ByteArrayOutputStream os = new ByteArrayOutputStream();
+    cred.write(new DataOutputStream(os));
+
+    HttpURLConnection conn = mock(HttpURLConnection.class);
+    doReturn(new ByteArrayInputStream(os.toByteArray())).when(conn)
+        .getInputStream();
+    doReturn(HttpURLConnection.HTTP_OK).when(conn).getResponseCode();
+
+    URLConnectionFactory factory = mock(URLConnectionFactory.class);
+    doReturn(conn).when(factory).openConnection(Mockito.<URL> any(),
+        anyBoolean());
+
+    fs.initialize(new URI("hftp://127.0.0.1:8020"), conf);
+    fs.connectionFactory = factory;
+
+    UserGroupInformation ugi = UserGroupInformation.createUserForTesting("foo",
+        new String[] { "bar" });
+
+    TokenAspect<HftpFileSystem> tokenAspect = new TokenAspect<HftpFileSystem>(
+        fs, HftpFileSystem.TOKEN_KIND);
+
+    tokenAspect.initDelegationToken(ugi);
+    tokenAspect.ensureTokenInitialized();
+
+    Assert.assertSame(HftpFileSystem.TOKEN_KIND, fs.getRenewToken().getKind());
+
+    Token<?> tok = (Token<?>) Whitebox.getInternalState(fs, "delegationToken");
+    Assert.assertNotSame("Not making a copy of the remote token", token, tok);
+    Assert.assertEquals(token.getKind(), tok.getKind());
   }
 }

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java?rev=1542439&r1=1542438&r2=1542439&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java Sat Nov 16 01:06:27 2013
@@ -22,6 +22,7 @@ import static org.junit.Assert.assertEqu
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
@@ -29,23 +30,22 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLConnection;
-import java.util.Random;
 
-import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.BlockLocation;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.util.ServletUtil;
-import org.apache.log4j.Level;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -53,8 +53,10 @@ import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHftpFileSystem {
-  private static final Random RAN = new Random();
-
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestHftpFileSystem.class.getSimpleName();
+  private static String keystoresDir;
+  private static String sslConfDir;
   private static Configuration config = null;
   private static MiniDFSCluster cluster = null;
   private static String blockPoolId = null;
@@ -83,25 +85,28 @@ public class TestHftpFileSystem {
       new Path("/foo\">bar/foo\">bar"), };
 
   @BeforeClass
-  public static void setUp() throws IOException {
-    ((Log4JLogger) HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
-
-    final long seed = RAN.nextLong();
-    System.out.println("seed=" + seed);
-    RAN.setSeed(seed);
-
+  public static void setUp() throws Exception {
     config = new Configuration();
     cluster = new MiniDFSCluster.Builder(config).numDataNodes(2).build();
     blockPoolId = cluster.getNamesystem().getBlockPoolId();
    hftpUri = "hftp://"
         + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestHftpFileSystem.class);
+
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, config, false);
   }
 
   @AfterClass
-  public static void tearDown() throws IOException {
+  public static void tearDown() throws Exception {
     if (cluster != null) {
       cluster.shutdown();
     }
+    FileUtil.fullyDelete(new File(BASEDIR));
+    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
   }
 
   @Before
@@ -352,9 +357,12 @@ public class TestHftpFileSystem {
     Configuration conf = new Configuration();
-    URI uri = URI.create("hftp://localhost");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
-    URLConnection conn = fs.connectionFactory.openConnection(new URL("http://localhost"));
-    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, conn.getConnectTimeout());
-    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT, conn.getReadTimeout());
+    URLConnection conn = fs.connectionFactory.openConnection(new URL(
+        "http://localhost"));
+    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+        conn.getConnectTimeout());
+    assertEquals(URLConnectionFactory.DEFAULT_SOCKET_TIMEOUT,
+        conn.getReadTimeout());
   }
 
   // /

