This is an automated email from the ASF dual-hosted git repository.

eyang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new d78854b  HDFS-14434. Ignore user.name query parameter in secure WebHDFS. Contributed by KWON BYUNGCHANG
d78854b is described below

commit d78854b928bb877f26b11b5b212a100a79941f35
Author: Eric Yang <ey...@apache.org>
AuthorDate: Tue May 28 17:31:35 2019 -0400

    HDFS-14434.  Ignore user.name query parameter in secure WebHDFS.
                 Contributed by KWON BYUNGCHANG
---
 .../apache/hadoop/hdfs/web/WebHdfsFileSystem.java  |  16 +-
 .../hadoop/hdfs/server/common/JspHelper.java       |   8 +-
 .../hadoop/hdfs/server/common/TestJspHelper.java   |  88 +++++----
 .../apache/hadoop/hdfs/web/TestWebHdfsTokens.java  | 217 +++++++++++++--------
 .../org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java |  47 +++--
 5 files changed, 236 insertions(+), 140 deletions(-)
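
Net effect: on a secure cluster the WebHDFS client now authenticates via
SPNEGO or a delegation token and stops appending the user.name query
parameter, while the server ignores user.name/doas whenever a stronger
credential is present. An illustrative sketch only (host and user are
invented, not taken from this commit; compare the TestWebHdfsUrl changes
below):

    secure cluster, before:
      https://nn.example.com:9871/webhdfs/v1/?op=GETDELEGATIONTOKEN&user.name=alice
    secure cluster, after:
      https://nn.example.com:9871/webhdfs/v1/?op=GETDELEGATIONTOKEN
    insecure cluster, unchanged:
      http://nn.example.com:9870/webhdfs/v1/?op=GETDELEGATIONTOKEN&user.name=alice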

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index fe30a9a..e8049e9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -170,6 +170,7 @@ public class WebHdfsFileSystem extends FileSystem
   private InetSocketAddress nnAddrs[];
   private int currentNNAddrIndex;
   private boolean disallowFallbackToInsecureCluster;
+  private boolean isInsecureCluster;
   private String restCsrfCustomHeader;
   private Set<String> restCsrfMethodsToIgnore;
 
@@ -282,6 +283,7 @@ public class WebHdfsFileSystem extends FileSystem
 
     this.workingDir = makeQualified(new Path(getHomeDirectoryString(ugi)));
     this.canRefreshDelegationToken = UserGroupInformation.isSecurityEnabled();
+    this.isInsecureCluster = !this.canRefreshDelegationToken;
     this.disallowFallbackToInsecureCluster = !conf.getBoolean(
         CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_KEY,
         CommonConfigurationKeys.IPC_CLIENT_FALLBACK_TO_SIMPLE_AUTH_ALLOWED_DEFAULT);
@@ -367,6 +369,7 @@ public class WebHdfsFileSystem extends FileSystem
             LOG.debug("Fetched new token: {}", token);
           } else { // security is disabled
             canRefreshDelegationToken = false;
+            isInsecureCluster = true;
           }
         }
       }
@@ -413,8 +416,7 @@ public class WebHdfsFileSystem extends FileSystem
     if (cachedHomeDirectory == null) {
       final HttpOpParam.Op op = GetOpParam.Op.GETHOMEDIRECTORY;
       try {
-        String pathFromDelegatedFS = new FsPathResponseRunner<String>(op, null,
-            new UserParam(ugi)) {
+        String pathFromDelegatedFS = new FsPathResponseRunner<String>(op, null){
           @Override
           String decodeResponse(Map<?, ?> json) throws IOException {
             return JsonUtilClient.getPath(json);
@@ -576,7 +578,8 @@ public class WebHdfsFileSystem extends FileSystem
     return url;
   }
 
-  Param<?,?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
+  private synchronized Param<?, ?>[] getAuthParameters(final HttpOpParam.Op op)
+      throws IOException {
     List<Param<?,?>> authParams = Lists.newArrayList();
     // Skip adding delegation token for token operations because these
     // operations require authentication.
@@ -593,7 +596,12 @@ public class WebHdfsFileSystem extends FileSystem
         authParams.add(new DoAsParam(userUgi.getShortUserName()));
         userUgi = realUgi;
       }
-      authParams.add(new UserParam(userUgi.getShortUserName()));
+      UserParam userParam = new UserParam((userUgi.getShortUserName()));
+
+      //in insecure, use user.name parameter, in secure, use spnego auth
+      if(isInsecureCluster) {
+        authParams.add(userParam);
+      }
     }
     return authParams.toArray(new Param<?,?>[0]);
   }
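
A minimal sketch of the client-side rule above, with invented names (the
committed logic lives in WebHdfsFileSystem#getAuthParameters and the new
isInsecureCluster field):

    import java.util.ArrayList;
    import java.util.List;

    // Sketch only: isInsecureCluster mirrors the new field, seeded from
    // UserGroupInformation.isSecurityEnabled() at initialize() time.
    class AuthParamsSketch {
      private final boolean isInsecureCluster;

      AuthParamsSketch(boolean securityEnabled) {
        this.isInsecureCluster = !securityEnabled;
      }

      List<String> getAuthParameters(String shortUserName) {
        List<String> params = new ArrayList<>();
        // Insecure cluster: the caller identifies itself via user.name.
        // Secure cluster: SPNEGO/delegation tokens carry the identity,
        // so the parameter is omitted entirely.
        if (isInsecureCluster) {
          params.add("user.name=" + shortUserName);
        }
        return params;
      }
    }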
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index eb488e8..2c65c3f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -118,12 +118,9 @@ public class JspHelper {
       remoteUser = request.getRemoteUser();
      final String tokenString = request.getParameter(DELEGATION_PARAMETER_NAME);
       if (tokenString != null) {
-        // Token-based connections need only verify the effective user, and
-        // disallow proxying to different user.  Proxy authorization checks
-        // are not required since the checks apply to issuing a token.
+
+        // user.name, doas param is ignored in the token-based auth
         ugi = getTokenUGI(context, request, tokenString, conf);
-        checkUsername(ugi.getShortUserName(), usernameFromQuery);
-        checkUsername(ugi.getShortUserName(), doAsUserFromQuery);
       } else if (remoteUser == null) {
         throw new IOException(
             "Security enabled but user not authenticated by filter");
@@ -137,7 +134,6 @@ public class JspHelper {
 
     if (ugi == null) { // security is off, or there's no token
       ugi = UserGroupInformation.createRemoteUser(remoteUser);
-      checkUsername(ugi.getShortUserName(), usernameFromQuery);
       if (UserGroupInformation.isSecurityEnabled()) {
         // This is not necessarily true, could have been auth'ed by user-facing
         // filter
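
Server side, the change reduces JspHelper.getUGI() to a precedence order
instead of cross-checking; a hedged sketch (class and method invented for
illustration, condensed from the hunks above):

    // Which identity source wins after this patch.
    final class UgiSourceSketch {
      static String identitySource(String tokenString, String remoteUser,
          boolean securityEnabled) {
        if (tokenString != null) {
          return "delegation token";      // user.name and doas are ignored
        }
        if (remoteUser != null) {
          return "auth filter (SPNEGO)";  // user.name is ignored
        }
        if (securityEnabled) {
          throw new IllegalStateException(
              "Security enabled but user not authenticated by filter");
        }
        return "user.name parameter";     // security off only
      }
    }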
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
index 119db8c..5a1661c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
@@ -158,7 +158,7 @@ public class TestJspHelper {
    Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>(
         dtId, new DummySecretManager(0, 0, 0, 0));
     String tokenString = token.encodeToUrlString();
-    
+
     // token with no auth-ed user
     request = getMockRequest(null, null, null);
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
@@ -168,7 +168,7 @@ public class TestJspHelper {
     Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
     Assert.assertEquals(ugi.getShortUserName(), user);
     checkUgiFromToken(ugi);
-    
+
     // token with auth-ed user
     request = getMockRequest(realUser, null, null);
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
@@ -198,32 +198,40 @@ public class TestJspHelper {
     Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
     Assert.assertEquals(ugi.getShortUserName(), user);    
     checkUgiFromToken(ugi);
-    
-    // can't proxy with a token!
+
+    // if present token, ignore doas parameter
     request = getMockRequest(null, null, "rogue");
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
-    try {
-      JspHelper.getUGI(context, request, conf);
-      Assert.fail("bad request allowed");
-    } catch (IOException ioe) {
-      Assert.assertEquals(
-          "Usernames not matched: name=rogue != expected="+user,
-          ioe.getMessage());
-    }
-    
-    // can't proxy with a token!
+
+    ugi = JspHelper.getUGI(context, request, conf);
+    Assert.assertNotNull(ugi.getRealUser());
+    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
+    Assert.assertEquals(ugi.getShortUserName(), user);
+    checkUgiFromToken(ugi);
+
+    // if present token, ignore user.name parameter
+    request = getMockRequest(null, "rogue", null);
+    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+        tokenString);
+
+    ugi = JspHelper.getUGI(context, request, conf);
+    Assert.assertNotNull(ugi.getRealUser());
+    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
+    Assert.assertEquals(ugi.getShortUserName(), user);
+    checkUgiFromToken(ugi);
+
+    // if present token, ignore user.name and doas parameter
     request = getMockRequest(null, user, "rogue");
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
-    try {
-      JspHelper.getUGI(context, request, conf);
-      Assert.fail("bad request allowed");
-    } catch (IOException ioe) {
-      Assert.assertEquals(
-          "Usernames not matched: name=rogue != expected="+user,
-          ioe.getMessage());
-    }
+
+    ugi = JspHelper.getUGI(context, request, conf);
+    Assert.assertNotNull(ugi.getRealUser());
+    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
+    Assert.assertEquals(ugi.getShortUserName(), user);
+    checkUgiFromToken(ugi);
+
   }
   
   @Test
@@ -271,16 +279,12 @@ public class TestJspHelper {
     Assert.assertEquals(ugi.getShortUserName(), realUser);
     checkUgiFromAuth(ugi);
     
-    // ugi for remote user != real user 
+    // if there is remote user via SPNEGO, ignore user.name param
     request = getMockRequest(realUser, user, null);
-    try {
-      JspHelper.getUGI(context, request, conf);
-      Assert.fail("bad request allowed");
-    } catch (IOException ioe) {
-      Assert.assertEquals(
-          "Usernames not matched: name="+user+" != expected="+realUser,
-          ioe.getMessage());
-    }
+    ugi = JspHelper.getUGI(context, request, conf);
+    Assert.assertNull(ugi.getRealUser());
+    Assert.assertEquals(ugi.getShortUserName(), realUser);
+    checkUgiFromAuth(ugi);
   }
   
   @Test
@@ -335,17 +339,16 @@ public class TestJspHelper {
     Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
     Assert.assertEquals(ugi.getShortUserName(), user);
     checkUgiFromAuth(ugi);
-    
-    // proxy ugi for user via remote user != real user
+
+    // if there is remote user via SPNEGO, ignore user.name, doas param
     request = getMockRequest(realUser, user, user);
-    try {
-      JspHelper.getUGI(context, request, conf);
-      Assert.fail("bad request allowed");
-    } catch (IOException ioe) {
-      Assert.assertEquals(
-          "Usernames not matched: name="+user+" != expected="+realUser,
-          ioe.getMessage());
-    }
+    ugi = JspHelper.getUGI(context, request, conf);
+    Assert.assertNotNull(ugi.getRealUser());
+    Assert.assertEquals(ugi.getRealUser().getShortUserName(), realUser);
+    Assert.assertEquals(ugi.getShortUserName(), user);
+    checkUgiFromAuth(ugi);
+
+
     
     // try to get get a proxy user with unauthorized user
     try {
@@ -368,6 +371,9 @@ public class TestJspHelper {
     }
   }
 
+
+
+
  private HttpServletRequest getMockRequest(String remoteUser, String user, String doAs) {
     HttpServletRequest request = mock(HttpServletRequest.class);
     when(request.getParameter(UserParam.NAME)).thenReturn(user);
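
For orientation while reading these tests, getMockRequest maps the three
identity sources onto a Mockito mock roughly as follows (the mock creation
and the user.name stub are visible above; the doas and getRemoteUser stubs
are assumed from the test's usage):

    HttpServletRequest request = mock(HttpServletRequest.class);
    when(request.getParameter(UserParam.NAME)).thenReturn(user);  // user.name
    when(request.getParameter(DoAsParam.NAME)).thenReturn(doAs);  // doas
    when(request.getRemoteUser()).thenReturn(remoteUser);         // auth filter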
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
index 0b1dfa5..b5d4410 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java
@@ -20,20 +20,37 @@ package org.apache.hadoop.hdfs.web;
 
 import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS;
 import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.SIMPLE;
+import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.DFS_DATA_TRANSFER_PROTECTION_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HTTP_POLICY_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.*;
 
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
 import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URL;
 import java.net.URLConnection;
 import java.security.PrivilegedExceptionAction;
 import java.util.Map;
+import java.util.Properties;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -46,17 +63,18 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.web.resources.*;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.authentication.util.KerberosName;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.Whitebox;
 import org.apache.hadoop.security.token.Token;
+import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -65,16 +83,95 @@ public class TestWebHdfsTokens {
   private static Configuration conf;
   URI uri = null;
 
+  //secure cluster
+  private static MiniKdc kdc = null;
+  private static File baseDir;
+  private static File keytabFile;
+  private static String username = "webhdfs-tokens-test";
+  private static String principal;
+  private static String keystoresDir;
+  private static String sslConfDir;
+
   @BeforeClass
   public static void setUp() {
     conf = new Configuration();
+  }
+
+  @AfterClass
+  public static void destroy() throws Exception {
+    if (kdc != null) {
+      kdc.stop();
+      FileUtil.fullyDelete(baseDir);
+      KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
+    }
+  }
+
+  private static void initEnv(){
     SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
-    UserGroupInformation.setConfiguration(conf);    
+    UserGroupInformation.setConfiguration(conf);
     UserGroupInformation.setLoginUser(
         UserGroupInformation.createUserForTesting(
             "LoginUser", new String[]{"supergroup"}));
   }
 
+  private static void initSecureConf(Configuration secureConf)
+      throws Exception {
+
+    baseDir = GenericTestUtils.getTestDir(
+        TestWebHdfsTokens.class.getSimpleName());
+    FileUtil.fullyDelete(baseDir);
+    assertTrue(baseDir.mkdirs());
+
+    Properties kdcConf = MiniKdc.createConf();
+    kdc = new MiniKdc(kdcConf, baseDir);
+    kdc.start();
+
+    SecurityUtil.setAuthenticationMethod(
+        UserGroupInformation.AuthenticationMethod.KERBEROS, secureConf);
+    UserGroupInformation.setConfiguration(secureConf);
+    KerberosName.resetDefaultRealm();
+    assertTrue("Expected secureConfiguration to enable security",
+        UserGroupInformation.isSecurityEnabled());
+
+    keytabFile = new File(baseDir, username + ".keytab");
+    String keytab = keytabFile.getAbsolutePath();
+    // Windows will not reverse name lookup "127.0.0.1" to "localhost".
+    String krbInstance = Path.WINDOWS ? "127.0.0.1" : "localhost";
+    principal = username + "/" + krbInstance + "@" + kdc.getRealm();
+    String spnegoPrincipal = "HTTP/" + krbInstance + "@" + kdc.getRealm();
+    kdc.createPrincipal(keytabFile, username, username + "/" + krbInstance,
+        "HTTP/" + krbInstance);
+
+    secureConf.set(DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, principal);
+    secureConf.set(DFS_NAMENODE_KEYTAB_FILE_KEY, keytab);
+    secureConf.set(DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, principal);
+    secureConf.set(DFS_DATANODE_KEYTAB_FILE_KEY, keytab);
+    secureConf.set(DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
+        spnegoPrincipal);
+    secureConf.setBoolean(DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, true);
+    secureConf.set(DFS_DATA_TRANSFER_PROTECTION_KEY, "authentication");
+    secureConf.set(DFS_HTTP_POLICY_KEY,
+        HttpConfig.Policy.HTTP_AND_HTTPS.name());
+    secureConf.set(DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    secureConf.set(DFS_NAMENODE_HTTP_ADDRESS_KEY, "localhost:0");
+    secureConf.set(DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
+    secureConf.set(DFS_DATANODE_HTTP_ADDRESS_KEY, "localhost:0");
+    secureConf.setBoolean(DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
+    secureConf.setBoolean(IGNORE_SECURE_PORTS_FOR_TESTING_KEY, true);
+
+    keystoresDir = baseDir.getAbsolutePath();
+    sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir,
+        secureConf, false);
+
+    secureConf.set(DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getClientSSLConfigFileName());
+    secureConf.set(DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+        KeyStoreTestUtil.getServerSSLConfigFileName());
+  }
+
+
+
   private WebHdfsFileSystem spyWebhdfsInSecureSetup() throws IOException {
     WebHdfsFileSystem fsOrig = new WebHdfsFileSystem();
     fsOrig.initialize(URI.create("webhdfs://127.0.0.1:0"), conf);
@@ -84,6 +181,7 @@ public class TestWebHdfsTokens {
 
   @Test(timeout = 5000)
   public void testTokenForNonTokenOp() throws IOException {
+    initEnv();
     WebHdfsFileSystem fs = spyWebhdfsInSecureSetup();
     Token<?> token = mock(Token.class);
     doReturn(token).when(fs).getDelegationToken(null);
@@ -104,16 +202,19 @@ public class TestWebHdfsTokens {
 
   @Test(timeout = 5000)
   public void testNoTokenForGetToken() throws IOException {
+    initEnv();
     checkNoTokenForOperation(GetOpParam.Op.GETDELEGATIONTOKEN);
   }
 
   @Test(timeout = 5000)
   public void testNoTokenForRenewToken() throws IOException {
+    initEnv();
     checkNoTokenForOperation(PutOpParam.Op.RENEWDELEGATIONTOKEN);
   }
 
   @Test(timeout = 5000)
   public void testNoTokenForCancelToken() throws IOException {
+    initEnv();
     checkNoTokenForOperation(PutOpParam.Op.CANCELDELEGATIONTOKEN);
   }
 
@@ -162,86 +263,42 @@ public class TestWebHdfsTokens {
   @Test
   public void testLazyTokenFetchForWebhdfs() throws Exception {
     MiniDFSCluster cluster = null;
-    WebHdfsFileSystem fs = null;
+    UserGroupInformation ugi = null;
     try {
       final Configuration clusterConf = new HdfsConfiguration(conf);
-      SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
-      clusterConf.setBoolean(DFSConfigKeys
-          .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
+      initSecureConf(clusterConf);
 
-      // trick the NN into thinking security is enabled w/o it trying
-      // to login from a keytab
-      UserGroupInformation.setConfiguration(clusterConf);
      cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
       cluster.waitActive();
-      SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
-      UserGroupInformation.setConfiguration(clusterConf);
-      
+
+      ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+          principal, keytabFile.getAbsolutePath());
+
+      //test with swebhdfs
+      uri = DFSUtil.createUri(
+          "swebhdfs", cluster.getNameNode().getHttpsAddress());
+      validateLazyTokenFetch(ugi, clusterConf);
+
+      //test with webhdfs
       uri = DFSUtil.createUri(
           "webhdfs", cluster.getNameNode().getHttpAddress());
-      validateLazyTokenFetch(clusterConf);
+      validateLazyTokenFetch(ugi, clusterConf);
+
     } finally {
-      IOUtils.cleanup(null, fs);
       if (cluster != null) {
         cluster.shutdown();
       }
+
+      // Reset UGI so that other tests are not affected.
+      UserGroupInformation.reset();
+      UserGroupInformation.setConfiguration(new Configuration());
     }
   }
-  
-  @Test
-  public void testLazyTokenFetchForSWebhdfs() throws Exception {
-    MiniDFSCluster cluster = null;
-    SWebHdfsFileSystem fs = null;
-    String keystoresDir;
-    String sslConfDir;
-    try {
-      final Configuration clusterConf = new HdfsConfiguration(conf);
-      SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
-      clusterConf.setBoolean(DFSConfigKeys
-           .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
-      String baseDir =
-          GenericTestUtils.getTempPath(TestWebHdfsTokens.class.getSimpleName());
-
-      clusterConf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HttpConfig.Policy.HTTPS_ONLY.name());
-      clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
-      clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
-         
-      File base = new File(baseDir);
-      FileUtil.fullyDelete(base);
-      base.mkdirs();
-      keystoresDir = new File(baseDir).getAbsolutePath();
-      sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class);
-      KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false);
-      clusterConf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,
-          KeyStoreTestUtil.getClientSSLConfigFileName());
-      clusterConf.set(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-          KeyStoreTestUtil.getServerSSLConfigFileName());
-
-      // trick the NN into thinking security is enabled w/o it trying
-      // to login from a keytab
-      UserGroupInformation.setConfiguration(clusterConf);
-      cluster = new MiniDFSCluster.Builder(clusterConf).numDataNodes(1).build();
-      cluster.waitActive();
-      InetSocketAddress addr = cluster.getNameNode().getHttpsAddress();
-      String nnAddr = NetUtils.getHostPortString(addr);
-      clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, nnAddr);
-      SecurityUtil.setAuthenticationMethod(KERBEROS, clusterConf);
-      UserGroupInformation.setConfiguration(clusterConf);
-      
-      uri = DFSUtil.createUri(
-        "swebhdfs", cluster.getNameNode().getHttpsAddress());
-      validateLazyTokenFetch(clusterConf);
-      } finally {
-        IOUtils.cleanup(null, fs);
-        if (cluster != null) {
-          cluster.shutdown();
-        }
-     }
-    KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, sslConfDir);
-  }
+
 
   @Test
   public void testSetTokenServiceAndKind() throws Exception {
+    initEnv();
     MiniDFSCluster cluster = null;
 
     try {
@@ -296,16 +353,20 @@ public class TestWebHdfsTokens {
     }
   }
   
-  private void validateLazyTokenFetch(final Configuration clusterConf) throws Exception{
-    final String testUser = "DummyUser";
-    UserGroupInformation ugi = UserGroupInformation.createUserForTesting(
-      testUser, new String[]{"supergroup"});
-    WebHdfsFileSystem fs = ugi.doAs(new PrivilegedExceptionAction<WebHdfsFileSystem>() {
-    @Override
-      public WebHdfsFileSystem run() throws IOException {
-        return spy((WebHdfsFileSystem) FileSystem.newInstance(uri, clusterConf));
-         }
-    });
+  private void validateLazyTokenFetch(UserGroupInformation ugi,
+      final Configuration clusterConf) throws Exception {
+
+    String testUser = ugi.getShortUserName();
+
+    WebHdfsFileSystem fs = ugi.doAs(
+        new PrivilegedExceptionAction<WebHdfsFileSystem>() {
+        @Override
+        public WebHdfsFileSystem run() throws IOException {
+          return spy((WebHdfsFileSystem) FileSystem.newInstance(uri,
+              clusterConf));
+        }
+      });
+
     // verify token ops don't get a token
     Assert.assertNull(fs.getRenewToken());
     Token<?> token = fs.getDelegationToken(null);
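
The consolidated test now logs in from the MiniKdc keytab once and drives
both schemes with the same UGI; schematically (condensed from the hunks
above, not new code):

    UserGroupInformation ugi = UserGroupInformation
        .loginUserFromKeytabAndReturnUGI(principal, keytabFile.getAbsolutePath());
    // swebhdfs against the HTTPS address, then webhdfs against HTTP;
    // the finally block resets UGI so later tests start clean.
    uri = DFSUtil.createUri("swebhdfs", cluster.getNameNode().getHttpsAddress());
    validateLazyTokenFetch(ugi, clusterConf);
    uri = DFSUtil.createUri("webhdfs", cluster.getNameNode().getHttpAddress());
    validateLazyTokenFetch(ugi, clusterConf);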
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
index 02a68ea..449f2c6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hdfs.web;
 import static org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod.KERBEROS;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 
 import java.io.IOException;
@@ -144,37 +145,47 @@ public class TestWebHdfsUrl {
     Path fsPath = new Path("/");
     String tokenString = webhdfs.getDelegationToken().encodeToUrlString();
 
+    String userParam = new UserParam(ugi.getShortUserName()).toString();
+
     // send user
     URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        getTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString()
         },
         getTokenUrl);
 
+
+
     // send user
     URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        renewTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString(),
         },
         renewTokenUrl);
 
+
+
     // send token
     URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        cancelTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString(),
         },
         cancelTokenUrl);
-    
+
+
     // send token
     URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
     checkQueryParams(
@@ -190,14 +201,16 @@ public class TestWebHdfsUrl {
     // send user
     cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        cancelTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString(),
         },
         cancelTokenUrl);
 
+
     // send user
     fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
     checkQueryParams(
@@ -225,40 +238,50 @@ public class TestWebHdfsUrl {
     Path fsPath = new Path("/");
     String tokenString = webhdfs.getDelegationToken().encodeToUrlString();
 
+    String userParam = new UserParam(ugi.getRealUser().
+        getShortUserName()).toString();
+
     // send real+effective
     URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        getTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getRealUser().getShortUserName()).toString(),
             new DoAsParam(ugi.getShortUserName()).toString()
         },
         getTokenUrl);
 
+
+
     // send real+effective
     URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        renewTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getRealUser().getShortUserName()).toString(),
             new DoAsParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString(),
         },
         renewTokenUrl);
 
+
     // send token
     URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        cancelTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getRealUser().getShortUserName()).toString(),
             new DoAsParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString(),
         },
         cancelTokenUrl);
-    
+
+
     // send token
     URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
     checkQueryParams(
@@ -274,15 +297,17 @@ public class TestWebHdfsUrl {
     // send real+effective
     cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
         fsPath, new TokenArgumentParam(tokenString));
+    assertTrue("secure webhdfs SHOULD NOT use user.name parameter",
+        cancelTokenUrl.toString().indexOf(userParam) == -1);
     checkQueryParams(
         new String[]{
             PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
-            new UserParam(ugi.getRealUser().getShortUserName()).toString(),
             new DoAsParam(ugi.getShortUserName()).toString(),
             new TokenArgumentParam(tokenString).toString()
         },
         cancelTokenUrl);
-    
+
+
     // send real+effective
     fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
     checkQueryParams(


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org
