Author: jitendra
Date: Wed Nov 2 21:31:38 2011
New Revision: 1196812

URL: http://svn.apache.org/viewvc?rev=1196812&view=rev
Log:
Merged r1196434 and r1196386 from trunk for HADOOP-7792 and HDFS-2416.
Added:
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/TokenArgumentParam.java
      - copied unchanged from r1196434, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/TokenArgumentParam.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
      - copied unchanged from r1196434, hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
Modified:
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java
    hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Nov 2 21:31:38 2011
@@ -1176,6 +1176,8 @@ Release 0.23.0 - 2011-11-01
 
     HDFS-2522. Disable TestDfsOverAvroRpc test. (suresh)
 
+    HDFS-2416. distcp with a webhdfs uri on a secure cluster fails. (jitendra)
+
   BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.
Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/ByteRangeInputStream.java Wed Nov 2 21:31:38 2011
@@ -40,13 +40,13 @@ import org.apache.hadoop.hdfs.web.resour
 
 public class ByteRangeInputStream extends FSInputStream {
   /**
-   * This class wraps a URL to allow easy mocking when testing. The URL class
-   * cannot be easily mocked because it is public.
+   * This class wraps a URL and provides method to open connection.
+   * It can be overridden to change how a connection is opened.
    */
-  static class URLOpener {
+  public static class URLOpener {
    protected URL url;
    /** The url with offset parameter */
-    private URL offsetUrl;
+    protected URL offsetUrl;
 
    public URLOpener(URL u) {
      url = u;
@@ -60,7 +60,7 @@ public class ByteRangeInputStream extend
      return url;
    }
 
-    HttpURLConnection openConnection() throws IOException {
+    protected HttpURLConnection openConnection() throws IOException {
      return (HttpURLConnection)offsetUrl.openConnection();
    }
 
@@ -125,7 +125,13 @@ public class ByteRangeInputStream extend
    this(new URLOpener(url), new URLOpener(null));
  }
 
-  ByteRangeInputStream(URLOpener o, URLOpener r) {
+  /**
+   * Create with the specified URLOpeners. Original url is used to open the
+   * stream for the first time. Resolved url is used in subsequent requests.
+   * @param o Original url
+   * @param r Resolved url
+   */
+  public ByteRangeInputStream(URLOpener o, URLOpener r) {
    this.originalURL = o;
    this.resolvedURL = r;
  }
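The visibility changes above are what turn URLOpener into an extension point: the class is now public and openConnection() protected, so a caller can substitute its own connection logic without touching ByteRangeInputStream itself. WebHdfsFileSystem later in this commit does exactly that to reuse its SPNEGO-aware connection code. A minimal sketch of such an override is below; CustomUrlOpener is an illustrative name, not part of this patch.

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.hdfs.ByteRangeInputStream;

// Hypothetical subclass: opens the offset URL through custom logic
// (e.g. attaching authentication) instead of the default behavior.
class CustomUrlOpener extends ByteRangeInputStream.URLOpener {
  CustomUrlOpener(URL u) {
    super(u);
  }

  @Override
  protected HttpURLConnection openConnection() throws IOException {
    // Decorate or replace the connection here before returning it.
    return (HttpURLConnection) offsetUrl.openConnection();
  }
}

// Usage (now possible because the constructor is public):
//   ByteRangeInputStream in = new ByteRangeInputStream(
//       new CustomUrlOpener(url), new CustomUrlOpener(null));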
Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java Wed Nov 2 21:31:38 2011
@@ -53,6 +53,7 @@ import org.apache.hadoop.hdfs.protocol.L
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
@@ -552,6 +553,13 @@ public class JspHelper {
       DataInputStream in = new DataInputStream(buf);
       DelegationTokenIdentifier id = new DelegationTokenIdentifier();
       id.readFields(in);
+      if (context != null) {
+        final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
+        if (nn != null) {
+          // Verify the token.
+          nn.getNamesystem().verifyToken(id, token.getPassword());
+        }
+      }
       ugi = id.getUser();
       checkUsername(ugi.getShortUserName(), usernameFromQuery);
       checkUsername(ugi.getShortUserName(), user);

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java Wed Nov 2 21:31:38 2011
@@ -4371,4 +4371,15 @@ public class FSNamesystem implements Nam
   public BlockManager getBlockManager() {
     return blockManager;
   }
+
+  /**
+   * Verifies that the given identifier and password are valid and match.
+   * @param identifier Token identifier.
+   * @param password Password in the token.
+   * @throws InvalidToken
+   */
+  public synchronized void verifyToken(DelegationTokenIdentifier identifier,
+      byte[] password) throws InvalidToken {
+    getDelegationTokenSecretManager().verifyToken(identifier, password);
+  }
 }

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java Wed Nov 2 21:31:38 2011
@@ -68,6 +68,7 @@ import org.apache.hadoop.hdfs.web.resour
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
 import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -104,7 +105,7 @@ public class NamenodeWebHdfsMethods {
   public static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
 
   private static final UriFsPathParam ROOT = new UriFsPathParam("");
-  
+
   private static final ThreadLocal<String> REMOTE_ADDRESS = new ThreadLocal<String>();
 
   /** @return the remote client address. */
@@ -224,11 +225,13 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(AccessTimeParam.NAME) @DefaultValue(AccessTimeParam.DEFAULT)
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
-          final RenameOptionSetParam renameOptions
+          final RenameOptionSetParam renameOptions,
+      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
+          final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
     return put(ugi, delegation, ROOT, op, destination, owner, group,
         permission, overwrite, bufferSize, replication, blockSize,
-        modificationTime, accessTime, renameOptions);
+        modificationTime, accessTime, renameOptions, delegationTokenArgument);
   }
 
   /** Handle HTTP PUT request. */
@@ -264,7 +267,9 @@ public class NamenodeWebHdfsMethods {
       @QueryParam(AccessTimeParam.NAME) @DefaultValue(AccessTimeParam.DEFAULT)
           final AccessTimeParam accessTime,
       @QueryParam(RenameOptionSetParam.NAME) @DefaultValue(RenameOptionSetParam.DEFAULT)
-          final RenameOptionSetParam renameOptions
+          final RenameOptionSetParam renameOptions,
+      @QueryParam(TokenArgumentParam.NAME) @DefaultValue(TokenArgumentParam.DEFAULT)
+          final TokenArgumentParam delegationTokenArgument
       ) throws IOException, InterruptedException {
 
     if (LOG.isTraceEnabled()) {
@@ -344,7 +349,7 @@ public class NamenodeWebHdfsMethods {
     case RENEWDELEGATIONTOKEN:
     {
       final Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
-      token.decodeFromUrlString(delegation.getValue());
+      token.decodeFromUrlString(delegationTokenArgument.getValue());
       final long expiryTime = np.renewDelegationToken(token);
       final String js = JsonUtil.toJsonString("long", expiryTime);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
@@ -352,7 +357,7 @@ public class NamenodeWebHdfsMethods {
     case CANCELDELEGATIONTOKEN:
     {
       final Token<DelegationTokenIdentifier> token = new Token<DelegationTokenIdentifier>();
-      token.decodeFromUrlString(delegation.getValue());
+      token.decodeFromUrlString(delegationTokenArgument.getValue());
       np.cancelDelegationToken(token);
       return Response.ok().type(MediaType.APPLICATION_JSON).build();
     }
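Taken together, the JspHelper, FSNamesystem and NamenodeWebHdfsMethods changes mean a delegation token arriving on a WebHDFS URL is actually checked against the namenode's secret manager before the caller's identity is trusted, and renew/cancel read the target token from the new TokenArgumentParam rather than from the authentication parameter. The sketch below is a simplified recap of that server-side verification path; it condenses the JspHelper code above into one helper, and TokenVerificationSketch/verify are illustrative names, not part of the patch.

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;

import javax.servlet.ServletContext;

import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;

class TokenVerificationSketch {
  // Rebuild the identifier from the token bytes, then ask the namesystem to
  // confirm the password matches what its secret manager would generate.
  static UserGroupInformation verify(ServletContext context, String tokenString)
      throws IOException {
    final Token<DelegationTokenIdentifier> token =
        new Token<DelegationTokenIdentifier>();
    token.decodeFromUrlString(tokenString);  // value of the delegation query parameter

    final DelegationTokenIdentifier id = new DelegationTokenIdentifier();
    id.readFields(new DataInputStream(
        new ByteArrayInputStream(token.getIdentifier())));

    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
    if (nn != null) {
      // Throws InvalidToken (an IOException) if the password does not match.
      nn.getNamesystem().verifyToken(id, token.getPassword());
    }
    return id.getUser();  // only trusted after verification
  }
}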
Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/AuthFilter.java Wed Nov 2 21:31:38 2011
@@ -17,11 +17,17 @@
  */
 package org.apache.hadoop.hdfs.web;
 
+import java.io.IOException;
 import java.util.Properties;
 
+import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ServletException;
+import javax.servlet.ServletRequest;
+import javax.servlet.ServletResponse;
+import javax.servlet.http.HttpServletRequest;
 
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
@@ -55,6 +61,21 @@ public class AuthFilter extends Authenti
     p.setProperty(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "true");
     //set cookie path
     p.setProperty(COOKIE_PATH, "/");
-    return p;
+    return p;
+  }
+
+  @Override
+  public void doFilter(ServletRequest request, ServletResponse response,
+      FilterChain filterChain) throws IOException, ServletException {
+    HttpServletRequest httpRequest = (HttpServletRequest) request;
+    String tokenString = httpRequest
+        .getParameter(DelegationParam.NAME);
+    if (tokenString != null) {
+      //Token is present in the url, therefore token will be used for
+      //authentication, bypass kerberos authentication.
+      filterChain.doFilter(httpRequest, response);
+      return;
+    }
+    super.doFilter(request, response, filterChain);
   }
 }
\ No newline at end of file

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Wed Nov 2 21:31:38 2011
@@ -62,6 +62,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
+import org.apache.hadoop.hdfs.web.resources.TokenArgumentParam;
 import org.apache.hadoop.hdfs.web.resources.DeleteOpParam;
 import org.apache.hadoop.hdfs.web.resources.DestinationParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
@@ -290,24 +291,42 @@ public class WebHdfsFileSystem extends F
     final String query = op.toQueryString()
         + '&' + new UserParam(ugi)
         + Param.toSortedString("&", parameters);
-    final URL url = getNamenodeURL(path, addDt2Query(query));
+    final URL url;
+    if (op.equals(PutOpParam.Op.RENEWDELEGATIONTOKEN)
+        || op.equals(GetOpParam.Op.GETDELEGATIONTOKEN)) {
+      // Skip adding delegation token for getting or renewing delegation token,
+      // because these operations require kerberos authentication.
+      url = getNamenodeURL(path, query);
+    } else {
+      url = getNamenodeURL(path, addDt2Query(query));
+    }
     if (LOG.isTraceEnabled()) {
       LOG.trace("url=" + url);
     }
     return url;
   }
 
+  private HttpURLConnection getHttpUrlConnection(URL url)
+      throws IOException {
+    final HttpURLConnection conn;
+    try {
+      if (ugi.hasKerberosCredentials()) {
+        conn = new AuthenticatedURL(AUTH).openConnection(url, authToken);
+      } else {
+        conn = (HttpURLConnection)url.openConnection();
+      }
+    } catch (AuthenticationException e) {
+      throw new IOException("Authentication failed, url=" + url, e);
+    }
+    return conn;
+  }
+
   private HttpURLConnection httpConnect(final HttpOpParam.Op op, final Path fspath,
       final Param<?,?>... parameters) throws IOException {
     final URL url = toUrl(op, fspath, parameters);
 
     //connect and get response
-    final HttpURLConnection conn;
-    try {
-      conn = new AuthenticatedURL(AUTH).openConnection(url, authToken);
-    } catch(AuthenticationException e) {
-      throw new IOException("Authentication failed, url=" + url, e);
-    }
+    final HttpURLConnection conn = getHttpUrlConnection(url);
     try {
       conn.setRequestMethod(op.getType().toString());
       conn.setDoOutput(op.getDoOutput());
@@ -317,7 +336,7 @@ public class WebHdfsFileSystem extends F
       }
       conn.connect();
       return conn;
-    } catch(IOException e) {
+    } catch (IOException e) {
       conn.disconnect();
       throw e;
     }
@@ -513,7 +532,24 @@ public class WebHdfsFileSystem extends F
     statistics.incrementReadOps(1);
     final HttpOpParam.Op op = GetOpParam.Op.OPEN;
     final URL url = toUrl(op, f, new BufferSizeParam(buffersize));
-    return new FSDataInputStream(new ByteRangeInputStream(url));
+    ByteRangeInputStream str = getByteRangeInputStream(url);
+    return new FSDataInputStream(str);
+  }
+
+  private class URLOpener extends ByteRangeInputStream.URLOpener {
+
+    public URLOpener(URL u) {
+      super(u);
+    }
+
+    @Override
+    public HttpURLConnection openConnection() throws IOException {
+      return getHttpUrlConnection(offsetUrl);
+    }
+  }
+
+  private ByteRangeInputStream getByteRangeInputStream(URL url) {
+    return new ByteRangeInputStream(new URLOpener(url), new URLOpener(null));
   }
 
   @Override
@@ -576,17 +612,19 @@ public class WebHdfsFileSystem extends F
 
   private synchronized long renewDelegationToken(final Token<?> token
       ) throws IOException {
-    delegationToken = token;
     final HttpOpParam.Op op = PutOpParam.Op.RENEWDELEGATIONTOKEN;
-    final Map<?, ?> m = run(op, null);
+    TokenArgumentParam dtargParam = new TokenArgumentParam(
+        token.encodeToUrlString());
+    final Map<?, ?> m = run(op, null, dtargParam);
     return (Long) m.get("long");
   }
 
   private synchronized void cancelDelegationToken(final Token<?> token
      ) throws IOException {
-    delegationToken = token;
     final HttpOpParam.Op op = PutOpParam.Op.CANCELDELEGATIONTOKEN;
-    run(op, null);
+    TokenArgumentParam dtargParam = new TokenArgumentParam(
+        token.encodeToUrlString());
+    run(op, null, dtargParam);
   }
 
   @Override
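On the client side, the net effect is that WebHDFS token operations work against a secure cluster: GETDELEGATIONTOKEN, RENEWDELEGATIONTOKEN and CANCELDELEGATIONTOKEN go over the Kerberos/SPNEGO-authenticated connection (no delegation parameter is appended for those ops), while the token being renewed or cancelled travels in the new token argument parameter. A rough usage sketch, assuming a Kerberos login and a reachable namenode HTTP address; the host, port and class name are placeholders, not values from this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.token.Token;

public class WebHdfsTokenSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Placeholder address; use the namenode's HTTP host and port.
    FileSystem webhdfs =
        new Path("webhdfs://namenode-host:50070/").getFileSystem(conf);

    // Issued over the SPNEGO-authenticated connection.
    Token<?> token = webhdfs.getDelegationToken("JobTracker");

    // With this patch, the token is sent as the new TokenArgumentParam of
    // the PUT request, while the caller still authenticates via Kerberos.
    long nextExpiry = token.renew(conf);   // op=RENEWDELEGATIONTOKEN
    token.cancel(conf);                    // op=CANCELDELEGATIONTOKEN
    System.out.println("renewed until " + nextExpiry);
  }
}

The new test code below exercises the same flow, wrapping renew and cancel in a doAs for the renewer user.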
Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java Wed Nov 2 21:31:38 2011
@@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.web.resou
 
 import org.apache.hadoop.security.UserGroupInformation;
 
-/** Delegation token parameter. */
+/** Represents delegation token used for authentication. */
 public class DelegationParam extends StringParam {
   /** Parameter name. */
   public static final String NAME = "delegation";

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/UserProvider.java Wed Nov 2 21:31:38 2011
@@ -50,7 +50,7 @@ public class UserProvider
     final Configuration conf = (Configuration) servletcontext
         .getAttribute(JspHelper.CURRENT_CONF);
     try {
-      return JspHelper.getUGI(null, request, conf,
+      return JspHelper.getUGI(servletcontext, request, conf,
           AuthenticationMethod.KERBEROS, false);
     } catch (IOException e) {
       throw new RuntimeException(e);

Modified: hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java?rev=1196812&r1=1196811&r2=1196812&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationToken.java Wed Nov 2 21:31:38 2011
@@ -148,7 +148,7 @@ public class TestDelegationToken {
   @Test
   public void testDelegationTokenDFSApi() throws Exception {
     DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
-    Token<DelegationTokenIdentifier> token = dfs.getDelegationToken("JobTracker");
+    final Token<DelegationTokenIdentifier> token = dfs.getDelegationToken("JobTracker");
     DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
     byte[] tokenId = token.getIdentifier();
     identifier.readFields(new DataInputStream(
@@ -156,6 +156,15 @@
     LOG.info("A valid token should have non-null password, and should be renewed successfully");
     Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
     dtSecretManager.renewToken(token, "JobTracker");
+    UserGroupInformation.createRemoteUser("JobTracker").doAs(
+        new PrivilegedExceptionAction<Object>() {
+          @Override
+          public Object run() throws Exception {
+            token.renew(config);
+            token.cancel(config);
+            return null;
+          }
+        });
   }
 
   @SuppressWarnings("deprecation")
@@ -175,13 +184,23 @@
       }
     });
 
-    final Token<DelegationTokenIdentifier> token = webhdfs.getDelegationToken("JobTracker");
+    final Token<DelegationTokenIdentifier> token = webhdfs
+        .getDelegationToken("JobTracker");
     DelegationTokenIdentifier identifier = new DelegationTokenIdentifier();
     byte[] tokenId = token.getIdentifier();
-    identifier.readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
+    identifier
+        .readFields(new DataInputStream(new ByteArrayInputStream(tokenId)));
     LOG.info("A valid token should have non-null password, and should be renewed successfully");
     Assert.assertTrue(null != dtSecretManager.retrievePassword(identifier));
     dtSecretManager.renewToken(token, "JobTracker");
+    ugi.doAs(new PrivilegedExceptionAction<Object>() {
+      @Override
+      public Object run() throws Exception {
+        token.renew(config);
+        token.cancel(config);
+        return null;
+      }
+    });
   }
 
   @Test