Repository: hadoop
Updated Branches:
  refs/heads/branch-2 c705eab22 -> cf6fb0916


HDFS-9166. Move hftp / hsftp filesystem to hdfs-client. Contributed by 
Mingliang Liu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cf6fb091
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cf6fb091
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cf6fb091

Branch: refs/heads/branch-2
Commit: cf6fb091622c94b081321240b338a4cc42b3bea0
Parents: c705eab
Author: Haohui Mai <whe...@apache.org>
Authored: Tue Sep 29 10:05:58 2015 -0700
Committer: Haohui Mai <whe...@apache.org>
Committed: Tue Sep 29 10:07:19 2015 -0700

----------------------------------------------------------------------
 .../org/apache/hadoop/hdfs/DFSUtilClient.java   |  11 +
 .../token/delegation/DelegationUtilsClient.java | 253 +++++++++++++++++++
 .../services/org.apache.hadoop.fs.FileSystem    |   2 +
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt     |   3 +
 .../java/org/apache/hadoop/hdfs/DFSUtil.java    |   7 +-
 .../hadoop/hdfs/server/common/JspHelper.java    |  35 +--
 .../namenode/CancelDelegationTokenServlet.java  |   6 +-
 .../server/namenode/FileChecksumServlets.java   |   3 +-
 .../hdfs/server/namenode/FileDataServlet.java   |   5 +-
 .../namenode/GetDelegationTokenServlet.java     |   5 +-
 .../server/namenode/NameNodeHttpServer.java     |  11 +-
 .../namenode/RenewDelegationTokenServlet.java   |   5 +-
 .../server/namenode/StartupProgressServlet.java |   2 -
 .../hdfs/tools/DelegationTokenFetcher.java      | 198 +--------------
 .../apache/hadoop/hdfs/web/HftpFileSystem.java  |  25 +-
 .../apache/hadoop/hdfs/web/HsftpFileSystem.java |   3 +-
 .../services/org.apache.hadoop.fs.FileSystem    |  17 --
 .../hdfs/server/common/TestJspHelper.java       |  17 +-
 .../tools/TestDelegationTokenRemoteFetcher.java |  11 +-
 19 files changed, 321 insertions(+), 298 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
index e275afb..da05b22 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSUtilClient.java
@@ -652,4 +652,15 @@ public class DFSUtilClient {
     return URI.create(HdfsConstants.HDFS_URI_SCHEME + "://"
         + namenode.getHostName() + portString);
   }
+
+  /** Create a URI from the scheme and address */
+  public static URI createUri(String scheme, InetSocketAddress address) {
+    try {
+      return new URI(scheme, null, address.getHostName(), address.getPort(),
+          null, null, null);
+    } catch (URISyntaxException ue) {
+      throw new IllegalArgumentException(ue);
+    }
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationUtilsClient.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationUtilsClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationUtilsClient.java
new file mode 100644
index 0000000..9b3e4cb
--- /dev/null
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/security/token/delegation/DelegationUtilsClient.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.security.token.delegation;
+
+import com.google.common.base.Charsets;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.hdfs.util.IOUtilsClient;
+import org.apache.hadoop.hdfs.web.URLConnectionFactory;
+import org.apache.hadoop.hdfs.web.WebHdfsConstants;
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import 
org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.token.Token;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.URI;
+import java.net.URL;
+
+@InterfaceAudience.Private
+public class DelegationUtilsClient {
+  public static final Logger LOG = LoggerFactory.getLogger(
+      DelegationUtilsClient.class);
+
+  public static final String STARTUP_PROGRESS_PATH_SPEC = "/startupProgress";
+  public static final String GET_DELEGATION_TOKEN_PATH_SPEC = 
"/getDelegationToken";
+  public static final String RENEW_DELEGATION_TOKEN_PATH_SPEC = 
"/renewDelegationToken";
+  public static final String CANCEL_DELEGATION_TOKEN_PATH_SPEC = 
"/cancelDelegationToken";
+  public static final String TOKEN = "token";
+  public static final String RENEWER = "renewer";
+
+  public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
+  private static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME 
+ "=";
+
+  static public Credentials getDTfromRemote(URLConnectionFactory factory,
+      URI nnUri, String renewer, String proxyUser) throws IOException {
+    StringBuilder buf = new StringBuilder(nnUri.toString())
+        .append(GET_DELEGATION_TOKEN_PATH_SPEC);
+    String separator = "?";
+    if (renewer != null) {
+      buf.append("?").append(RENEWER).append("=")
+          .append(renewer);
+      separator = "&";
+    }
+    if (proxyUser != null) {
+      buf.append(separator).append("doas=").append(proxyUser);
+    }
+
+    boolean isHttps = nnUri.getScheme().equals("https");
+
+    HttpURLConnection conn = null;
+    DataInputStream dis = null;
+    InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
+        .getAuthority());
+
+    try {
+      LOG.debug("Retrieving token from: {}", buf);
+
+      conn = run(factory, new URL(buf.toString()));
+      InputStream in = conn.getInputStream();
+      Credentials ts = new Credentials();
+      dis = new DataInputStream(in);
+      ts.readFields(dis);
+      for (Token<?> token : ts.getAllTokens()) {
+        token.setKind(isHttps ? WebHdfsConstants.HSFTP_TOKEN_KIND :
+            WebHdfsConstants.HFTP_TOKEN_KIND);
+        SecurityUtil.setTokenService(token, serviceAddr);
+      }
+      return ts;
+    } catch (Exception e) {
+      throw new IOException("Unable to obtain remote token", e);
+    } finally {
+      IOUtilsClient.cleanup(LOG, dis);
+      if (conn != null) {
+        conn.disconnect();
+      }
+    }
+  }
+
+  /**
+   * Cancel a Delegation Token.
+   * @param nnAddr the NameNode's address
+   * @param tok the token to cancel
+   * @throws IOException
+   * @throws AuthenticationException
+   */
+  static public void cancelDelegationToken(URLConnectionFactory factory,
+      URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
+      AuthenticationException {
+    StringBuilder buf = new StringBuilder(nnAddr.toString())
+        .append(CANCEL_DELEGATION_TOKEN_PATH_SPEC).append("?")
+        .append(TOKEN).append("=")
+        .append(tok.encodeToUrlString());
+    HttpURLConnection conn = run(factory, new URL(buf.toString()));
+    conn.disconnect();
+  }
+
+  /**
+   * Renew a Delegation Token.
+   * @param nnAddr the NameNode's address
+   * @param tok the token to renew
+   * @return the Date that the token will expire next.
+   * @throws IOException
+   * @throws AuthenticationException
+   */
+  static public long renewDelegationToken(URLConnectionFactory factory,
+      URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
+      AuthenticationException {
+    StringBuilder buf = new StringBuilder(nnAddr.toString())
+        .append(RENEW_DELEGATION_TOKEN_PATH_SPEC).append("?")
+        .append(TOKEN).append("=")
+        .append(tok.encodeToUrlString());
+
+    HttpURLConnection connection = null;
+    BufferedReader in = null;
+    try {
+      connection = run(factory, new URL(buf.toString()));
+      in = new BufferedReader(new InputStreamReader(
+          connection.getInputStream(), Charsets.UTF_8));
+      long result = Long.parseLong(in.readLine());
+      return result;
+    } catch (IOException ie) {
+      LOG.info("error in renew over HTTP", ie);
+      IOException e = getExceptionFromResponse(connection);
+
+      if (e != null) {
+        LOG.info("rethrowing exception from HTTP request: "
+            + e.getLocalizedMessage());
+        throw e;
+      }
+      throw ie;
+    } finally {
+      IOUtilsClient.cleanup(LOG, in);
+      if (connection != null) {
+        connection.disconnect();
+      }
+    }
+  }
+
+  // parse the message and extract the name of the exception and the message
+  static private IOException getExceptionFromResponse(HttpURLConnection con) {
+    IOException e = null;
+    String resp;
+    if(con == null)
+      return null;
+
+    try {
+      resp = con.getResponseMessage();
+    } catch (IOException ie) { return null; }
+    if(resp == null || resp.isEmpty())
+      return null;
+
+    String exceptionClass = "", exceptionMsg = "";
+    String[] rs = resp.split(";");
+    if(rs.length < 2)
+      return null;
+    exceptionClass = rs[0];
+    exceptionMsg = rs[1];
+    LOG.info("Error response from HTTP request=" + resp +
+        ";ec=" + exceptionClass + ";em="+exceptionMsg);
+
+    if(exceptionClass == null || exceptionClass.isEmpty())
+      return null;
+
+    // recreate exception objects
+    try {
+      Class<? extends Exception> ec =
+         Class.forName(exceptionClass).asSubclass(Exception.class);
+      // we are interested in constructor with String arguments
+      java.lang.reflect.Constructor<? extends Exception> constructor =
+          ec.getConstructor (new Class[] {String.class});
+
+      // create an instance
+      e =  (IOException) constructor.newInstance (exceptionMsg);
+
+    } catch (Exception ee)  {
+      LOG.warn("failed to create object of this class", ee);
+    }
+    if(e == null)
+      return null;
+
+    e.setStackTrace(new StackTraceElement[0]); // local stack is not relevant
+    LOG.info("Exception from HTTP response=" + e.getLocalizedMessage());
+    return e;
+  }
+
+  private static HttpURLConnection run(URLConnectionFactory factory, URL url)
+      throws IOException, AuthenticationException {
+    HttpURLConnection conn = null;
+
+    try {
+      conn = (HttpURLConnection) factory.openConnection(url, true);
+      if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
+        String msg = conn.getResponseMessage();
+
+        throw new IOException("Error when dealing remote token: " + msg);
+      }
+    } catch (IOException ie) {
+      LOG.info("Error when dealing remote token:", ie);
+      IOException e = getExceptionFromResponse(conn);
+
+      if (e != null) {
+        LOG.info("rethrowing exception from HTTP request: "
+            + e.getLocalizedMessage());
+        throw e;
+      }
+      throw ie;
+    }
+    return conn;
+  }
+
+  /**
+   * Returns the url parameter for the given token string.
+   * @param tokenString
+   * @return url parameter
+   */
+  public static String getDelegationTokenUrlParam(String tokenString) {
+    if (tokenString == null ) {
+      return "";
+    }
+    if (UserGroupInformation.isSecurityEnabled()) {
+      return SET_DELEGATION + tokenString;
+    } else {
+      return "";
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs-client/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
index abe2bfc..b1eaba2 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
@@ -16,3 +16,5 @@
 org.apache.hadoop.hdfs.DistributedFileSystem
 org.apache.hadoop.hdfs.web.WebHdfsFileSystem
 org.apache.hadoop.hdfs.web.SWebHdfsFileSystem
+org.apache.hadoop.hdfs.web.HftpFileSystem
+org.apache.hadoop.hdfs.web.HsftpFileSystem

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt 
b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 2993157..36a93e0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -649,6 +649,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9165. Move entries in META-INF/services/o.a.h.fs.FileSystem to
     hdfs-client. (Mingliang Liu via wheat9)
 
+    HDFS-9166. Move hftp / hsftp filesystem to hdfs-client.
+    (Mingliang Liu via wheat9)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
index 16695fa..c290031 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
@@ -1079,12 +1079,7 @@ public class DFSUtil {
 
   /** Create a URI from the scheme and address */
   public static URI createUri(String scheme, InetSocketAddress address) {
-    try {
-      return new URI(scheme, null, address.getHostName(), address.getPort(),
-          null, null, null);
-    } catch (URISyntaxException ue) {
-      throw new IllegalArgumentException(ue);
-    }
+    return DFSUtilClient.createUri(scheme, address);
   }
   
   /**

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
index 67bfb5c..542c843 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java
@@ -38,15 +38,13 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
-import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.net.NetUtils;
@@ -59,23 +57,10 @@ import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 
-import javax.servlet.ServletContext;
-import javax.servlet.http.HttpServletRequest;
-import java.io.ByteArrayInputStream;
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-
-import static 
org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
-import static 
org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
-
 @InterfaceAudience.Private
 public class JspHelper {
   public static final String CURRENT_CONF = "current.conf";
-  public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
   public static final String NAMENODE_ADDRESS = "nnaddr";
-  static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME +
-                                              "=";
   private static final Log LOG = LogFactory.getLog(JspHelper.class);
 
   /** Private constructor for preventing creating JspHelper object. */
@@ -233,7 +218,7 @@ public class JspHelper {
    
     if (UserGroupInformation.isSecurityEnabled()) {
       remoteUser = request.getRemoteUser();
-      final String tokenString = 
request.getParameter(DELEGATION_PARAMETER_NAME);
+      final String tokenString = 
request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME);
       if (tokenString != null) {
         // Token-based connections need only verify the effective user, and
         // disallow proxying to different user.  Proxy authorization checks
@@ -353,22 +338,6 @@ public class JspHelper {
   }
 
   /**
-   * Returns the url parameter for the given token string.
-   * @param tokenString
-   * @return url parameter
-   */
-  public static String getDelegationTokenUrlParam(String tokenString) {
-    if (tokenString == null ) {
-      return "";
-    }
-    if (UserGroupInformation.isSecurityEnabled()) {
-      return SET_DELEGATION + tokenString;
-    } else {
-      return "";
-    }
-  }
-
-  /**
    * Returns the url parameter for the given string, prefixed with
    * paramSeparator.
    * 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet.java
index 0b56c81..62b801e 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/CancelDelegationTokenServlet.java
@@ -28,6 +28,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
@@ -37,9 +38,6 @@ import org.apache.hadoop.security.token.Token;
 @SuppressWarnings("serial")
 public class CancelDelegationTokenServlet extends DfsServlet {
   private static final Log LOG = 
LogFactory.getLog(CancelDelegationTokenServlet.class);
-  public static final String PATH_SPEC = "/cancelDelegationToken";
-  public static final String TOKEN = "token";
-  
   @Override
   protected void doGet(final HttpServletRequest req, final HttpServletResponse 
resp)
       throws ServletException, IOException {
@@ -57,7 +55,7 @@ public class CancelDelegationTokenServlet extends DfsServlet {
     }
     final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
         context);
-    String tokenString = req.getParameter(TOKEN);
+    String tokenString = req.getParameter(DelegationUtilsClient.TOKEN);
     if (tokenString == null) {
       resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,
                      "Token to renew not specified");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
index 8c6a7cc..f4ea969 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileChecksumServlets.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
@@ -70,7 +71,7 @@ public class FileChecksumServlets {
       String dtParam = "";
       if (UserGroupInformation.isSecurityEnabled()) {
         String tokenString = 
ugi.getTokens().iterator().next().encodeToUrlString();
-        dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
+        dtParam = 
DelegationUtilsClient.getDelegationTokenUrlParam(tokenString);
       }
       String addr = nn.getNameNodeAddressHostPortString();
       String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, 
addr);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
index d6f36c2..a38b662 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FileDataServlet.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ServletUtil;
@@ -66,7 +67,7 @@ public class FileDataServlet extends DfsServlet {
 
     String dtParam = "";
     if (dt != null) {
-      dtParam = JspHelper.getDelegationTokenUrlParam(dt);
+      dtParam = DelegationUtilsClient.getDelegationTokenUrlParam(dt);
     }
 
     // Add namenode address to the url params
@@ -120,7 +121,7 @@ public class FileDataServlet extends DfsServlet {
           final String path = ServletUtil.getDecodedPath(request, "/data");
           final String encodedPath = ServletUtil.getRawPath(request, "/data");
           String delegationToken = request
-              .getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
+              .getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME);
 
           HdfsFileStatus info = nn.getFileInfo(path);
           if (info != null && !info.isDir()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
index 55bca6f..f0511d4 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetDelegationTokenServlet.java
@@ -29,6 +29,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -38,8 +39,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 @SuppressWarnings("serial")
 public class GetDelegationTokenServlet extends DfsServlet {
   private static final Log LOG = 
LogFactory.getLog(GetDelegationTokenServlet.class);
-  public static final String PATH_SPEC = "/getDelegationToken";
-  public static final String RENEWER = "renewer";
   
   @Override
   protected void doGet(final HttpServletRequest req, final HttpServletResponse 
resp)
@@ -58,7 +57,7 @@ public class GetDelegationTokenServlet extends DfsServlet {
     }
     LOG.info("Sending token: {" + ugi.getUserName() + "," + 
req.getRemoteAddr() +"}");
     final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
-    String renewer = req.getParameter(RENEWER);
+    String renewer = req.getParameter(DelegationUtilsClient.RENEWER);
     final String renewerFinal = (renewer == null) ? 
         req.getUserPrincipal().getName() : renewer;
     

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index b1bb4ce..749f34c 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
-import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.startupprogress.StartupProgress;
 import 
org.apache.hadoop.hdfs.server.namenode.web.resources.NamenodeWebHdfsMethods;
@@ -239,15 +239,16 @@ public class NameNodeHttpServer {
 
   private static void setupServlets(HttpServer2 httpServer, Configuration 
conf) {
     httpServer.addInternalServlet("startupProgress",
-        StartupProgressServlet.PATH_SPEC, StartupProgressServlet.class);
+        DelegationUtilsClient.STARTUP_PROGRESS_PATH_SPEC,
+        StartupProgressServlet.class);
     httpServer.addInternalServlet("getDelegationToken",
-        GetDelegationTokenServlet.PATH_SPEC, 
+        DelegationUtilsClient.GET_DELEGATION_TOKEN_PATH_SPEC,
         GetDelegationTokenServlet.class, true);
     httpServer.addInternalServlet("renewDelegationToken", 
-        RenewDelegationTokenServlet.PATH_SPEC, 
+        DelegationUtilsClient.RENEW_DELEGATION_TOKEN_PATH_SPEC,
         RenewDelegationTokenServlet.class, true);
     httpServer.addInternalServlet("cancelDelegationToken", 
-        CancelDelegationTokenServlet.PATH_SPEC, 
+        DelegationUtilsClient.CANCEL_DELEGATION_TOKEN_PATH_SPEC,
         CancelDelegationTokenServlet.class, true);
     httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class,
         true);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
index caec765..57a4fc9 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/RenewDelegationTokenServlet.java
@@ -30,6 +30,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 
@@ -41,8 +42,6 @@ import com.google.common.base.Charsets;
 @SuppressWarnings("serial")
 public class RenewDelegationTokenServlet extends DfsServlet {
   private static final Log LOG = 
LogFactory.getLog(RenewDelegationTokenServlet.class);
-  public static final String PATH_SPEC = "/renewDelegationToken";
-  public static final String TOKEN = "token";
   
   @Override
   protected void doGet(final HttpServletRequest req, final HttpServletResponse 
resp)
@@ -60,7 +59,7 @@ public class RenewDelegationTokenServlet extends DfsServlet {
       return;
     }
     final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
-    String tokenString = req.getParameter(TOKEN);
+    String tokenString = req.getParameter(DelegationUtilsClient.TOKEN);
     if (tokenString == null) {
       resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,
                      "Token to renew not specified");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
index f18fd92..6a0a8be 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/StartupProgressServlet.java
@@ -52,8 +52,6 @@ public class StartupProgressServlet extends DfsServlet {
   private static final String STEPS = "steps";
   private static final String TOTAL = "total";
 
-  public static final String PATH_SPEC = "/startupProgress";
-
   @Override
   protected void doGet(HttpServletRequest req, HttpServletResponse resp)
       throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
index 40daa24..2aaf106 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DelegationTokenFetcher.java
@@ -17,17 +17,11 @@
  */
 package org.apache.hadoop.hdfs.tools;
 
-import java.io.BufferedReader;
 import java.io.ByteArrayInputStream;
 import java.io.DataInputStream;
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.io.PrintStream;
-import java.net.HttpURLConnection;
-import java.net.InetSocketAddress;
 import java.net.URI;
-import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 import java.util.Collection;
 import java.util.Date;
@@ -43,24 +37,15 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
-import org.apache.hadoop.hdfs.server.namenode.CancelDelegationTokenServlet;
-import org.apache.hadoop.hdfs.server.namenode.GetDelegationTokenServlet;
-import org.apache.hadoop.hdfs.server.namenode.RenewDelegationTokenServlet;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.net.NetUtils;
 
-import org.apache.hadoop.hdfs.web.WebHdfsConstants;
 import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import 
org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ExitUtil;
 import org.apache.hadoop.util.GenericOptionsParser;
 
-import com.google.common.base.Charsets;
-
 /**
  * Fetch a DelegationToken from the current Namenode and store it in the
  * specified file.
@@ -186,8 +171,8 @@ public class DelegationTokenFetcher {
             } else {
               // otherwise we are fetching
               if (webUrl != null) {
-                Credentials creds = getDTfromRemote(connectionFactory, new URI(
-                    webUrl), renewer, null);
+                Credentials creds = DelegationUtilsClient.getDTfromRemote(
+                    connectionFactory, new URI(webUrl), renewer, null);
                 creds.writeTokenStorageFile(tokenFile, conf);
                 for (Token<?> token : creds.getAllTokens()) {
                   System.out.println("Fetched token via " + webUrl + " for "
@@ -208,182 +193,5 @@ public class DelegationTokenFetcher {
           }
         });
   }
-  
-  static public Credentials getDTfromRemote(URLConnectionFactory factory,
-      URI nnUri, String renewer, String proxyUser) throws IOException {
-    StringBuilder buf = new StringBuilder(nnUri.toString())
-        .append(GetDelegationTokenServlet.PATH_SPEC);
-    String separator = "?";
-    if (renewer != null) {
-      buf.append("?").append(GetDelegationTokenServlet.RENEWER).append("=")
-          .append(renewer);
-      separator = "&";
-    }
-    if (proxyUser != null) {
-      buf.append(separator).append("doas=").append(proxyUser);
-    }
-
-    boolean isHttps = nnUri.getScheme().equals("https");
-
-    HttpURLConnection conn = null;
-    DataInputStream dis = null;
-    InetSocketAddress serviceAddr = NetUtils.createSocketAddr(nnUri
-        .getAuthority());
-
-    try {
-      if(LOG.isDebugEnabled()) {
-        LOG.debug("Retrieving token from: " + buf);
-      }
-
-      conn = run(factory, new URL(buf.toString()));
-      InputStream in = conn.getInputStream();
-      Credentials ts = new Credentials();
-      dis = new DataInputStream(in);
-      ts.readFields(dis);
-      for (Token<?> token : ts.getAllTokens()) {
-        token.setKind(isHttps ? WebHdfsConstants.HSFTP_TOKEN_KIND : 
WebHdfsConstants.HFTP_TOKEN_KIND);
-        SecurityUtil.setTokenService(token, serviceAddr);
-      }
-      return ts;
-    } catch (Exception e) {
-      throw new IOException("Unable to obtain remote token", e);
-    } finally {
-      IOUtils.cleanup(LOG, dis);
-      if (conn != null) {
-        conn.disconnect();
-      }
-    }
-  }
 
-  /**
-   * Cancel a Delegation Token.
-   * @param nnAddr the NameNode's address
-   * @param tok the token to cancel
-   * @throws IOException
-   * @throws AuthenticationException
-   */
-  static public void cancelDelegationToken(URLConnectionFactory factory,
-      URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
-      AuthenticationException {
-    StringBuilder buf = new StringBuilder(nnAddr.toString())
-        .append(CancelDelegationTokenServlet.PATH_SPEC).append("?")
-        .append(CancelDelegationTokenServlet.TOKEN).append("=")
-        .append(tok.encodeToUrlString());
-    HttpURLConnection conn = run(factory, new URL(buf.toString()));
-    conn.disconnect();
-  }
-
-  /**
-   * Renew a Delegation Token.
-   * @param nnAddr the NameNode's address
-   * @param tok the token to renew
-   * @return the Date that the token will expire next.
-   * @throws IOException
-   * @throws AuthenticationException
-   */
-  static public long renewDelegationToken(URLConnectionFactory factory,
-      URI nnAddr, Token<DelegationTokenIdentifier> tok) throws IOException,
-      AuthenticationException {
-    StringBuilder buf = new StringBuilder(nnAddr.toString())
-        .append(RenewDelegationTokenServlet.PATH_SPEC).append("?")
-        .append(RenewDelegationTokenServlet.TOKEN).append("=")
-        .append(tok.encodeToUrlString());
-
-    HttpURLConnection connection = null;
-    BufferedReader in = null;
-    try {
-      connection = run(factory, new URL(buf.toString()));
-      in = new BufferedReader(new InputStreamReader(
-          connection.getInputStream(), Charsets.UTF_8));
-      long result = Long.parseLong(in.readLine());
-      return result;
-    } catch (IOException ie) {
-      LOG.info("error in renew over HTTP", ie);
-      IOException e = getExceptionFromResponse(connection);
-
-      if (e != null) {
-        LOG.info("rethrowing exception from HTTP request: "
-            + e.getLocalizedMessage());
-        throw e;
-      }
-      throw ie;
-    } finally {
-      IOUtils.cleanup(LOG, in);
-      if (connection != null) {
-        connection.disconnect();
-      }
-    }
-  }
-
-  // parse the message and extract the name of the exception and the message
-  static private IOException getExceptionFromResponse(HttpURLConnection con) {
-    IOException e = null;
-    String resp;
-    if(con == null) 
-      return null;    
-    
-    try {
-      resp = con.getResponseMessage();
-    } catch (IOException ie) { return null; }
-    if(resp == null || resp.isEmpty())
-      return null;
-
-    String exceptionClass = "", exceptionMsg = "";
-    String[] rs = resp.split(";");
-    if(rs.length < 2)
-      return null;
-    exceptionClass = rs[0];
-    exceptionMsg = rs[1];
-    LOG.info("Error response from HTTP request=" + resp + 
-        ";ec=" + exceptionClass + ";em="+exceptionMsg);
-    
-    if(exceptionClass == null || exceptionClass.isEmpty())
-      return null;
-    
-    // recreate exception objects
-    try {
-      Class<? extends Exception> ec = 
-         Class.forName(exceptionClass).asSubclass(Exception.class);
-      // we are interested in constructor with String arguments
-      java.lang.reflect.Constructor<? extends Exception> constructor =
-          ec.getConstructor (new Class[] {String.class});
-
-      // create an instance
-      e =  (IOException) constructor.newInstance (exceptionMsg);
-
-    } catch (Exception ee)  {
-      LOG.warn("failed to create object of this class", ee);
-    }
-    if(e == null)
-      return null;
-    
-    e.setStackTrace(new StackTraceElement[0]); // local stack is not relevant
-    LOG.info("Exception from HTTP response=" + e.getLocalizedMessage());
-    return e;
-  }
-
-  private static HttpURLConnection run(URLConnectionFactory factory, URL url)
-      throws IOException, AuthenticationException {
-    HttpURLConnection conn = null;
-
-    try {
-      conn = (HttpURLConnection) factory.openConnection(url, true);
-      if (conn.getResponseCode() != HttpURLConnection.HTTP_OK) {
-        String msg = conn.getResponseMessage();
-
-        throw new IOException("Error when dealing remote token: " + msg);
-      }
-    } catch (IOException ie) {
-      LOG.info("Error when dealing remote token:", ie);
-      IOException e = getExceptionFromResponse(conn);
-
-      if (e != null) {
-        LOG.info("rethrowing exception from HTTP request: "
-            + e.getLocalizedMessage());
-        throw e;
-      }
-      throw ie;
-    }
-    return conn;
-  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
index 14c619d..cafe3cb 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
@@ -46,12 +46,10 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.DFSUtilClient;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
-import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
@@ -62,6 +60,8 @@ import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ServletUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
@@ -92,6 +92,7 @@ public class HftpFileSystem extends FileSystem
 
   protected URI nnUri;
 
+  public static final Logger LOG = 
LoggerFactory.getLogger(HftpFileSystem.class);
   public static final String HFTP_TIMEZONE = "UTC";
   public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
 
@@ -122,7 +123,7 @@ public class HftpFileSystem extends FileSystem
   @Override
   protected int getDefaultPort() {
     return getConf().getInt(HdfsClientConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+        HdfsClientConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
   }
 
   /**
@@ -140,7 +141,7 @@ public class HftpFileSystem extends FileSystem
   }
 
   protected URI getNamenodeUri(URI uri) {
-    return DFSUtil.createUri(getUnderlyingProtocol(), getNamenodeAddr(uri));
+    return DFSUtilClient.createUri(getUnderlyingProtocol(), 
getNamenodeAddr(uri));
   }
 
   /**
@@ -246,7 +247,7 @@ public class HftpFileSystem extends FileSystem
         public Token<?> run() throws IOException {
           Credentials c;
           try {
-            c = DelegationTokenFetcher.getDTfromRemote(connectionFactory,
+            c = DelegationUtilsClient.getDTfromRemote(connectionFactory,
                 nnUri, renewer, proxyUser);
           } catch (IOException e) {
             if (e.getCause() instanceof ConnectException) {
@@ -334,7 +335,7 @@ public class HftpFileSystem extends FileSystem
         tokenAspect.ensureTokenInitialized();
         if (delegationToken != null) {
           tokenString = delegationToken.encodeToUrlString();
-          return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
+          return (query + 
DelegationUtilsClient.getDelegationTokenUrlParam(tokenString));
         }
       }
     }
@@ -694,8 +695,8 @@ public class HftpFileSystem extends FileSystem
         public Long run() throws Exception {
           InetSocketAddress serviceAddr = SecurityUtil
               .getTokenServiceAddr(token);
-          return DelegationTokenFetcher.renewDelegationToken(connectionFactory,
-              DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr),
+          return DelegationUtilsClient.renewDelegationToken(connectionFactory,
+              DFSUtilClient.createUri(getUnderlyingProtocol(), serviceAddr),
               (Token<DelegationTokenIdentifier>) token);
         }
       });
@@ -717,8 +718,8 @@ public class HftpFileSystem extends FileSystem
         public Void run() throws Exception {
           InetSocketAddress serviceAddr = SecurityUtil
               .getTokenServiceAddr(token);
-          DelegationTokenFetcher.cancelDelegationToken(connectionFactory,
-              DFSUtil.createUri(getUnderlyingProtocol(), serviceAddr),
+          DelegationUtilsClient.cancelDelegationToken(connectionFactory,
+              DFSUtilClient.createUri(getUnderlyingProtocol(), serviceAddr),
               (Token<DelegationTokenIdentifier>) token);
           return null;
         }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
index 1e9e96a..85bf604 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs.web;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 
 /**
@@ -63,6 +62,6 @@ public class HsftpFileSystem extends HftpFileSystem {
   @Override
   protected int getDefaultPort() {
     return getConf().getInt(HdfsClientConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
-                            DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
+                            
HdfsClientConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
   }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
deleted file mode 100644
index edd900c..0000000
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/resources/META-INF/services/org.apache.hadoop.fs.FileSystem
+++ /dev/null
@@ -1,17 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-org.apache.hadoop.hdfs.web.HftpFileSystem
-org.apache.hadoop.hdfs.web.HsftpFileSystem

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
index 2bcf4a2..9bec78b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
@@ -93,7 +94,7 @@ public class TestJspHelper {
     Token<DelegationTokenIdentifier> token = new 
Token<DelegationTokenIdentifier>(
         dtId, new DummySecretManager(0, 0, 0, 0));
     String tokenString = token.encodeToUrlString();
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     when(request.getRemoteUser()).thenReturn(user);
 
@@ -121,7 +122,7 @@ public class TestJspHelper {
     //Set the name.node.address attribute in Servlet context to null
     
when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
         .thenReturn(null);
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     verifyServiceInToken(context, request, "3.3.3.3:3333");
   }
@@ -155,7 +156,7 @@ public class TestJspHelper {
     
     // token with no auth-ed user
     request = getMockRequest(null, null, null);
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     ugi = JspHelper.getUGI(context, request, conf);
     Assert.assertNotNull(ugi.getRealUser());
@@ -165,7 +166,7 @@ public class TestJspHelper {
     
     // token with auth-ed user
     request = getMockRequest(realUser, null, null);
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     ugi = JspHelper.getUGI(context, request, conf);
     Assert.assertNotNull(ugi.getRealUser());
@@ -175,7 +176,7 @@ public class TestJspHelper {
     
     // completely different user, token trumps auth
     request = getMockRequest("rogue", null, null);
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     ugi = JspHelper.getUGI(context, request, conf);
     Assert.assertNotNull(ugi.getRealUser());
@@ -185,7 +186,7 @@ public class TestJspHelper {
     
     // expected case
     request = getMockRequest(null, user, null);
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     ugi = JspHelper.getUGI(context, request, conf);
     Assert.assertNotNull(ugi.getRealUser());
@@ -195,7 +196,7 @@ public class TestJspHelper {
     
     // can't proxy with a token!
     request = getMockRequest(null, null, "rogue");
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     try {
       JspHelper.getUGI(context, request, conf);
@@ -208,7 +209,7 @@ public class TestJspHelper {
     
     // can't proxy with a token!
     request = getMockRequest(null, user, "rogue");
-    when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
+    
when(request.getParameter(DelegationUtilsClient.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
     try {
       JspHelper.getUGI(context, request, conf);

http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf6fb091/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
----------------------------------------------------------------------
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
index 37e5be1..c75c722 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tools/TestDelegationTokenRemoteFetcher.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import 
org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
+import org.apache.hadoop.hdfs.security.token.delegation.DelegationUtilsClient;
 import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
 import org.apache.hadoop.hdfs.web.URLConnectionFactory;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
@@ -129,7 +130,7 @@ public class TestDelegationTokenRemoteFetcher {
   @Test
   public void testTokenRenewFail() throws AuthenticationException {
     try {
-      DelegationTokenFetcher.renewDelegationToken(connectionFactory, 
serviceUrl, testToken);
+      DelegationUtilsClient.renewDelegationToken(connectionFactory, 
serviceUrl, testToken);
       fail("Token fetcher shouldn't be able to renew tokens in absense of NN");
     } catch (IOException ex) {
     } 
@@ -141,7 +142,7 @@ public class TestDelegationTokenRemoteFetcher {
   @Test
   public void expectedTokenCancelFail() throws AuthenticationException {
     try {
-      DelegationTokenFetcher.cancelDelegationToken(connectionFactory, 
serviceUrl, testToken);
+      DelegationUtilsClient.cancelDelegationToken(connectionFactory, 
serviceUrl, testToken);
       fail("Token fetcher shouldn't be able to cancel tokens in absense of 
NN");
     } catch (IOException ex) {
     } 
@@ -155,7 +156,7 @@ public class TestDelegationTokenRemoteFetcher {
       throws AuthenticationException, URISyntaxException {
     bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
     try {
-      DelegationTokenFetcher.renewDelegationToken(connectionFactory, new URI(
+      DelegationUtilsClient.renewDelegationToken(connectionFactory, new URI(
           serviceUrl.toString() + "/exception"), createToken(serviceUrl));
       fail("Token fetcher shouldn't be able to renew tokens using an invalid"
           + " NN URL");
@@ -172,7 +173,7 @@ public class TestDelegationTokenRemoteFetcher {
   public void testCancelTokenFromHttp() throws IOException,
       AuthenticationException {
     bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
-    DelegationTokenFetcher.cancelDelegationToken(connectionFactory, serviceUrl,
+    DelegationUtilsClient.cancelDelegationToken(connectionFactory, serviceUrl,
         testToken);
     if (assertionError != null)
       throw assertionError;
@@ -186,7 +187,7 @@ public class TestDelegationTokenRemoteFetcher {
       NumberFormatException, AuthenticationException {
     bootstrap = startHttpServer(httpPort, testToken, serviceUrl);
     assertTrue("testRenewTokenFromHttp error",
-        Long.parseLong(EXP_DATE) == 
DelegationTokenFetcher.renewDelegationToken(
+        Long.parseLong(EXP_DATE) == DelegationUtilsClient.renewDelegationToken(
             connectionFactory, serviceUrl, testToken));
     if (assertionError != null)
       throw assertionError;

Reply via email to