HADOOP-13597. Switch KMS from Tomcat to Jetty. Contributed by John Zhuge.

Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5d182949
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5d182949
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5d182949

Branch: refs/heads/HADOOP-13345
Commit: 5d182949badb2eb80393de7ba3838102d006488b
Parents: 8850c05
Author: Xiao Chen <x...@apache.org>
Authored: Thu Jan 5 17:21:57 2017 -0800
Committer: Xiao Chen <x...@apache.org>
Committed: Thu Jan 5 17:21:57 2017 -0800

----------------------------------------------------------------------
 .../resources/assemblies/hadoop-kms-dist.xml    |  25 +-
 .../server/AuthenticationFilter.java            |   6 +-
 .../src/main/bin/hadoop-functions.sh            |  51 ++--
 .../hadoop/conf/ConfigurationWithLogging.java   | 113 +++++++++
 .../org/apache/hadoop/http/HttpServer2.java     | 163 ++++++++----
 .../apache/hadoop/security/ssl/SSLFactory.java  |  64 +++--
 .../src/site/markdown/CommandsManual.md         |   8 +-
 .../src/test/scripts/hadoop_mkdir.bats          |  42 ++++
 .../src/test/scripts/hadoop_using_envvar.bats   |  33 +++
 .../dev-support/findbugsExcludeFile.xml         |   2 +-
 hadoop-common-project/hadoop-kms/pom.xml        | 160 +-----------
 .../hadoop-kms/src/main/conf/kms-env.sh         |  48 ++--
 .../src/main/conf/kms-log4j.properties          |   3 +-
 .../hadoop-kms/src/main/conf/kms-site.xml       | 167 +------------
 .../crypto/key/kms/server/KMSConfiguration.java |  16 ++
 .../crypto/key/kms/server/KMSJMXServlet.java    |  36 ---
 .../hadoop/crypto/key/kms/server/KMSWebApp.java |  23 +-
 .../crypto/key/kms/server/KMSWebServer.java     | 155 ++++++++++++
 .../hadoop-kms/src/main/libexec/kms-config.sh   |  72 ------
 .../main/libexec/shellprofile.d/hadoop-kms.sh   |  57 +++++
 .../src/main/resources/kms-default.xml          | 248 +++++++++++++++++++
 .../main/resources/webapps/kms/WEB-INF/web.xml  |  68 +++++
 .../main/resources/webapps/static/index.html    |  35 +++
 .../hadoop-kms/src/main/sbin/kms.sh             | 116 +++------
 .../src/main/tomcat/ROOT/WEB-INF/web.xml        |  16 --
 .../hadoop-kms/src/main/tomcat/ROOT/index.html  |  27 --
 .../src/main/tomcat/logging.properties          |  67 -----
 .../hadoop-kms/src/main/tomcat/server.xml       | 155 ------------
 .../src/main/tomcat/ssl-server.xml.conf         | 136 ----------
 .../hadoop-kms/src/main/webapp/WEB-INF/web.xml  |  78 ------
 .../hadoop-kms/src/site/configuration.xsl       |  49 ++++
 .../hadoop-kms/src/site/markdown/index.md.vm    | 122 ++++++---
 .../hadoop/crypto/key/kms/server/MiniKMS.java   | 118 ++-------
 33 files changed, 1235 insertions(+), 1244 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
----------------------------------------------------------------------
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
index 5830bba..ff6f990 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-kms-dist.xml
@@ -21,6 +21,14 @@
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
   <fileSets>
+    <!-- Jar file -->
+    <fileSet>
+      <directory>target</directory>
+      <outputDirectory>/share/hadoop/common</outputDirectory>
+      <includes>
+        <include>${project.artifactId}-${project.version}.jar</include>
+      </includes>
+    </fileSet>
     <!-- Configuration files -->
     <fileSet>
       <directory>${basedir}/src/main/conf</directory>
@@ -41,7 +49,7 @@
       <directory>${basedir}/src/main/libexec</directory>
       <outputDirectory>/libexec</outputDirectory>
       <includes>
-        <include>*</include>
+        <include>**/*</include>
       </includes>
       <fileMode>0755</fileMode>
     </fileSet>
@@ -51,4 +59,19 @@
       <outputDirectory>/share/doc/hadoop/kms</outputDirectory>
     </fileSet>
   </fileSets>
+  <dependencySets>
+    <dependencySet>
+      <useProjectArtifact>false</useProjectArtifact>
+      <outputDirectory>/share/hadoop/common/lib</outputDirectory>
+      <!-- Exclude hadoop artifacts. They will be found via HADOOP* env -->
+      <excludes>
+        <exclude>org.apache.hadoop:hadoop-common</exclude>
+        <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
+        <!-- use slf4j from common to avoid multiple binding warnings -->
+        <exclude>org.slf4j:slf4j-api</exclude>
+        <exclude>org.slf4j:slf4j-log4j12</exclude>
+        <exclude>org.hsqldb:hsqldb</exclude>
+      </excludes>
+    </dependencySet>
+  </dependencySets>
 </assembly>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
index 264d991..b10fc84 100644
--- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/AuthenticationFilter.java
@@ -142,7 +142,7 @@ public class AuthenticationFilter implements Filter {
   private String cookieDomain;
   private String cookiePath;
   private boolean isCookiePersistent;
-  private boolean isInitializedByTomcat;
+  private boolean destroySecretProvider;
 
   /**
    * <p>Initializes the authentication filter and signer secret provider.</p>
@@ -209,7 +209,7 @@ public class AuthenticationFilter implements Filter {
         secretProvider = constructSecretProvider(
             filterConfig.getServletContext(),
             config, false);
-        isInitializedByTomcat = true;
+        destroySecretProvider = true;
       } catch (Exception ex) {
         throw new ServletException(ex);
       }
@@ -356,7 +356,7 @@ public class AuthenticationFilter implements Filter {
       authHandler.destroy();
       authHandler = null;
     }
-    if (secretProvider != null && isInitializedByTomcat) {
+    if (secretProvider != null && destroySecretProvider) {
       secretProvider.destroy();
       secretProvider = null;
     }
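
The renamed flag records ownership rather than a container-specific code
path: the filter tears down the secret provider only when it constructed
the provider itself. Consolidated from the hunks above (a sketch, not
additional patch content):

    // In init(): when no provider is supplied, build one and note that
    // this filter is responsible for destroying it.
    secretProvider = constructSecretProvider(
        filterConfig.getServletContext(), config, false);
    destroySecretProvider = true;

    // In destroy(): only destroy what this filter created.
    if (secretProvider != null && destroySecretProvider) {
      secretProvider.destroy();
      secretProvider = null;
    }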

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index b6e2b59..3151023 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -262,6 +262,39 @@ function hadoop_deprecate_envvar
   fi
 }
 
+## @description  Declare that `var` is being used and print its value.
+## @audience     public
+## @stability    stable
+## @replaceable  yes
+## @param        var
+function hadoop_using_envvar
+{
+  local var=$1
+  local val=${!var}
+
+  if [[ -n "${val}" ]]; then
+    hadoop_debug "${var} = ${val}"
+  fi
+}
+
+## @description  Create the directory 'dir'.
+## @audience     public
+## @stability    stable
+## @replaceable  yes
+## @param        dir
+function hadoop_mkdir
+{
+  local dir=$1
+
+  if [[ ! -w "${dir}" ]] && [[ ! -d "${dir}" ]]; then
+    hadoop_error "WARNING: ${dir} does not exist. Creating."
+    if ! mkdir -p "${dir}"; then
+      hadoop_error "ERROR: Unable to create ${dir}. Aborting."
+      exit 1
+    fi
+  fi
+}
+
 ## @description  Bootstraps the Hadoop shell environment
 ## @audience     private
 ## @stability    evolving
@@ -1396,14 +1429,7 @@ function hadoop_verify_piddir
     hadoop_error "No pid directory defined."
     exit 1
   fi
-  if [[ ! -w "${HADOOP_PID_DIR}" ]] && [[ ! -d "${HADOOP_PID_DIR}" ]]; then
-    hadoop_error "WARNING: ${HADOOP_PID_DIR} does not exist. Creating."
-    mkdir -p "${HADOOP_PID_DIR}" > /dev/null 2>&1
-    if [[ $? -gt 0 ]]; then
-      hadoop_error "ERROR: Unable to create ${HADOOP_PID_DIR}. Aborting."
-      exit 1
-    fi
-  fi
+  hadoop_mkdir "${HADOOP_PID_DIR}"
   touch "${HADOOP_PID_DIR}/$$" >/dev/null 2>&1
   if [[ $? -gt 0 ]]; then
     hadoop_error "ERROR: Unable to write in ${HADOOP_PID_DIR}. Aborting."
@@ -1421,14 +1447,7 @@ function hadoop_verify_logdir
     hadoop_error "No log directory defined."
     exit 1
   fi
-  if [[ ! -w "${HADOOP_LOG_DIR}" ]] && [[ ! -d "${HADOOP_LOG_DIR}" ]]; then
-    hadoop_error "WARNING: ${HADOOP_LOG_DIR} does not exist. Creating."
-    mkdir -p "${HADOOP_LOG_DIR}" > /dev/null 2>&1
-    if [[ $? -gt 0 ]]; then
-      hadoop_error "ERROR: Unable to create ${HADOOP_LOG_DIR}. Aborting."
-      exit 1
-    fi
-  fi
+  hadoop_mkdir "${HADOOP_LOG_DIR}"
   touch "${HADOOP_LOG_DIR}/$$" >/dev/null 2>&1
   if [[ $? -gt 0 ]]; then
     hadoop_error "ERROR: Unable to write in ${HADOOP_LOG_DIR}. Aborting."

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java
new file mode 100644
index 0000000..8a5e054
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigurationWithLogging.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.conf;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Logs access to {@link Configuration}.
+ * Sensitive data will be redacted.
+ */
+@InterfaceAudience.Private
+public class ConfigurationWithLogging extends Configuration {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(ConfigurationWithLogging.class);
+
+  private final Logger log;
+  private final ConfigRedactor redactor;
+
+  public ConfigurationWithLogging(Configuration conf) {
+    super(conf);
+    log = LOG;
+    redactor = new ConfigRedactor(conf);
+  }
+
+  /**
+   * @see Configuration#get(String).
+   */
+  @Override
+  public String get(String name) {
+    String value = super.get(name);
+    log.info("Got {} = '{}'", name, redactor.redact(name, value));
+    return value;
+  }
+
+  /**
+   * @see Configuration#get(String, String).
+   */
+  @Override
+  public String get(String name, String defaultValue) {
+    String value = super.get(name, defaultValue);
+    log.info("Got {} = '{}' (default '{}')", name,
+        redactor.redact(name, value), redactor.redact(name, defaultValue));
+    return value;
+  }
+
+  /**
+   * @see Configuration#getBoolean(String, boolean).
+   */
+  @Override
+  public boolean getBoolean(String name, boolean defaultValue) {
+    boolean value = super.getBoolean(name, defaultValue);
+    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+    return value;
+  }
+
+  /**
+   * @see Configuration#getFloat(String, float).
+   */
+  @Override
+  public float getFloat(String name, float defaultValue) {
+    float value = super.getFloat(name, defaultValue);
+    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+    return value;
+  }
+
+  /**
+   * @see Configuration#getInt(String, int).
+   */
+  @Override
+  public int getInt(String name, int defaultValue) {
+    int value = super.getInt(name, defaultValue);
+    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+    return value;
+  }
+
+  /**
+   * @see Configuration#getLong(String, long).
+   */
+  @Override
+  public long getLong(String name, long defaultValue) {
+    long value = super.getLong(name, defaultValue);
+    log.info("Got {} = '{}' (default '{}')", name, value, defaultValue);
+    return value;
+  }
+
+  /**
+   * @see Configuration#set(String, String, String).
+   */
+  @Override
+  public void set(String name, String value, String source) {
+    log.info("Set {} to '{}'{}", name, redactor.redact(name, value),
+        source == null ? "" : " from " + source);
+    super.set(name, value, source);
+  }
+}
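
ConfigurationWithLogging is what lets the new Jetty-based KMS log each
configuration lookup at startup, with sensitive values run through
ConfigRedactor. A minimal usage sketch (property names and defaults taken
from KMSConfiguration elsewhere in this patch; the wiring is illustrative):

    Configuration conf =
        new ConfigurationWithLogging(new Configuration(false));
    // Each lookup below is logged at INFO, e.g.
    // "Got hadoop.kms.http.port = '9600' (default '9600')".
    String host = conf.get("hadoop.kms.http.host", "0.0.0.0");
    int port = conf.getInt("hadoop.kms.http.port", 9600);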

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index b930f75..6e21592 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -17,6 +17,10 @@
  */
 package org.apache.hadoop.http;
 
+import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
+import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+
+import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InterruptedIOException;
@@ -45,7 +49,10 @@ import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletRequestWrapper;
 import javax.servlet.http.HttpServletResponse;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.HadoopIllegalArgumentException;
@@ -54,14 +61,15 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.ConfServlet;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
-import org.apache.hadoop.security.AuthenticationFilterInitializer;
-import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.jmx.JMXJsonServlet;
 import org.apache.hadoop.log.LogLevel;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
+import org.apache.hadoop.security.authentication.util.SignerSecretProvider;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Shell;
 import org.eclipse.jetty.http.HttpVersion;
@@ -90,16 +98,9 @@ import org.eclipse.jetty.servlet.ServletHolder;
 import org.eclipse.jetty.servlet.ServletMapping;
 import org.eclipse.jetty.util.ArrayUtil;
 import org.eclipse.jetty.util.MultiException;
-import org.eclipse.jetty.webapp.WebAppContext;
-import org.eclipse.jetty.util.thread.QueuedThreadPool;
-
-import com.google.common.base.Preconditions;
-import com.google.common.collect.Lists;
-import com.sun.jersey.spi.container.servlet.ServletContainer;
 import org.eclipse.jetty.util.ssl.SslContextFactory;
-
-import static org.apache.hadoop.fs.CommonConfigurationKeys.DEFAULT_HADOOP_HTTP_STATIC_USER;
-import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_USER;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.eclipse.jetty.webapp.WebAppContext;
 
 /**
  * Create a Jetty embedded server to answer http requests. The primary goal is
@@ -116,9 +117,22 @@ import static org.apache.hadoop.fs.CommonConfigurationKeys.HADOOP_HTTP_STATIC_US
 public final class HttpServer2 implements FilterContainer {
   public static final Log LOG = LogFactory.getLog(HttpServer2.class);
 
+  public static final String HTTP_SCHEME = "http";
+  public static final String HTTPS_SCHEME = "https";
+
+  public static final String HTTP_MAX_REQUEST_HEADER_SIZE_KEY =
+      "hadoop.http.max.request.header.size";
+  public static final int HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT = 65536;
+  public static final String HTTP_MAX_RESPONSE_HEADER_SIZE_KEY =
+      "hadoop.http.max.response.header.size";
+  public static final int HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT = 65536;
+  public static final String HTTP_MAX_THREADS_KEY = "hadoop.http.max.threads";
+  public static final String HTTP_TEMP_DIR_KEY = "hadoop.http.temp.dir";
+
   static final String FILTER_INITIALIZER_PROPERTY
       = "hadoop.http.filter.initializers";
-  public static final String HTTP_MAX_THREADS = "hadoop.http.max.threads";
+  @Deprecated
+  public static final String HTTP_MAX_THREADS = HTTP_MAX_THREADS_KEY;
 
   // The ServletContext attribute where the daemon Configuration
   // gets stored.
@@ -158,6 +172,7 @@ public final class HttpServer2 implements FilterContainer {
     private ArrayList<URI> endpoints = Lists.newArrayList();
     private String name;
     private Configuration conf;
+    private Configuration sslConf;
     private String[] pathSpecs;
     private AccessControlList adminsAcl;
     private boolean securityEnabled = false;
@@ -253,6 +268,15 @@ public final class HttpServer2 implements FilterContainer {
       return this;
     }
 
+    /**
+     * Specify the SSL configuration to load. This API provides an alternative
+     * to keyStore/keyPassword/trustStore.
+     */
+    public Builder setSSLConf(Configuration sslCnf) {
+      this.sslConf = sslCnf;
+      return this;
+    }
+
     public Builder setPathSpec(String[] pathSpec) {
       this.pathSpecs = pathSpec;
       return this;
@@ -315,7 +339,45 @@ public final class HttpServer2 implements FilterContainer {
       return this;
     }
 
+    /**
+     * A wrapper of {@link Configuration#getPassword(String)}. It returns
+     * <code>String</code> instead of <code>char[]</code> and throws
+     * {@link IOException} when the password is not found.
+     *
+     * @param conf the configuration
+     * @param name the property name
+     * @return the password string
+     */
+    private static String getPassword(Configuration conf, String name)
+        throws IOException {
+      char[] passchars = conf.getPassword(name);
+      if (passchars == null) {
+        throw new IOException("Password " + name + " not found");
+      }
+      return new String(passchars);
+    }
 
+    /**
+     * Load SSL properties from the SSL configuration.
+     */
+    private void loadSSLConfiguration() throws IOException {
+      if (sslConf == null) {
+        return;
+      }
+      needsClientAuth(sslConf.getBoolean(
+          SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH,
+          SSLFactory.SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT));
+      keyStore(sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION),
+          getPassword(sslConf, SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD),
+          sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_TYPE,
+              SSLFactory.SSL_SERVER_KEYSTORE_TYPE_DEFAULT));
+      keyPassword(getPassword(sslConf,
+          SSLFactory.SSL_SERVER_KEYSTORE_KEYPASSWORD));
+      trustStore(sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_LOCATION),
+          getPassword(sslConf, SSLFactory.SSL_SERVER_TRUSTSTORE_PASSWORD),
+          sslConf.get(SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE,
+              SSLFactory.SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT));
+    }
 
     public HttpServer2 build() throws IOException {
       Preconditions.checkNotNull(name, "name is not set");
@@ -336,14 +398,32 @@ public final class HttpServer2 implements FilterContainer {
       }
 
       for (URI ep : endpoints) {
+        if (HTTPS_SCHEME.equals(ep.getScheme())) {
+          loadSSLConfiguration();
+          break;
+        }
+      }
+
+      int requestHeaderSize = conf.getInt(
+          HTTP_MAX_REQUEST_HEADER_SIZE_KEY,
+          HTTP_MAX_REQUEST_HEADER_SIZE_DEFAULT);
+      int responseHeaderSize = conf.getInt(
+          HTTP_MAX_RESPONSE_HEADER_SIZE_KEY,
+          HTTP_MAX_RESPONSE_HEADER_SIZE_DEFAULT);
+
+      HttpConfiguration httpConfig = new HttpConfiguration();
+      httpConfig.setRequestHeaderSize(requestHeaderSize);
+      httpConfig.setResponseHeaderSize(responseHeaderSize);
+
+      for (URI ep : endpoints) {
         final ServerConnector connector;
         String scheme = ep.getScheme();
-        if ("http".equals(scheme)) {
-          connector =
-              HttpServer2.createDefaultChannelConnector(server.webServer);
-        } else if ("https".equals(scheme)) {
-          connector = createHttpsChannelConnector(server.webServer);
-
+        if (HTTP_SCHEME.equals(scheme)) {
+          connector = createHttpChannelConnector(server.webServer,
+              httpConfig);
+        } else if (HTTPS_SCHEME.equals(scheme)) {
+          connector = createHttpsChannelConnector(server.webServer,
+              httpConfig);
         } else {
           throw new HadoopIllegalArgumentException(
               "unknown scheme for endpoint:" + ep);
@@ -356,16 +436,20 @@ public final class HttpServer2 implements FilterContainer {
       return server;
     }
 
-    private ServerConnector createHttpsChannelConnector(Server server) {
+    private ServerConnector createHttpChannelConnector(
+        Server server, HttpConfiguration httpConfig) {
       ServerConnector conn = new ServerConnector(server);
-      HttpConfiguration httpConfig = new HttpConfiguration();
-      httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE);
-      httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE);
-      httpConfig.setSecureScheme("https");
-      httpConfig.addCustomizer(new SecureRequestCustomizer());
       ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig);
       conn.addConnectionFactory(connFactory);
       configureChannelConnector(conn);
+      return conn;
+    }
+
+    private ServerConnector createHttpsChannelConnector(
+        Server server, HttpConfiguration httpConfig) {
+      httpConfig.setSecureScheme(HTTPS_SCHEME);
+      httpConfig.addCustomizer(new SecureRequestCustomizer());
+      ServerConnector conn = createHttpChannelConnector(server, httpConfig);
 
       SslContextFactory sslContextFactory = new SslContextFactory();
       sslContextFactory.setNeedClientAuth(needsClientAuth);
@@ -397,7 +481,7 @@ public final class HttpServer2 implements FilterContainer {
     this.webServer = new Server();
     this.adminsAcl = b.adminsAcl;
     this.handlers = new HandlerCollection();
-    this.webAppContext = createWebAppContext(b.name, b.conf, adminsAcl, appDir);
+    this.webAppContext = createWebAppContext(b, adminsAcl, appDir);
     this.xFrameOptionIsEnabled = b.xFrameEnabled;
     this.xFrameOption = b.xFrameOption;
 
@@ -482,8 +566,8 @@ public final class HttpServer2 implements FilterContainer {
     listeners.add(connector);
   }
 
-  private static WebAppContext createWebAppContext(String name,
-      Configuration conf, AccessControlList adminsAcl, final String appDir) {
+  private static WebAppContext createWebAppContext(Builder b,
+      AccessControlList adminsAcl, final String appDir) {
     WebAppContext ctx = new WebAppContext();
     ctx.setDefaultsDescriptor(null);
     ServletHolder holder = new ServletHolder(new DefaultServlet());
@@ -496,10 +580,15 @@ public final class HttpServer2 implements FilterContainer {
     holder.setInitParameters(params);
     ctx.setWelcomeFiles(new String[] {"index.html"});
     ctx.addServlet(holder, "/");
-    ctx.setDisplayName(name);
+    ctx.setDisplayName(b.name);
     ctx.setContextPath("/");
-    ctx.setWar(appDir + "/" + name);
-    ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
+    ctx.setWar(appDir + "/" + b.name);
+    String tempDirectory = b.conf.get(HTTP_TEMP_DIR_KEY);
+    if (tempDirectory != null && !tempDirectory.isEmpty()) {
+      ctx.setTempDirectory(new File(tempDirectory));
+      ctx.setAttribute("javax.servlet.context.tempdir", tempDirectory);
+    }
+    ctx.getServletContext().setAttribute(CONF_CONTEXT_ATTRIBUTE, b.conf);
     ctx.getServletContext().setAttribute(ADMINS_ACL, adminsAcl);
     addNoCacheFilter(ctx);
     return ctx;
@@ -541,18 +630,6 @@ public final class HttpServer2 implements FilterContainer {
     }
   }
 
-  @InterfaceAudience.Private
-  public static ServerConnector createDefaultChannelConnector(Server server) {
-    ServerConnector conn = new ServerConnector(server);
-    HttpConfiguration httpConfig = new HttpConfiguration();
-    httpConfig.setRequestHeaderSize(JettyUtils.HEADER_SIZE);
-    httpConfig.setResponseHeaderSize(JettyUtils.HEADER_SIZE);
-    ConnectionFactory connFactory = new HttpConnectionFactory(httpConfig);
-    conn.addConnectionFactory(connFactory);
-    configureChannelConnector(conn);
-    return conn;
-  }
-
   /** Get an array of FilterConfiguration specified in the conf */
   private static FilterInitializer[] getFilterInitializers(Configuration conf) {
     if (conf == null) {
@@ -1056,7 +1133,7 @@ public final class HttpServer2 implements FilterContainer {
     }
 
     try {
-      // explicitly destroy the secrete provider
+      // explicitly destroy the secret provider
       secretProvider.destroy();
       // clear & stop webAppContext attributes to avoid memory leaks.
       webAppContext.clearAttributes();
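
Taken together, these changes let a daemon bring up HTTP or HTTPS endpoints
from an ssl-server.xml style configuration instead of passing keystore
fields one by one, with header sizes and the webapp temp directory read
from the new hadoop.http.* keys. A sketch of how a caller such as the new
KMSWebServer might drive the Builder (the endpoint and names here are
illustrative):

    Configuration conf = new Configuration();
    Configuration sslConf = new Configuration(false);
    sslConf.addResource("ssl-server.xml");

    HttpServer2 server = new HttpServer2.Builder()
        .setName("kms")
        .setConf(conf)
        .setSSLConf(sslConf)  // loaded only if an https endpoint is present
        .addEndpoint(URI.create("https://0.0.0.0:9600"))
        .build();
    server.start();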

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
index 95cba80..cda26a5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ssl/SSLFactory.java
@@ -60,27 +60,61 @@ public class SSLFactory implements ConnectionConfigurator {
   @InterfaceAudience.Private
   public static enum Mode { CLIENT, SERVER }
 
+  public static final String SSL_CLIENT_CONF_KEY = "hadoop.ssl.client.conf";
+  public static final String SSL_CLIENT_CONF_DEFAULT = "ssl-client.xml";
+  public static final String SSL_SERVER_CONF_KEY = "hadoop.ssl.server.conf";
+  public static final String SSL_SERVER_CONF_DEFAULT = "ssl-server.xml";
+
   public static final String SSL_REQUIRE_CLIENT_CERT_KEY =
-    "hadoop.ssl.require.client.cert";
+      "hadoop.ssl.require.client.cert";
+  public static final boolean SSL_REQUIRE_CLIENT_CERT_DEFAULT = false;
   public static final String SSL_HOSTNAME_VERIFIER_KEY =
-    "hadoop.ssl.hostname.verifier";
-  public static final String SSL_CLIENT_CONF_KEY =
-    "hadoop.ssl.client.conf";
-  public static final String SSL_SERVER_CONF_KEY =
-    "hadoop.ssl.server.conf";
-  public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509";
+      "hadoop.ssl.hostname.verifier";
+  public static final String SSL_ENABLED_PROTOCOLS_KEY =
+      "hadoop.ssl.enabled.protocols";
+  public static final String SSL_ENABLED_PROTOCOLS_DEFAULT =
+      "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2";
+
+  public static final String SSL_SERVER_NEED_CLIENT_AUTH =
+      "ssl.server.need.client.auth";
+  public static final boolean SSL_SERVER_NEED_CLIENT_AUTH_DEFAULT = false;
+
+  public static final String SSL_SERVER_KEYSTORE_LOCATION =
+      "ssl.server.keystore.location";
+  public static final String SSL_SERVER_KEYSTORE_PASSWORD =
+      "ssl.server.keystore.password";
+  public static final String SSL_SERVER_KEYSTORE_TYPE =
+      "ssl.server.keystore.type";
+  public static final String SSL_SERVER_KEYSTORE_TYPE_DEFAULT = "jks";
+  public static final String SSL_SERVER_KEYSTORE_KEYPASSWORD =
+      "ssl.server.keystore.keypassword";
+
+  public static final String SSL_SERVER_TRUSTSTORE_LOCATION =
+      "ssl.server.truststore.location";
+  public static final String SSL_SERVER_TRUSTSTORE_PASSWORD =
+      "ssl.server.truststore.password";
+  public static final String SSL_SERVER_TRUSTSTORE_TYPE =
+      "ssl.server.truststore.type";
+  public static final String SSL_SERVER_TRUSTSTORE_TYPE_DEFAULT = "jks";
+
+  public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST =
+      "ssl.server.exclude.cipher.list";
 
-  public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT = false;
+  @Deprecated
+  public static final boolean DEFAULT_SSL_REQUIRE_CLIENT_CERT =
+      SSL_REQUIRE_CLIENT_CERT_DEFAULT;
+
+  public static final String SSLCERTIFICATE = IBM_JAVA?"ibmX509":"SunX509";
 
   public static final String KEYSTORES_FACTORY_CLASS_KEY =
     "hadoop.ssl.keystores.factory.class";
 
+  @Deprecated
   public static final String SSL_ENABLED_PROTOCOLS =
-      "hadoop.ssl.enabled.protocols";
+      SSL_ENABLED_PROTOCOLS_KEY;
+  @Deprecated
   public static final String DEFAULT_SSL_ENABLED_PROTOCOLS =
-      "TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2";
-  public static final String SSL_SERVER_EXCLUDE_CIPHER_LIST =
-      "ssl.server.exclude.cipher.list";
+      SSL_ENABLED_PROTOCOLS_DEFAULT;
 
   private Configuration conf;
   private Mode mode;
@@ -131,9 +165,11 @@ public class SSLFactory implements ConnectionConfigurator {
     sslConf.setBoolean(SSL_REQUIRE_CLIENT_CERT_KEY, requireClientCert);
     String sslConfResource;
     if (mode == Mode.CLIENT) {
-      sslConfResource = conf.get(SSL_CLIENT_CONF_KEY, "ssl-client.xml");
+      sslConfResource = conf.get(SSL_CLIENT_CONF_KEY,
+          SSL_CLIENT_CONF_DEFAULT);
     } else {
-      sslConfResource = conf.get(SSL_SERVER_CONF_KEY, "ssl-server.xml");
+      sslConfResource = conf.get(SSL_SERVER_CONF_KEY,
+          SSL_SERVER_CONF_DEFAULT);
     }
     sslConf.addResource(sslConfResource);
     return sslConf;
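
The new ssl.server.* constants give HttpServer2#loadSSLConfiguration named
keys for everything it reads out of ssl-server.xml. For reference, the
keystore lookups reduce to the following (a sketch using constants from
this diff):

    String location = sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_LOCATION);
    String type = sslConf.get(SSLFactory.SSL_SERVER_KEYSTORE_TYPE,
        SSLFactory.SSL_SERVER_KEYSTORE_TYPE_DEFAULT);  // "jks"
    // Passwords go through Configuration#getPassword, so they may come
    // from a credential provider instead of plain text.
    char[] password =
        sslConf.getPassword(SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD);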

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
index 27a858a..8d98e91 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/CommandsManual.md
@@ -207,6 +207,12 @@ NOTE: Some KeyProviders (e.g. org.apache.hadoop.crypto.key.JavaKeyStoreProvider)
 
 NOTE: Some KeyProviders do not directly execute a key deletion (e.g. performs a soft-delete instead, or delay the actual deletion, to prevent mistake). In these cases, one may encounter errors when creating/deleting a key with the same name after deleting it. Please check the underlying KeyProvider for details.
 
+### `kms`
+
+Usage: `hadoop kms`
+
+Run KMS, the Key Management Server.
+
 ### `trace`
 
 View and modify Hadoop tracing settings. See the [Tracing 
Guide](./Tracing.html).
@@ -267,8 +273,6 @@ This command works by sending a HTTP/HTTPS request to the daemon's internal Jett
     * node manager
     * Timeline server
 
-However, the command does not support KMS server, because its web interface is based on Tomcat, which does not support the servlet.
-
 
 Files
 -----

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats
new file mode 100644
index 0000000..90a4f1a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_mkdir.bats
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_mkdir (create)" {
+  DIR=${BATS_TMPDIR}/nodir
+  rm -fr ${DIR}
+  run hadoop_mkdir ${DIR}
+  [ "${status}" = 0 ]
+  [ "${output}" = "WARNING: ${DIR} does not exist. Creating." ]
+}
+
+
+@test "hadoop_mkdir (exists)" {
+  DIR=${BATS_TMPDIR}/exists
+  mkdir -p ${DIR}
+  run hadoop_mkdir ${DIR}
+  [ "${status}" = 0 ]
+  [ -z "${output}" ]
+}
+
+
+@test "hadoop_mkdir (failed)" {
+  DIR=${BATS_TMPDIR}/readonly_dir/dir
+  mkdir -p ${BATS_TMPDIR}/readonly_dir
+  chmod a-w ${BATS_TMPDIR}/readonly_dir
+  run hadoop_mkdir ${DIR}
+  [ "${status}" != 0 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats
new file mode 100644
index 0000000..8f8e937
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_using_envvar.bats
@@ -0,0 +1,33 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_using_envvar (has value)" {
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  VAR=value
+  run hadoop_using_envvar VAR
+  [ "${status}" = 0 ]
+  [ "${output}" = "DEBUG: VAR = value" ]
+}
+
+
+@test "hadoop_using_envvar (no value)" {
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  VAR=
+  run hadoop_using_envvar VAR
+  [ "${status}" = 0 ]
+  [ -z "${output}" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
index 929936d..f864c03 100644
--- a/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-kms/dev-support/findbugsExcludeFile.xml
@@ -39,7 +39,7 @@
     <Bug pattern="DM_EXIT"/>
   </Match>
   <!--
-    KMS wants to log the exception before it's thrown to tomcat and disappear.
+    KMS wants to log the exception before it's thrown to Jetty and disappears.
   -->
   <Match>
     <Class name="org.apache.hadoop.crypto.key.kms.server.KMS"/>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/pom.xml b/hadoop-common-project/hadoop-kms/pom.xml
index 73b8339..41c36e8 100644
--- a/hadoop-common-project/hadoop-kms/pom.xml
+++ b/hadoop-common-project/hadoop-kms/pom.xml
@@ -27,20 +27,11 @@
   </parent>
   <artifactId>hadoop-kms</artifactId>
   <version>3.0.0-alpha2-SNAPSHOT</version>
-  <packaging>war</packaging>
+  <packaging>jar</packaging>
 
   <name>Apache Hadoop KMS</name>
   <description>Apache Hadoop KMS</description>
 
-  <properties>
-    <kms.tomcat.dist.dir>
-      ${project.build.directory}/${project.artifactId}-${project.version}/share/hadoop/kms/tomcat
-    </kms.tomcat.dist.dir>
-    <tomcat.download.url>
-      http://archive.apache.org/dist/tomcat/tomcat-6/v${tomcat.version}/bin/apache-tomcat-${tomcat.version}.tar.gz
-    </tomcat.download.url>
-  </properties>
-
   <dependencies>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -80,12 +71,14 @@
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>javax.servlet-api</artifactId>
-      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jetty</groupId>
       <artifactId>jetty-server</artifactId>
-      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-webapp</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -101,14 +94,6 @@
           <artifactId>commons-httpclient</artifactId>
         </exclusion>
         <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>javax.servlet</groupId>
           <artifactId>javax.servlet-api</artifactId>
         </exclusion>
@@ -229,66 +214,21 @@
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
           <execution>
-            <id>create-web-xmls</id>
-            <phase>generate-test-resources</phase>
+            <id>site</id>
+            <phase>site</phase>
             <goals>
               <goal>run</goal>
             </goals>
             <configuration>
               <target>
-                <mkdir dir="${project.build.directory}/test-classes/kms-webapp"/>
-
-                <copy todir="${project.build.directory}/test-classes/kms-webapp">
-                  <fileset dir="${basedir}/src/main/webapp"/>
-                </copy>
+                <xslt in="${basedir}/src/main/resources/kms-default.xml"
+                      out="${project.build.directory}/site/kms-default.html"
+                      style="${basedir}/src/site/configuration.xsl"/>
               </target>
             </configuration>
           </execution>
         </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-war-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>default-war</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>war</goal>
-            </goals>
-            <configuration>
-              <archiveClasses>true</archiveClasses>
-              <warName>kms</warName>
-              <webappDirectory>${project.build.directory}/kms
-              </webappDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-jar-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>prepare-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>jar</goal>
-            </goals>
-            <configuration>
-              <classifier>classes</classifier>
-            </configuration>
-          </execution>
-          <execution>
-            <id>prepare-test-jar</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>test-jar</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
+      </plugin>      <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>findbugs-maven-plugin</artifactId>
         <configuration>
@@ -360,84 +300,6 @@
               </execution>
             </executions>
           </plugin>
-          <!-- Downloading Tomcat TAR.GZ, using downloads/ dir to avoid downloading over an over -->
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <executions>
-              <execution>
-                <id>dist</id>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <phase>package</phase>
-                <configuration>
-                  <target>
-                    <mkdir dir="downloads"/>
-                    <get
-                      src="${tomcat.download.url}"
-                      dest="downloads/apache-tomcat-${tomcat.version}.tar.gz"
-                      verbose="true" skipexisting="true"/>
-                    <delete dir="${project.build.directory}/tomcat.exp"/>
-                    <mkdir dir="${project.build.directory}/tomcat.exp"/>
-
-                    <!-- Using Unix script to preserve file permissions -->
-                    <echo file="${project.build.directory}/tomcat-untar.sh">
-                      cd "${project.build.directory}/tomcat.exp"
-                      gzip -cd ../../downloads/apache-tomcat-${tomcat.version}.tar.gz | tar xf -
-                    </echo>
-                    <exec executable="${shell-executable}" dir="${project.build.directory}"
-                          failonerror="true">
-                      <arg line="./tomcat-untar.sh"/>
-                    </exec>
-
-                    <move
-                      file="${project.build.directory}/tomcat.exp/apache-tomcat-${tomcat.version}"
-                      tofile="${kms.tomcat.dist.dir}"/>
-                    <delete dir="${project.build.directory}/tomcat.exp"/>
-                    <delete dir="${kms.tomcat.dist.dir}/webapps"/>
-                    <mkdir dir="${kms.tomcat.dist.dir}/webapps"/>
-                    <delete file="${kms.tomcat.dist.dir}/conf/server.xml"/>
-                    <copy file="${basedir}/src/main/tomcat/server.xml"
-                          toDir="${kms.tomcat.dist.dir}/conf"/>
-                    <delete file="${kms.tomcat.dist.dir}/conf/ssl-server.xml.conf"/>
-                    <copy file="${basedir}/src/main/tomcat/ssl-server.xml.conf"
-                          toDir="${kms.tomcat.dist.dir}/conf"/>
-                    <delete
-                      file="${kms.tomcat.dist.dir}/conf/logging.properties"/>
-                    <copy file="${basedir}/src/main/tomcat/logging.properties"
-                          toDir="${kms.tomcat.dist.dir}/conf"/>
-                    <copy toDir="${kms.tomcat.dist.dir}/webapps/ROOT">
-                      <fileset dir="${basedir}/src/main/tomcat/ROOT"/>
-                    </copy>
-                    <copy toDir="${kms.tomcat.dist.dir}/webapps/kms">
-                      <fileset dir="${project.build.directory}/kms"/>
-                    </copy>
-                  </target>
-                </configuration>
-              </execution>
-              <execution>
-                <id>tar</id>
-                <phase>package</phase>
-                <goals>
-                  <goal>run</goal>
-                </goals>
-                <configuration>
-                  <target if="tar">
-                    <!-- Using Unix script to preserve symlinks -->
-                    <echo file="${project.build.directory}/dist-maketar.sh">
-                      cd "${project.build.directory}"
-                      tar cf - ${project.artifactId}-${project.version} | gzip > ${project.artifactId}-${project.version}.tar.gz
-                    </echo>
-                    <exec executable="${shell-executable}" dir="${project.build.directory}"
-                          failonerror="true">
-                      <arg line="./dist-maketar.sh"/>
-                    </exec>
-                  </target>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
         </plugins>
       </build>
     </profile>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
index e42904d..0528932 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-env.sh
@@ -18,6 +18,14 @@
 # hadoop-env.sh is read prior to this file.
 #
 
+# KMS config directory
+#
+# export KMS_CONFIG=${HADOOP_CONF_DIR}
+
+# KMS log directory
+#
+# export KMS_LOG=${HADOOP_LOG_DIR}
+
 # KMS temporary directory
 #
 # export KMS_TEMP=${HADOOP_HOME}/temp
@@ -26,48 +34,22 @@
 #
 # export KMS_HTTP_PORT=9600
 
-# The Admin port used by KMS
-#
-# export KMS_ADMIN_PORT=$((KMS_HTTP_PORT + 1))
-
-# The maximum number of Tomcat handler threads
+# The maximum number of HTTP handler threads
 #
 # export KMS_MAX_THREADS=1000
 
-# The maximum size of Tomcat HTTP header
+# The maximum size of an HTTP header
 #
 # export KMS_MAX_HTTP_HEADER_SIZE=65536
 
+# Whether SSL is enabled
+#
+# export KMS_SSL_ENABLED=false
+
 # The location of the SSL keystore if using SSL
 #
 # export KMS_SSL_KEYSTORE_FILE=${HOME}/.keystore
 
-#
 # The password of the SSL keystore if using SSL
 #
-# export KMS_SSL_KEYSTORE_PASS=password
-
-
-##
-## Tomcat specific settings
-##
-#
-# Location of tomcat
-#
-# export KMS_CATALINA_HOME=${HADOOP_HOME}/share/hadoop/kms/tomcat
-
-# Java System properties for KMS should be specified in this variable.
-# The java.library.path and hadoop.home.dir properties are automatically
-# configured.  In order to supplement java.library.path,
-# one should add to the JAVA_LIBRARY_PATH env var.
-#
-# export CATALINA_OPTS=
-
-# PID file
-#
-# export CATALINA_PID=${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid
-
-# Output file
-#
-# export CATALINA_OUT=${KMS_LOG}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out
-
+# export KMS_SSL_KEYSTORE_PASS=password
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
index 8e6d909..15ff436 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-log4j.properties
@@ -32,7 +32,6 @@ log4j.appender.kms-audit.layout.ConversionPattern=%d{ISO8601} %m%n
 log4j.logger.kms-audit=INFO, kms-audit
 log4j.additivity.kms-audit=false
 
-log4j.rootLogger=ALL, kms
-log4j.logger.org.apache.hadoop.conf=ERROR
+log4j.rootLogger=INFO, kms
 log4j.logger.org.apache.hadoop=INFO
 log4j.logger.com.sun.jersey.server.wadl.generators.WadlGeneratorJAXBGrammarGenerator=OFF
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
index d188735..85e71c3 100644
--- a/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
+++ b/hadoop-common-project/hadoop-kms/src/main/conf/kms-site.xml
@@ -12,172 +12,9 @@
   See the License for the specific language governing permissions and
   limitations under the License.
 -->
-<configuration>
-
-  <!-- KMS Backend KeyProvider -->
-
-  <property>
-    <name>hadoop.kms.key.provider.uri</name>
-    <value>jceks://file@/${user.home}/kms.keystore</value>
-    <description>
-      URI of the backing KeyProvider for the KMS.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.security.keystore.java-keystore-provider.password-file</name>
-    <value>kms.keystore.password</value>
-    <description>
-      If using the JavaKeyStoreProvider, the file name for the keystore password.
-    </description>
-  </property>
-
-  <!-- KMS Cache -->
-
-  <property>
-    <name>hadoop.kms.cache.enable</name>
-    <value>true</value>
-    <description>
-      Whether the KMS will act as a cache for the backing KeyProvider.
-      When the cache is enabled, operations like getKeyVersion, getMetadata,
-      and getCurrentKey will sometimes return cached data without consulting
-      the backing KeyProvider. Cached values are flushed when keys are deleted
-      or modified.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.cache.timeout.ms</name>
-    <value>600000</value>
-    <description>
-      Expiry time for the KMS key version and key metadata cache, in
-      milliseconds. This affects getKeyVersion and getMetadata.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.current.key.cache.timeout.ms</name>
-    <value>30000</value>
-    <description>
-      Expiry time for the KMS current key cache, in milliseconds. This
-      affects getCurrentKey operations.
-    </description>
-  </property>
-
-  <!-- KMS Audit -->
-
-  <property>
-    <name>hadoop.kms.audit.aggregation.window.ms</name>
-    <value>10000</value>
-    <description>
-      Duplicate audit log events within the aggregation window (specified in
-      ms) are quashed to reduce log traffic. A single message for aggregated
-      events is printed at the end of the window, along with a count of the
-      number of aggregated events.
-    </description>
-  </property>
-
-  <!-- KMS Security -->
-
-  <property>
-    <name>hadoop.kms.authentication.type</name>
-    <value>simple</value>
-    <description>
-      Authentication type for the KMS. Can be either &quot;simple&quot;
-      or &quot;kerberos&quot;.
-    </description>
-  </property>
 
-  <property>
-    <name>hadoop.kms.authentication.kerberos.keytab</name>
-    <value>${user.home}/kms.keytab</value>
-    <description>
-      Path to the keytab with credentials for the configured Kerberos principal.
-    </description>
-  </property>
+<!-- Put site-specific property overrides in this file. -->
 
-  <property>
-    <name>hadoop.kms.authentication.kerberos.principal</name>
-    <value>HTTP/localhost</value>
-    <description>
-      The Kerberos principal to use for the HTTP endpoint.
-      The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.kerberos.name.rules</name>
-    <value>DEFAULT</value>
-    <description>
-      Rules used to resolve Kerberos principal names.
-    </description>
-  </property>
-
-  <!-- Authentication cookie signature source -->
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider</name>
-    <value>random</value>
-    <description>
-      Indicates how the secret to sign the authentication cookies will be
-      stored. Options are 'random' (default), 'string' and 'zookeeper'.
-      If using a setup with multiple KMS instances, 'zookeeper' should be used.
-    </description>
-  </property>
-
-  <!-- Configuration for 'zookeeper' authentication cookie signature source -->
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
-    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
-    <description>
-      The Zookeeper ZNode path where the KMS instances will store and retrieve
-      the secret from.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
-    <value>#HOSTNAME#:#PORT#,...</value>
-    <description>
-      The Zookeeper connection string, a list of hostnames and port comma
-      separated.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
-    <value>none</value>
-    <description>
-      The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos).
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
-    <value>/etc/hadoop/conf/kms.keytab</value>
-    <description>
-      The absolute path for the Kerberos keytab with the credentials to
-      connect to Zookeeper.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
-    <value>kms/#HOSTNAME#</value>
-    <description>
-      The Kerberos service principal used to connect to Zookeeper.
-    </description>
-  </property>
-
-  <property>
-    <name>hadoop.kms.audit.logger</name>
-    <value>org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger</value>
-    <description>
-      The audit logger for KMS. It is a comma-separated list of KMSAuditLogger
-      class names. Default is the text-format SimpleKMSAuditLogger only.
-      If this is not configured, default will be used.
-    </description>
-  </property>
+<configuration>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
index 600f1e9..1ef6c4e 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSConfiguration.java
@@ -32,6 +32,7 @@ import java.net.URL;
 public class KMSConfiguration {
 
   public static final String KMS_CONFIG_DIR = "kms.config.dir";
+  public static final String KMS_DEFAULT_XML = "kms-default.xml";
   public static final String KMS_SITE_XML = "kms-site.xml";
   public static final String KMS_ACLS_XML = "kms-acls.xml";
 
@@ -42,6 +43,16 @@ public class KMSConfiguration {
   public static final String DEFAULT_KEY_ACL_PREFIX = "default.key.acl.";
   public static final String WHITELIST_KEY_ACL_PREFIX = "whitelist.key.acl.";
 
+  // HTTP properties
+  public static final String HTTP_PORT_KEY = "hadoop.kms.http.port";
+  public static final int HTTP_PORT_DEFAULT = 9600;
+  public static final String HTTP_HOST_KEY = "hadoop.kms.http.host";
+  public static final String HTTP_HOST_DEFAULT = "0.0.0.0";
+
+  // SSL properties
+  public static final String SSL_ENABLED_KEY = "hadoop.kms.ssl.enabled";
+  public static final boolean SSL_ENABLED_DEFAULT = false;
+
   // Property to set the backing KeyProvider
   public static final String KEY_PROVIDER_URI = CONFIG_PREFIX +
       "key.provider.uri";
@@ -77,6 +88,11 @@ public class KMSConfiguration {
 
   public static final boolean KEY_AUTHORIZATION_ENABLE_DEFAULT = true;
 
+  static {
+    Configuration.addDefaultResource(KMS_DEFAULT_XML);
+    Configuration.addDefaultResource(KMS_SITE_XML);
+  }
+
   static Configuration getConfiguration(boolean loadHadoopDefaults,
       String ... resources) {
     Configuration conf = new Configuration(loadHadoopDefaults);
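
The new static block registers kms-default.xml ahead of kms-site.xml, so site-specific values win. A minimal standalone sketch of that layering (assuming both files are on the classpath; the key is one of the HTTP properties above):

  import org.apache.hadoop.conf.Configuration;

  public class KmsConfLayering {
    public static void main(String[] args) {
      // Registration order matters: resources added later override earlier ones.
      Configuration.addDefaultResource("kms-default.xml");
      Configuration.addDefaultResource("kms-site.xml");

      // loadDefaults must be true for the registered default resources to load.
      Configuration conf = new Configuration(true);

      // Prints 9600 unless kms-site.xml overrides hadoop.kms.http.port.
      System.out.println(conf.getInt("hadoop.kms.http.port", 9600));
    }
  }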

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java
deleted file mode 100644
index 6918015..0000000
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSJMXServlet.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.crypto.key.kms.server;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.jmx.JMXJsonServlet;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-import java.io.IOException;
-
-@InterfaceAudience.Private
-public class KMSJMXServlet extends JMXJsonServlet {
-
-  @Override
-  protected boolean isInstrumentationAccessAllowed(HttpServletRequest request,
-      HttpServletResponse response) throws IOException {
-    return true;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
index 40ae19f..857139f 100644
--- a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java
@@ -17,10 +17,17 @@
  */
 package org.apache.hadoop.crypto.key.kms.server;
 
+import java.io.File;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URL;
+
+import javax.servlet.ServletContextEvent;
+import javax.servlet.ServletContextListener;
+
 import com.codahale.metrics.JmxReporter;
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
-
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.crypto.key.CachingKeyProvider;
@@ -34,15 +41,6 @@ import org.apache.hadoop.util.VersionInfo;
 import org.apache.log4j.PropertyConfigurator;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import org.slf4j.bridge.SLF4JBridgeHandler;
-
-import javax.servlet.ServletContextEvent;
-import javax.servlet.ServletContextListener;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.URI;
-import java.net.URL;
 
 @InterfaceAudience.Private
 public class KMSWebApp implements ServletContextListener {
@@ -81,11 +79,6 @@ public class KMSWebApp implements ServletContextListener {
   private static KMSAudit kmsAudit;
   private static KeyProviderCryptoExtension keyProviderCryptoExtension;
 
-  static {
-    SLF4JBridgeHandler.removeHandlersForRootLogger();
-    SLF4JBridgeHandler.install();
-  }
-
   private void initLogging(String confDir) {
     if (System.getProperty("log4j.configuration") == null) {
       System.setProperty("log4j.defaultInitOverride", "true");

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
new file mode 100644
index 0000000..70945cb
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebServer.java
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.crypto.key.kms.server;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.MalformedURLException;
+import java.net.URI;
+import java.net.URL;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.ConfigurationWithLogging;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.util.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The KMS web server.
+ */
+@InterfaceAudience.Private
+public class KMSWebServer {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(KMSWebServer.class);
+
+  private static final String NAME = "kms";
+  private static final String SERVLET_PATH = "/kms";
+
+  private final HttpServer2 httpServer;
+  private final String scheme;
+
+  KMSWebServer(Configuration cnf) throws Exception {
+    ConfigurationWithLogging conf = new ConfigurationWithLogging(cnf);
+
+    // Add SSL configuration file
+    conf.addResource(conf.get(SSLFactory.SSL_SERVER_CONF_KEY,
+        SSLFactory.SSL_SERVER_CONF_DEFAULT));
+
+    // Override configuration with deprecated environment variables.
+    deprecateEnv("KMS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY,
+        KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_HTTP_PORT", conf,
+        KMSConfiguration.HTTP_PORT_KEY, KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_MAX_THREADS", conf,
+        HttpServer2.HTTP_MAX_THREADS_KEY, KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf,
+        HttpServer2.HTTP_MAX_REQUEST_HEADER_SIZE_KEY,
+        KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_MAX_HTTP_HEADER_SIZE", conf,
+        HttpServer2.HTTP_MAX_RESPONSE_HEADER_SIZE_KEY,
+        KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_SSL_ENABLED", conf,
+        KMSConfiguration.SSL_ENABLED_KEY, KMSConfiguration.KMS_SITE_XML);
+    deprecateEnv("KMS_SSL_KEYSTORE_FILE", conf,
+        SSLFactory.SSL_SERVER_KEYSTORE_LOCATION,
+        SSLFactory.SSL_SERVER_CONF_DEFAULT);
+    deprecateEnv("KMS_SSL_KEYSTORE_PASS", conf,
+        SSLFactory.SSL_SERVER_KEYSTORE_PASSWORD,
+        SSLFactory.SSL_SERVER_CONF_DEFAULT);
+
+    boolean sslEnabled = conf.getBoolean(KMSConfiguration.SSL_ENABLED_KEY,
+        KMSConfiguration.SSL_ENABLED_DEFAULT);
+    scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME;
+
+    String host = conf.get(KMSConfiguration.HTTP_HOST_KEY,
+        KMSConfiguration.HTTP_HOST_DEFAULT);
+    int port = conf.getInt(KMSConfiguration.HTTP_PORT_KEY,
+        KMSConfiguration.HTTP_PORT_DEFAULT);
+    URI endpoint = new URI(scheme, null, host, port, null, null, null);
+
+    httpServer = new HttpServer2.Builder()
+        .setName(NAME)
+        .setConf(conf)
+        .setSSLConf(conf)
+        .authFilterConfigurationPrefix(KMSAuthenticationFilter.CONFIG_PREFIX)
+        .addEndpoint(endpoint)
+        .build();
+  }
+
+  /**
+   * Load the deprecated environment variable into the configuration.
+   *
+   * @param varName the environment variable name
+   * @param conf the configuration
+   * @param propName the configuration property name
+   * @param confFile the configuration file name
+   */
+  private static void deprecateEnv(String varName, Configuration conf,
+                                   String propName, String confFile) {
+    String value = System.getenv(varName);
+    if (value == null) {
+      return;
+    }
+    String propValue = conf.get(propName);
+    LOG.warn("Environment variable {} = '{}' is deprecated and overriding"
+        + " property {} = '{}', please set the property in {} instead.",
+        varName, value, propName, propValue, confFile);
+    conf.set(propName, value, "environment variable " + varName);
+  }
+
+  public void start() throws IOException {
+    httpServer.start();
+  }
+
+  public boolean isRunning() {
+    return httpServer.isAlive();
+  }
+
+  public void join() throws InterruptedException {
+    httpServer.join();
+  }
+
+  public void stop() throws Exception {
+    httpServer.stop();
+  }
+
+  public URL getKMSUrl() {
+    InetSocketAddress addr = httpServer.getConnectorAddress(0);
+    if (null == addr) {
+      return null;
+    }
+    try {
+      return new URL(scheme, addr.getHostName(), addr.getPort(),
+          SERVLET_PATH);
+    } catch (MalformedURLException ex) {
+      throw new RuntimeException("It should never happen: " + ex.getMessage(),
+          ex);
+    }
+  }
+
+  public static void main(String[] args) throws Exception {
+    StringUtils.startupShutdownMessage(KMSWebServer.class, args, LOG);
+    Configuration conf = KMSConfiguration.getKMSConf();
+    KMSWebServer kmsWebServer = new KMSWebServer(conf);
+    kmsWebServer.start();
+    kmsWebServer.join();
+  }
+}
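
A hedged sketch of driving this lifecycle from a test, in the spirit of MiniKMS. The constructor is package-private, so this assumes a class in the same org.apache.hadoop.crypto.key.kms.server package, with the KMS webapp resources on the classpath; port 0 requests an ephemeral port:

  package org.apache.hadoop.crypto.key.kms.server;

  import java.net.URL;
  import org.apache.hadoop.conf.Configuration;

  public class KmsSmokeTest {
    public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      conf.setInt(KMSConfiguration.HTTP_PORT_KEY, 0); // ephemeral port

      KMSWebServer server = new KMSWebServer(conf);
      server.start();
      // getKMSUrl() resolves the bound address; the scheme follows
      // hadoop.kms.ssl.enabled.
      URL url = server.getKMSUrl();
      System.out.println("KMS listening at " + url);
      server.stop();
    }
  }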

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
deleted file mode 100644
index 52dba38..0000000
--- a/hadoop-common-project/hadoop-kms/src/main/libexec/kms-config.sh
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#  http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-function hadoop_subproject_init
-{
-  local this
-  local binparent
-  local varlist
-
-  if [[ -z "${HADOOP_KMS_ENV_PROCESSED}" ]]; then
-    if [[ -e "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then
-      . "${HADOOP_CONF_DIR}/kms-env.sh"
-      export HADOOP_KMS_ENV_PROCESSED=true
-    fi
-  fi
-
-  export HADOOP_CATALINA_PREFIX=kms
-
-  export HADOOP_CATALINA_TEMP="${KMS_TEMP:-${HADOOP_HOME}/temp}"
-
-  hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
-
-  hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR
-
-  export HADOOP_CATALINA_CONFIG="${HADOOP_CONF_DIR}"
-  export HADOOP_CATALINA_LOG="${HADOOP_LOG_DIR}"
-
-  export HADOOP_CATALINA_HTTP_PORT="${KMS_HTTP_PORT:-9600}"
-  export HADOOP_CATALINA_ADMIN_PORT="${KMS_ADMIN_PORT:-$((HADOOP_CATALINA_HTTP_PORT+1))}"
-  export HADOOP_CATALINA_MAX_THREADS="${KMS_MAX_THREADS:-1000}"
-  export HADOOP_CATALINA_MAX_HTTP_HEADER_SIZE="${KMS_MAX_HTTP_HEADER_SIZE:-65536}"
-
-  export HADOOP_CATALINA_SSL_KEYSTORE_FILE="${KMS_SSL_KEYSTORE_FILE:-${HOME}/.keystore}"
-
-  export CATALINA_BASE="${CATALINA_BASE:-${HADOOP_HOME}/share/hadoop/kms/tomcat}"
-  export HADOOP_CATALINA_HOME="${KMS_CATALINA_HOME:-${CATALINA_BASE}}"
-
-  export CATALINA_OUT="${CATALINA_OUT:-${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms-${HOSTNAME}.out}"
-
-  export CATALINA_PID="${CATALINA_PID:-${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-kms.pid}"
-
-  if [[ -n "${HADOOP_SHELL_SCRIPT_DEBUG}" ]]; then
-    varlist=$(env | egrep '(^KMS|^CATALINA)' | cut -f1 -d= | grep -v _PASS)
-    for i in ${varlist}; do
-      hadoop_debug "Setting ${i} to ${!i}"
-    done
-  fi
-}
-
-if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
-   [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]]; then
-  . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
-elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
-  . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then
-  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
-else
-  echo "ERROR: Hadoop common not found." 2>&1
-  exit 1
-fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
new file mode 100755
index 0000000..c530716
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/libexec/shellprofile.d/hadoop-kms.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+if [[ "${HADOOP_SHELL_EXECNAME}" = hadoop ]]; then
+  hadoop_add_subcommand "kms" "run KMS, the Key Management Server"
+fi
+
+## @description  Command handler for kms subcommand
+## @audience     private
+## @stability    stable
+## @replaceable  no
+function hadoop_subcommand_kms
+{
+  if [[ -f "${HADOOP_CONF_DIR}/kms-env.sh" ]]; then
+    # shellcheck disable=SC1090
+    . "${HADOOP_CONF_DIR}/kms-env.sh"
+  fi
+
+  hadoop_deprecate_envvar KMS_CONFIG HADOOP_CONF_DIR
+  hadoop_deprecate_envvar KMS_LOG HADOOP_LOG_DIR
+
+  hadoop_using_envvar KMS_HTTP_PORT
+  hadoop_using_envvar KMS_MAX_HTTP_HEADER_SIZE
+  hadoop_using_envvar KMS_MAX_THREADS
+  hadoop_using_envvar KMS_SSL_ENABLED
+  hadoop_using_envvar KMS_SSL_KEYSTORE_FILE
+  hadoop_using_envvar KMS_TEMP
+
+  # shellcheck disable=SC2034
+  HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
+  # shellcheck disable=SC2034
+  HADOOP_CLASSNAME=org.apache.hadoop.crypto.key.kms.server.KMSWebServer
+
+  hadoop_add_param HADOOP_OPTS "-Dkms.config.dir=" \
+    "-Dkms.config.dir=${HADOOP_CONF_DIR}"
+  hadoop_add_param HADOOP_OPTS "-Dkms.log.dir=" \
+    "-Dkms.log.dir=${HADOOP_LOG_DIR}"
+
+  if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
+     [[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then
+    hadoop_mkdir "${KMS_TEMP:-${HADOOP_HOME}/temp}"
+  fi
+}
\ No newline at end of file
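
The hadoop_add_param calls above hand HADOOP_CONF_DIR and HADOOP_LOG_DIR to the JVM as the kms.config.dir and kms.log.dir system properties. A small sketch of the Java side of that handoff (the lookup here is illustrative; KMSConfiguration and KMSWebApp do the real resolution):

  public class KmsConfigDirProbe {
    public static void main(String[] args) {
      // Set by the shell profile via -Dkms.config.dir=${HADOOP_CONF_DIR}
      // and -Dkms.log.dir=${HADOOP_LOG_DIR}.
      String confDir = System.getProperty("kms.config.dir");
      String logDir = System.getProperty("kms.log.dir");
      System.out.println("conf dir: " + confDir + ", log dir: " + logDir);
    }
  }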

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml
new file mode 100644
index 0000000..2b178b8
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/kms-default.xml
@@ -0,0 +1,248 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<!--
+  Do not modify this file directly.  Instead, copy entries that you wish to
+  modify from this file into kms-site.xml and change them there.  If
+  kms-site.xml does not already exist, create it.
+-->
+
+<configuration>
+
+  <property>
+    <name>hadoop.kms.http.port</name>
+    <value>9600</value>
+    <description>
+      The HTTP port for KMS REST API.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.http.host</name>
+    <value>0.0.0.0</value>
+    <description>
+      The bind host for KMS REST API.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.ssl.enabled</name>
+    <value>false</value>
+    <description>
+      Whether SSL is enabled. Default is false, i.e. disabled.
+    </description>
+  </property>
+
+  <!-- HTTP properties -->
+
+  <property>
+    <name>hadoop.http.max.threads</name>
+    <value>1000</value>
+    <description>
+      The maximum number of threads.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.max.request.header.size</name>
+    <value>65536</value>
+    <description>
+      The maximum HTTP request header size.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.max.response.header.size</name>
+    <value>65536</value>
+    <description>
+      The maximum HTTP response header size.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.http.temp.dir</name>
+    <value>${hadoop.tmp.dir}/kms</value>
+    <description>
+      KMS temp directory.
+    </description>
+  </property>
+
+  <!-- KMS Backend KeyProvider -->
+
+  <property>
+    <name>hadoop.kms.key.provider.uri</name>
+    <value>jceks://file@/${user.home}/kms.keystore</value>
+    <description>
+      URI of the backing KeyProvider for the KMS.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.security.keystore.java-keystore-provider.password-file</name>
+    <value></value>
+    <description>
+      If using the JavaKeyStoreProvider, the file name for the keystore password.
+    </description>
+  </property>
+
+  <!-- KMS Cache -->
+
+  <property>
+    <name>hadoop.kms.cache.enable</name>
+    <value>true</value>
+    <description>
+      Whether the KMS will act as a cache for the backing KeyProvider.
+      When the cache is enabled, operations like getKeyVersion, getMetadata,
+      and getCurrentKey will sometimes return cached data without consulting
+      the backing KeyProvider. Cached values are flushed when keys are deleted
+      or modified.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.cache.timeout.ms</name>
+    <value>600000</value>
+    <description>
+      Expiry time for the KMS key version and key metadata cache, in
+      milliseconds. This affects getKeyVersion and getMetadata.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.current.key.cache.timeout.ms</name>
+    <value>30000</value>
+    <description>
+      Expiry time for the KMS current key cache, in milliseconds. This
+      affects getCurrentKey operations.
+    </description>
+  </property>
+
+  <!-- KMS Audit -->
+
+  <property>
+    <name>hadoop.kms.audit.aggregation.window.ms</name>
+    <value>10000</value>
+    <description>
+      Duplicate audit log events within the aggregation window (specified in
+      ms) are quashed to reduce log traffic. A single message for aggregated
+      events is printed at the end of the window, along with a count of the
+      number of aggregated events.
+    </description>
+  </property>
+
+  <!-- KMS Security -->
+
+  <property>
+    <name>hadoop.kms.authentication.type</name>
+    <value>simple</value>
+    <description>
+      Authentication type for the KMS. Can be either 'simple' (default) or
+      'kerberos'.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.keytab</name>
+    <value>${user.home}/kms.keytab</value>
+    <description>
+      Path to the keytab with credentials for the configured Kerberos principal.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.principal</name>
+    <value>HTTP/localhost</value>
+    <description>
+      The Kerberos principal to use for the HTTP endpoint.
+      The principal must start with 'HTTP/' as per the Kerberos HTTP SPNEGO specification.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.kerberos.name.rules</name>
+    <value>DEFAULT</value>
+    <description>
+      Rules used to resolve Kerberos principal names.
+    </description>
+  </property>
+
+  <!-- Authentication cookie signature source -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider</name>
+    <value>random</value>
+    <description>
+      Indicates how the secret to sign the authentication cookies will be
+      stored. Options are 'random' (default), 'string' and 'zookeeper'.
+      If using a setup with multiple KMS instances, 'zookeeper' should be used.
+    </description>
+  </property>
+
+  <!-- Configuration for 'zookeeper' authentication cookie signature source -->
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.path</name>
+    <value>/hadoop-kms/hadoop-auth-signature-secret</value>
+    <description>
+      The Zookeeper ZNode path where KMS instances store and retrieve
+      the signing secret.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.connection.string</name>
+    <value>#HOSTNAME#:#PORT#,...</value>
+    <description>
+      The Zookeeper connection string: a comma-separated list of
+      hostname:port pairs.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.auth.type</name>
+    <value>none</value>
+    <description>
+      The Zookeeper authentication type, 'none' (default) or 'sasl' (Kerberos).
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.keytab</name>
+    <value>/etc/hadoop/conf/kms.keytab</value>
+    <description>
+      The absolute path for the Kerberos keytab with the credentials to
+      connect to Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.authentication.signer.secret.provider.zookeeper.kerberos.principal</name>
+    <value>kms/#HOSTNAME#</value>
+    <description>
+      The Kerberos service principal used to connect to Zookeeper.
+    </description>
+  </property>
+
+  <property>
+    <name>hadoop.kms.audit.logger</name>
+    <value>org.apache.hadoop.crypto.key.kms.server.SimpleKMSAuditLogger</value>
+    <description>
+      The audit logger for KMS: a comma-separated list of KMSAuditLogger
+      class names. Defaults to the text-format SimpleKMSAuditLogger when
+      not configured.
+    </description>
+  </property>
+
+</configuration>
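
Since operators are told to copy entries into kms-site.xml rather than edit this file, it can help to confirm which resource actually supplied an effective value. A minimal sketch using Configuration#getPropertySources (the property list is illustrative):

  import org.apache.hadoop.conf.Configuration;

  public class EffectiveKmsConf {
    public static void main(String[] args) {
      Configuration.addDefaultResource("kms-default.xml");
      Configuration.addDefaultResource("kms-site.xml");
      Configuration conf = new Configuration();

      for (String key : new String[] {"hadoop.kms.http.port",
          "hadoop.kms.ssl.enabled", "hadoop.kms.key.provider.uri"}) {
        // The last entry names the resource that finally set the value.
        String[] sources = conf.getPropertySources(key);
        System.out.println(key + " = " + conf.get(key) + " (from "
            + (sources == null ? "unset" : sources[sources.length - 1]) + ")");
      }
    }
  }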

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
new file mode 100644
index 0000000..1c14d28
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/kms/WEB-INF/web.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<web-app version="2.4" xmlns="http://java.sun.com/xml/ns/j2ee">
+
+  <listener>
+    <listener-class>org.apache.hadoop.crypto.key.kms.server.KMSWebApp</listener-class>
+  </listener>
+
+  <servlet>
+    <servlet-name>webservices-driver</servlet-name>
+    <servlet-class>com.sun.jersey.spi.container.servlet.ServletContainer</servlet-class>
+    <init-param>
+      <param-name>com.sun.jersey.config.property.packages</param-name>
+      <param-value>org.apache.hadoop.crypto.key.kms.server</param-value>
+    </init-param>
+
+    <!-- Enables detailed Jersey request/response logging -->
+    <!--
+    <init-param>
+        <param-name>com.sun.jersey.spi.container.ContainerRequestFilters</param-name>
+        <param-value>com.sun.jersey.api.container.filter.LoggingFilter</param-value>
+    </init-param>
+    <init-param>
+        <param-name>com.sun.jersey.spi.container.ContainerResponseFilters</param-name>
+        <param-value>com.sun.jersey.api.container.filter.LoggingFilter</param-value>
+    </init-param>
+    -->
+    <load-on-startup>1</load-on-startup>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>webservices-driver</servlet-name>
+    <url-pattern>/kms/*</url-pattern>
+  </servlet-mapping>
+
+  <filter>
+    <filter-name>authFilter</filter-name>
+    <filter-class>org.apache.hadoop.crypto.key.kms.server.KMSAuthenticationFilter</filter-class>
+  </filter>
+
+  <filter>
+    <filter-name>MDCFilter</filter-name>
+    <filter-class>org.apache.hadoop.crypto.key.kms.server.KMSMDCFilter</filter-class>
+  </filter>
+
+  <filter-mapping>
+    <filter-name>authFilter</filter-name>
+    <url-pattern>/*</url-pattern>
+  </filter-mapping>
+
+  <filter-mapping>
+    <filter-name>MDCFilter</filter-name>
+    <url-pattern>/*</url-pattern>
+  </filter-mapping>
+
+</web-app>
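
The webservices-driver servlet delegates /kms/* to Jersey, which scans the org.apache.hadoop.crypto.key.kms.server package for JAX-RS resources. A hedged sketch of the shape such a resource takes (the class and path here are illustrative, not the actual KMS resource):

  package org.apache.hadoop.crypto.key.kms.server;

  import javax.ws.rs.GET;
  import javax.ws.rs.Path;
  import javax.ws.rs.Produces;
  import javax.ws.rs.core.MediaType;

  // Discovered via com.sun.jersey.config.property.packages; with the
  // /kms/* servlet-mapping above, this would answer at /kms/ping.
  @Path("ping")
  public class PingResource {
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    public String ping() {
      return "{\"status\":\"ok\"}";
    }
  }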

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5d182949/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
new file mode 100644
index 0000000..9925ad9
--- /dev/null
+++ b/hadoop-common-project/hadoop-kms/src/main/resources/webapps/static/index.html
@@ -0,0 +1,35 @@
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+
+
+-->
+<html>
+<head>
+  <title>Hadoop KMS</title>
+</head>
+<body>
+<h1>Hadoop KMS</h1>
+<ul>
+  <li>KMS REST API end-point <b>/kms/v1/</b></li>
+    <ul>
+      <li><a href="/kms/v1/keys/names">/kms/v1/keys/names</a>
+        to list all keys</li>
+    </ul>
+  <li><a href="/conf">KMS configuration properties</a></li>
+  <li><a href="/jmx">KMS JMX</a></li>
+  <li><a href="/logLevel">KMS log level</a></li>
+  <li><a href="/logs">KMS log files</a></li>
+  <li><a href="/stacks">KMS stacks</a></li>
+</ul>
+</body>
+</html>
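
A minimal client sketch against the keys listing linked above (assumes the default 'simple' authentication, where the caller identity is passed as the user.name query parameter, and the default host and port):

  import java.io.BufferedReader;
  import java.io.InputStreamReader;
  import java.net.HttpURLConnection;
  import java.net.URL;

  public class ListKeyNames {
    public static void main(String[] args) throws Exception {
      // user.name is honored by the pseudo/simple authentication handler.
      URL url = new URL("http://localhost:9600/kms/v1/keys/names?user.name="
          + System.getProperty("user.name"));
      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
      try (BufferedReader in = new BufferedReader(
          new InputStreamReader(conn.getInputStream()))) {
        String line;
        while ((line = in.readLine()) != null) {
          System.out.println(line); // JSON array of key names
        }
      } finally {
        conn.disconnect();
      }
    }
  }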

