YARN-7113. Clean up packaging and dependencies for yarn-native-services. 
Contributed by Billie Rinaldi


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3541d107
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3541d107
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3541d107

Branch: refs/heads/yarn-native-services
Commit: 3541d1073aed9d7ddc3d1ee2023ee7ab12c25f5d
Parents: 5ccee80
Author: Jian He <jia...@apache.org>
Authored: Tue Aug 29 11:09:00 2017 -0700
Committer: Jian He <jia...@apache.org>
Committed: Tue Sep 19 21:36:20 2017 -0700

----------------------------------------------------------------------
 NOTICE.txt                                      |   14 +
 .../resources/assemblies/hadoop-yarn-dist.xml   |    8 -
 .../assemblies/hadoop-yarn-services-api.xml     |   36 -
 .../assemblies/hadoop-yarn-services-dist.xml    |   30 -
 hadoop-project/pom.xml                          |   19 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn        |   37 +-
 .../hadoop-yarn-services-api/pom.xml            |  104 +-
 .../yarn/service/webapp/ApiServerWebApp.java    |    4 +-
 .../src/main/resources/webapps/api-server/app   |   16 +
 .../resources/webapps/services-rest-api/app     |   16 -
 .../hadoop-yarn-services-core/pom.xml           |  213 +---
 .../service/client/params/ActionKDiagArgs.java  |   76 --
 .../yarn/service/client/params/ClientArgs.java  |    5 -
 .../registry/YarnRegistryViewForProviders.java  |    8 +-
 .../yarn/service/utils/KerberosDiags.java       |  680 -----------
 .../hadoop/yarn/service/utils/SliderUtils.java  | 1088 ------------------
 .../hadoop/yarn/service/ServiceTestUtils.java   |   28 +
 .../hadoop/yarn/service/TestServiceApiUtil.java |   38 +-
 .../yarn/service/TestYarnNativeServices.java    |   10 +-
 .../yarn/service/client/TestServiceCLI.java     |    1 -
 .../yarn/service/conf/TestAppJsonResolve.java   |   30 +-
 .../service/conf/TestLoadExampleAppJson.java    |   11 +-
 .../providers/TestAbstractClientProvider.java   |   10 +-
 hadoop-yarn-project/pom.xml                     |    4 +
 24 files changed, 175 insertions(+), 2311 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/NOTICE.txt
----------------------------------------------------------------------
diff --git a/NOTICE.txt b/NOTICE.txt
index 0718909..f3af2f7 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -581,3 +581,17 @@ The binary distribution of this product bundles binaries of
 Ehcache 3.3.1,
 which has the following notices:
  * Ehcache V3 Copyright 2014-2016 Terracotta, Inc.
+
+JCommander (https://github.com/cbeust/jcommander),
+which has the following notices:
+ * Copyright 2010 Cedric Beust cedric@beust.com
+
+The binary distribution of this product bundles binaries of
+snakeyaml (https://bitbucket.org/asomov/snakeyaml),
+which has the following notices:
+ * Copyright (c) 2008, http://www.snakeyaml.org
+
+The binary distribution of this product bundles binaries of
+swagger-annotations (https://github.com/swagger-api/swagger-core),
+which has the following notices:
+ * Copyright 2016 SmartBear Software

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
index 8aeeabd..8b3d292 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
@@ -98,10 +98,6 @@
       <outputDirectory>etc/hadoop</outputDirectory>
     </fileSet>
     <fileSet>
-      
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/target/hadoop-yarn-services-core-${project.version}</directory>
-      
<outputDirectory>/share/hadoop/${hadoop.component}/lib/services</outputDirectory>
-    </fileSet>
-    <fileSet>
       
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target</directory>
       
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
       <includes>
@@ -109,10 +105,6 @@
       </includes>
     </fileSet>
     <fileSet>
-      
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target/hadoop-yarn-services-api-${project.version}</directory>
-      
<outputDirectory>/share/hadoop/${hadoop.component}/lib/services-api</outputDirectory>
-    </fileSet>
-    <fileSet>
       
<directory>hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/target</directory>
       
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
       <includes>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml
deleted file mode 100644
index 589f724..0000000
--- 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<assembly 
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0";
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-          
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0
 http://maven.apache.org/xsd/assembly-1.1.0.xsd";>
-  <id>hadoop-yarn-services-api-dist</id>
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-      <includes>
-        <include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-base</include>
-        
<include>com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider</include>
-        
<include>com.fasterxml.jackson.module:jackson-module-jaxb-annotations</include>
-        <include>io.swagger:swagger-annotations</include>
-      </includes>
-    </dependencySet>
-  </dependencySets>
-</assembly>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-dist.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-dist.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-dist.xml
deleted file mode 100644
index 1b81f98..0000000
--- 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-dist.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-<assembly 
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0";
-          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-          
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0
 http://maven.apache.org/xsd/assembly-1.1.0.xsd";>
-  <id>hadoop-yarn-services-dist</id>
-  <formats>
-    <format>dir</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <dependencySets>
-    <dependencySet>
-      <useProjectArtifact>false</useProjectArtifact>
-    </dependencySet>
-  </dependencySets>
-</assembly>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index 3e0e036..e4f6e9b 100755
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -143,7 +143,8 @@
     <declared.hadoop.version>${project.version}</declared.hadoop.version>
     
     <swagger-annotations-version>1.5.4</swagger-annotations-version>
-    
<maven-doxia-module-markdown.version>1.4</maven-doxia-module-markdown.version>
+    <snakeyaml.version>1.16</snakeyaml.version>
+    <jcommander.version>1.30</jcommander.version>
   </properties>
 
   <dependencyManagement>
@@ -606,6 +607,11 @@
         <version>3.1.0</version>
       </dependency>
       <dependency>
+        <groupId>javax.ws.rs</groupId>
+        <artifactId>jsr311-api</artifactId>
+        <version>1.1.1</version>
+      </dependency>
+      <dependency>
         <groupId>org.eclipse.jetty</groupId>
         <artifactId>jetty-server</artifactId>
         <version>${jetty.version}</version>
@@ -1327,9 +1333,14 @@
           <version>${jackson2.version}</version>
         </dependency>
         <dependency>
-          <groupId>org.apache.maven.doxia</groupId>
-          <artifactId>doxia-module-markdown</artifactId>
-          <version>${maven-doxia-module-markdown.version}</version>
+          <groupId>org.yaml</groupId>
+          <artifactId>snakeyaml</artifactId>
+          <version>${snakeyaml.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>com.beust</groupId>
+          <artifactId>jcommander</artifactId>
+          <version>${jcommander.version}</version>
         </dependency>
 
     </dependencies>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/bin/yarn
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn 
b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index c93ed41..43bcb6b 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -31,6 +31,7 @@ function hadoop_usage
   hadoop_add_option "--hosts filename" "list of hosts to use in worker mode"
   hadoop_add_option "--workers" "turn on worker mode"
 
+  hadoop_add_subcommand "apiserver" "run yarn-native-service rest server"
   hadoop_add_subcommand "application" client "prints application(s) 
report/kill application"
   hadoop_add_subcommand "applicationattempt" client "prints 
applicationattempt(s) report"
   hadoop_add_subcommand "classpath" client "prints the class path needed to 
get the hadoop jar and the required libraries"
@@ -48,9 +49,8 @@ function hadoop_usage
   hadoop_add_subcommand "rmadmin" admin "admin tools"
   hadoop_add_subcommand "router" daemon "run the Router daemon"
   hadoop_add_subcommand "scmadmin" admin "SharedCacheManager admin tools"
-  hadoop_add_subcommand "apiserver" "run yarn-native-service rest server"
-  hadoop_add_subcommand "sharedcachemanager" admin "run the SharedCacheManager 
daemon"
   hadoop_add_subcommand "service" "run a service"
+  hadoop_add_subcommand "sharedcachemanager" admin "run the SharedCacheManager 
daemon"
   hadoop_add_subcommand "timelinereader" client "run the timeline reader 
server"
   hadoop_add_subcommand "timelineserver" daemon "run the timeline server"
   hadoop_add_subcommand "top" client "view cluster information"
@@ -69,6 +69,18 @@ function yarncmd_case
   shift
 
   case ${subcmd} in
+    apiserver)
+      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
+      HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.webapp.ApiServerWebApp'
+      local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
+${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_DIR},\
+${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
+${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
+      hadoop_translate_cygwin_path sld
+      hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
+    ;;
     application|applicationattempt|container)
       HADOOP_CLASSNAME=org.apache.hadoop.yarn.client.cli.ApplicationCLI
       set -- "${subcmd}" "$@"
@@ -147,14 +159,10 @@ function yarncmd_case
     scmadmin)
       HADOOP_CLASSNAME='org.apache.hadoop.yarn.client.SCMAdmin'
     ;;
-    apiserver)
-      HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
-      hadoop_add_classpath 
"${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services"'/*'
-      hadoop_add_classpath 
"${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services-api"'/*'
-      HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.webapp.ApiServerWebApp'
+    service)
+      HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.client.ServiceCLI'
       local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
 ${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
-${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services,\
 ${HADOOP_HDFS_HOME}/${HDFS_DIR},\
 ${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
 ${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
@@ -166,19 +174,6 @@ ${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
       HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.sharedcachemanager.SharedCacheManager'
     ;;
-    service)
-      hadoop_add_classpath 
"${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services"'/*'
-      HADOOP_CLASSNAME='org.apache.hadoop.yarn.service.client.ServiceCLI'
-      local sld="${HADOOP_YARN_HOME}/${YARN_DIR},\
-${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR},\
-${HADOOP_YARN_HOME}/${YARN_LIB_JARS_DIR}/services,\
-${HADOOP_HDFS_HOME}/${HDFS_DIR},\
-${HADOOP_HDFS_HOME}/${HDFS_LIB_JARS_DIR},\
-${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR},\
-${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"
-      hadoop_translate_cygwin_path sld
-      hadoop_add_param HADOOP_OPTS service.libdir "-Dservice.libdir=${sld}"
-    ;;
     timelinereader)
       HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
       
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/pom.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/pom.xml
index 7d9f15c..1077ccd 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/pom.xml
@@ -23,7 +23,6 @@
   </parent>
   <artifactId>hadoop-yarn-services-api</artifactId>
   <name>Apache Hadoop YARN Services API</name>
-  <version>3.0.0-beta1-SNAPSHOT</version>
   <packaging>jar</packaging>
   <description>Hadoop YARN REST APIs for services</description>
 
@@ -46,7 +45,6 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
-        <version>${maven-jar-plugin.version}</version>
         <!-- The configuration of the plugin -->
         <configuration>
           <!-- Configuration of the archiver -->
@@ -59,9 +57,6 @@
             <manifest>
             </manifest>
           </archive>
-          <excludes>
-            <exclude>**/run_rest_service.sh</exclude>
-          </excludes>
         </configuration>
         <executions>
           <execution>
@@ -92,97 +87,34 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-services-core</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
-      <groupId>io.swagger</groupId>
-      <artifactId>swagger-annotations</artifactId>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-core</artifactId>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-webapp</artifactId>
     </dependency>
     <dependency>
-      <groupId>com.fasterxml.jackson.jaxrs</groupId>
-      <artifactId>jackson-jaxrs-json-provider</artifactId>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>javax.ws.rs</groupId>
+      <artifactId>jsr311-api</artifactId>
     </dependency>
   </dependencies>
-
-  <profiles>
-    <profile>
-      <id>dist</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${project.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>dist</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  
<finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-yarn-services-api</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-    <profile>
-      <id>rat</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.rat</groupId>
-            <artifactId>apache-rat-plugin</artifactId>
-            <version>${apache-rat-plugin.version}</version>
-            <executions>
-              <execution>
-                <id>check-licenses</id>
-                <goals>
-                  <goal>check</goal>
-                </goals>
-              </execution>
-            </executions>
-            <configuration>
-              <excludes>
-                <exclude>**/*.json</exclude>
-                <exclude>**/THIRD-PARTY.properties</exclude>
-              </excludes>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServerWebApp.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServerWebApp.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServerWebApp.java
index b226df7..fc65a63 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServerWebApp.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/webapp/ApiServerWebApp.java
@@ -59,8 +59,10 @@ public class ApiServerWebApp extends AbstractService {
   public static void main(String[] args) throws IOException {
     ApiServerWebApp apiWebApp = new ApiServerWebApp();
     try {
-      apiWebApp.startWebApp();
+      apiWebApp.init(new YarnConfiguration());
+      apiWebApp.serviceStart();
     } catch (Exception e) {
+      logger.error("Got exception starting", e);
       apiWebApp.close();
     }
   }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/api-server/app
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/api-server/app
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/api-server/app
new file mode 100644
index 0000000..6a077b1
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/api-server/app
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DON'T DELETE. REST WEBAPP RUN SCRIPT WILL STOP WORKING.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/services-rest-api/app
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/services-rest-api/app
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/services-rest-api/app
deleted file mode 100644
index 6a077b1..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/webapps/services-rest-api/app
+++ /dev/null
@@ -1,16 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-DON'T DELETE. REST WEBAPP RUN SCRIPT WILL STOP WORKING.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
index 1f8a408..d9b7adb 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/pom.xml
@@ -104,7 +104,6 @@
     <dependency>
       <groupId>com.beust</groupId>
       <artifactId>jcommander</artifactId>
-      <version>1.30</version>
     </dependency>
 
     <dependency>
@@ -126,66 +125,51 @@
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-core-asl</artifactId>
-      <scope>compile</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-jaxrs</artifactId>
-      <scope>compile</scope>
     </dependency>
 
     <dependency>
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-mapper-asl</artifactId>
-      <scope>compile</scope>
     </dependency>
 
     <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-xc</artifactId>
-      <scope>compile</scope>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <type>test-jar</type>
-      <scope>test</scope>
+      <artifactId>hadoop-hdfs-client</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs</artifactId>
+      <artifactId>hadoop-yarn-client</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-client</artifactId>
+      <artifactId>hadoop-yarn-registry</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-client</artifactId>
-      <scope>compile</scope>
+      <artifactId>hadoop-yarn-common</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-      <scope>compile</scope>
+      <artifactId>hadoop-common</artifactId>
     </dependency>
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-registry</artifactId>
-      <scope>compile</scope>
+      <artifactId>hadoop-annotations</artifactId>
     </dependency>
 
     <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
     </dependency>
 
     <dependency>
@@ -195,13 +179,12 @@
 
     <dependency>
       <groupId>org.apache.commons</groupId>
-      <artifactId>commons-compress</artifactId>
+      <artifactId>commons-configuration2</artifactId>
     </dependency>
 
     <dependency>
-      <groupId>commons-digester</groupId>
-      <artifactId>commons-digester</artifactId>
-      <version>1.8</version>
+      <groupId>org.apache.commons</groupId>
+      <artifactId>commons-compress</artifactId>
     </dependency>
 
     <dependency>
@@ -215,37 +198,13 @@
     </dependency>
 
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-client</artifactId>
     </dependency>
 
     <dependency>
-      <groupId>com.codahale.metrics</groupId>
-      <artifactId>metrics-core</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.codahale.metrics</groupId>
-      <artifactId>metrics-servlets</artifactId>
-      <version>3.0.1</version>
-    </dependency>
-
-    <!-- ======================================================== -->
-    <!-- service registry -->
-    <!-- ======================================================== -->
-
-    <dependency>
-      <groupId>org.apache.zookeeper</groupId>
-      <artifactId>zookeeper</artifactId>
-    </dependency>
-
-    <!-- ======================================================== -->
-    <!-- Jersey and webapp support -->
-    <!-- ======================================================== -->
-
-    <dependency>
-      <groupId>javax.servlet</groupId>
-      <artifactId>javax.servlet-api</artifactId>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-framework</artifactId>
     </dependency>
 
     <dependency>
@@ -254,38 +213,23 @@
     </dependency>
 
     <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-client</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-json</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-server</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
+      <groupId>org.yaml</groupId>
+      <artifactId>snakeyaml</artifactId>
     </dependency>
 
     <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
+        <groupId>io.swagger</groupId>
+        <artifactId>swagger-annotations</artifactId>
     </dependency>
 
-    <dependency>
-      <groupId>com.google.inject.extensions</groupId>
-      <artifactId>guice-servlet</artifactId>
-    </dependency>
+    <!-- ======================================================== -->
+    <!-- Test dependencies -->
+    <!-- ======================================================== -->
 
     <dependency>
-      <groupId>com.sun.jersey.contribs</groupId>
-      <artifactId>jersey-guice</artifactId>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
     </dependency>
 
     <dependency>
@@ -295,114 +239,23 @@
     </dependency>
 
     <dependency>
-      <groupId>org.easymock</groupId>
-      <artifactId>easymock</artifactId>
-      <version>3.1</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minicluster</artifactId>
       <scope>test</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>org.objenesis</groupId>
-          <artifactId>objenesis</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>
 
     <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-easymock</artifactId>
-      <version>1.6.5</version>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
       <scope>test</scope>
     </dependency>
 
     <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <version>1.6.5</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.javassist</groupId>
-          <artifactId>javassist</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.objenesis</groupId>
-          <artifactId>objenesis</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>javax.servlet.jsp</groupId>
-      <artifactId>jsp-api</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.codehaus.jettison</groupId>
-      <artifactId>jettison</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.yaml</groupId>
-      <artifactId>snakeyaml</artifactId>
-      <version>1.16</version>
-      <scope>compile</scope>
-    </dependency>
-
-    <dependency>
-        <groupId>io.swagger</groupId>
-        <artifactId>swagger-annotations</artifactId>
-        <version>1.5.4</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-test</artifactId>
       <scope>test</scope>
     </dependency>
 
   </dependencies>
 
-
-  <profiles>
-    <profile>
-      <id>dist</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-assembly-plugin</artifactId>
-            <dependencies>
-              <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-assemblies</artifactId>
-                <version>${project.version}</version>
-              </dependency>
-            </dependencies>
-            <executions>
-              <execution>
-                <id>dist</id>
-                <phase>prepare-package</phase>
-                <goals>
-                  <goal>single</goal>
-                </goals>
-                <configuration>
-                  <appendAssemblyId>false</appendAssemblyId>
-                  <attach>false</attach>
-                  
<finalName>${project.artifactId}-${project.version}</finalName>
-                  <descriptorRefs>
-                    <descriptorRef>hadoop-yarn-services-dist</descriptorRef>
-                  </descriptorRefs>
-                </configuration>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-
-  </profiles>
-
 </project>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionKDiagArgs.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionKDiagArgs.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionKDiagArgs.java
deleted file mode 100644
index 061121e..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionKDiagArgs.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.client.params;
-
-import com.beust.jcommander.Parameter;
-import com.beust.jcommander.Parameters;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
-import org.apache.hadoop.yarn.service.exceptions.UsageException;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
-
-@Parameters(commandNames = { SliderActions.ACTION_KDIAG},
-            commandDescription = SliderActions.DESCRIBE_ACTION_KDIAG)
-
-public class ActionKDiagArgs extends AbstractActionArgs {
-
-  @Override
-  public String getActionName() {
-    return SliderActions.ACTION_KDIAG;
-  }
-
-  @Parameter(names = {ARG_SERVICES}, variableArity = true,
-    description =" list of services to check")
-  public List<String> services = new ArrayList<>();
-
-  @Parameter(names = {ARG_OUTPUT, ARG_OUTPUT_SHORT},
-      description = "output file for report")
-  public File out;
-
-  @Parameter(names = {ARG_KEYTAB}, description = "keytab to use")
-  public File keytab;
-
-  @Parameter(names = {ARG_KEYLEN}, description = "minimum key length")
-  public int keylen = 256;
-
-  @Parameter(names = {ARG_PRINCIPAL}, description = "principal to log in from 
a keytab")
-  public String principal;
-
-  @Parameter(names = {ARG_SECURE}, description = "Is security required")
-  public boolean secure = false;
-
-  @Override
-  public int getMinParams() {
-    return 0;
-  }
-
-  @Override
-  public void validate() throws BadCommandArgumentsException, UsageException {
-    super.validate();
-    if (keytab != null && SliderUtils.isUnset(principal)) {
-      throw new UsageException("Missing argument " + ARG_PRINCIPAL);
-    }
-    if (keytab == null && SliderUtils.isSet(principal)) {
-      throw new UsageException("Missing argument " + ARG_KEYTAB);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ClientArgs.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ClientArgs.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ClientArgs.java
index 7b957fa..09cae24 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ClientArgs.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ClientArgs.java
@@ -47,7 +47,6 @@ public class ClientArgs extends CommonArgs {
   private final ActionFlexArgs actionFlexArgs = new ActionFlexArgs();
   private final ActionFreezeArgs actionFreezeArgs = new ActionFreezeArgs();
   private final ActionHelpArgs actionHelpArgs = new ActionHelpArgs();
-  private final ActionKDiagArgs actionKDiagArgs = new ActionKDiagArgs();
   private final ActionKeytabArgs actionKeytabArgs = new ActionKeytabArgs();
   private final ActionListArgs actionListArgs = new ActionListArgs();
   private final ActionRegistryArgs actionRegistryArgs = new 
ActionRegistryArgs();
@@ -207,10 +206,6 @@ public class ClientArgs extends CommonArgs {
         bindCoreAction(actionHelpArgs);
         break;
 
-      case ACTION_KDIAG:
-        bindCoreAction(actionKDiagArgs);
-        break;
-
       case ACTION_KEYTAB:
         bindCoreAction(actionKeytabArgs);
         break;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
index add2475..62d7a6a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
@@ -19,8 +19,6 @@
 package org.apache.hadoop.yarn.service.registry;
 
 import com.google.common.base.Preconditions;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.PathNotFoundException;
 import org.apache.hadoop.registry.client.api.RegistryConstants;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
@@ -32,6 +30,8 @@ import 
org.apache.hadoop.registry.client.binding.RegistryPathUtils;
 import org.apache.hadoop.registry.client.types.ServiceRecord;
 import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
 import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.List;
@@ -43,8 +43,8 @@ import static 
org.apache.hadoop.registry.client.binding.RegistryPathUtils.join;
  * is registered, offers access to the record and other things.
  */
 public class YarnRegistryViewForProviders {
-  private static final Log LOG =
-      LogFactory.getLog(YarnRegistryViewForProviders.class);
+  private static final Logger LOG =
+      LoggerFactory.getLogger(YarnRegistryViewForProviders.class);
 
   private final RegistryOperations registryOperations;
   private final String user;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3541d107/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
----------------------------------------------------------------------
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
deleted file mode 100644
index c0712c3..0000000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
+++ /dev/null
@@ -1,680 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.commons.io.IOUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.SaslPropertiesResolver;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.security.token.TokenIdentifier;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Shell;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.crypto.Cipher;
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.lang.reflect.InvocationTargetException;
-import java.net.InetAddress;
-import java.security.NoSuchAlgorithmException;
-import java.util.Collection;
-import java.util.Date;
-import java.util.List;
-import java.util.regex.Pattern;
-
-import static org.apache.hadoop.security.UserGroupInformation.*;
-import static org.apache.hadoop.security.authentication.util.KerberosUtil.*;
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;
-
-/**
- * Kerberos diagnostics
- * At some point this may move to hadoop core, so please keep use of slider
- * methods and classes to ~0.
- *
- * This operation expands some of the diagnostic output of the security code,
- * but not all. For completeness
- *
- * Set the environment variable {@code HADOOP_JAAS_DEBUG=true}
- * Set the log level for {@code org.apache.hadoop.security=DEBUG}
- */
-public class KerberosDiags implements Closeable {
-
-  private static final Logger LOG = 
LoggerFactory.getLogger(KerberosDiags.class);
-  public static final String KRB5_CCNAME = "KRB5CCNAME";
-  public static final String JAVA_SECURITY_KRB5_CONF
-    = "java.security.krb5.conf";
-  public static final String JAVA_SECURITY_KRB5_REALM
-    = "java.security.krb5.realm";
-  public static final String SUN_SECURITY_KRB5_DEBUG
-    = "sun.security.krb5.debug";
-  public static final String SUN_SECURITY_SPNEGO_DEBUG
-    = "sun.security.spnego.debug";
-  public static final String SUN_SECURITY_JAAS_FILE
-    = "java.security.auth.login.config";
-  public static final String KERBEROS_KINIT_COMMAND
-    = "hadoop.kerberos.kinit.command";
-  public static final String HADOOP_AUTHENTICATION_IS_DISABLED
-      = "Hadoop authentication is disabled";
-  public static final String UNSET = "(unset)";
-  public static final String NO_DEFAULT_REALM = "Cannot locate default realm";
-
-  private final Configuration conf;
-  private final List<String> services;
-  private final PrintStream out;
-  private final File keytab;
-  private final String principal;
-  private final long minKeyLength;
-  private final boolean securityRequired;
-
-  public static final String CAT_JVM = "JVM";
-  public static final String CAT_JAAS = "JAAS";
-  public static final String CAT_CONFIG = "CONFIG";
-  public static final String CAT_LOGIN = "LOGIN";
-  public static final String CAT_KERBEROS = "KERBEROS";
-  public static final String CAT_SASL = "SASL";
-
-  @SuppressWarnings("IOResourceOpenedButNotSafelyClosed")
-  public KerberosDiags(Configuration conf,
-      PrintStream out,
-      List<String> services,
-      File keytab,
-      String principal,
-      long minKeyLength,
-      boolean securityRequired) {
-    this.conf = conf;
-    this.services = services;
-    this.keytab = keytab;
-    this.principal = principal;
-    this.out = out;
-    this.minKeyLength = minKeyLength;
-    this.securityRequired = securityRequired;
-  }
-
-  @Override
-  public void close() throws IOException {
-    flush();
-  }
-
-  /**
-   * Execute diagnostics.
-   * <p>
-   * Things it would be nice if UGI made accessible
-   * <ol>
-   *   <li>A way to enable JAAS debug programatically</li>
-   *   <li>Access to the TGT</li>
-   * </ol>
-   * @return true if security was enabled and all probes were successful
-   * @throws KerberosDiagsFailure explicitly raised failure
-   * @throws Exception other security problems
-   */
-  @SuppressWarnings("deprecation")
-  public boolean execute() throws Exception {
-
-    title("Kerberos Diagnostics scan at %s",
-        new Date(System.currentTimeMillis()));
-
-    // check that the machine has a name
-    println("Hostname: %s",
-        InetAddress.getLocalHost().getCanonicalHostName());
-
-    // Fail fast on a JVM without JCE installed.
-    validateKeyLength();
-
-    // look at realm
-    println("JVM Kerberos Login Module = %s", getKrb5LoginModuleName());
-    printDefaultRealm();
-
-    title("System Properties");
-    for (String prop : new String[]{
-      JAVA_SECURITY_KRB5_CONF,
-      JAVA_SECURITY_KRB5_REALM,
-      SUN_SECURITY_KRB5_DEBUG,
-      SUN_SECURITY_SPNEGO_DEBUG,
-      SUN_SECURITY_JAAS_FILE
-    }) {
-      printSysprop(prop);
-    }
-
-    title("Environment Variables");
-    for (String env : new String[]{
-      "HADOOP_JAAS_DEBUG",
-      KRB5_CCNAME,
-      "HADOOP_USER_NAME",
-      "HADOOP_PROXY_USER",
-      HADOOP_TOKEN_FILE_LOCATION,
-    }) {
-      printEnv(env);
-    }
-
-    for (String prop : new String[]{
-      KERBEROS_KINIT_COMMAND,
-      HADOOP_SECURITY_AUTHENTICATION,
-      HADOOP_SECURITY_AUTHORIZATION,
-      "hadoop.kerberos.min.seconds.before.relogin",    // not in 2.6
-      "hadoop.security.dns.interface",   // not in 2.6
-      "hadoop.security.dns.nameserver",  // not in 2.6
-      HADOOP_RPC_PROTECTION,
-      HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
-      HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX,
-      HADOOP_SECURITY_GROUP_MAPPING,
-      "hadoop.security.impersonation.provider.class",    // not in 2.6
-      "dfs.data.transfer.protection" // HDFS
-    }) {
-      printConfOpt(prop);
-    }
-
-    // check that authentication is enabled
-    if (SecurityUtil.getAuthenticationMethod(conf)
-        .equals(AuthenticationMethod.SIMPLE)) {
-      println(HADOOP_AUTHENTICATION_IS_DISABLED);
-      failif(securityRequired, CAT_CONFIG, HADOOP_AUTHENTICATION_IS_DISABLED);
-      // no security, skip rest of test
-      return false;
-    }
-
-    validateKrb5File();
-    validateSasl(HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS);
-    validateSasl("dfs.data.transfer.saslproperties.resolver.class");
-    validateKinitExecutable();
-    validateJAAS();
-    // now the big test: login, then try again
-    boolean krb5Debug = getAndSet(SUN_SECURITY_KRB5_DEBUG);
-    boolean spnegoDebug = getAndSet(SUN_SECURITY_SPNEGO_DEBUG);
-    try {
-      title("Logging in");
-
-      if (keytab != null) {
-        dumpKeytab(keytab);
-        loginFromKeytab();
-      } else {
-        UserGroupInformation loginUser = getLoginUser();
-        dumpUGI("Log in user", loginUser);
-        validateUGI("Login user", loginUser);
-        println("Ticket based login: %b", isLoginTicketBased());
-        println("Keytab based login: %b", isLoginKeytabBased());
-      }
-
-      return true;
-    } finally {
-      // restore original system properties
-      System.setProperty(SUN_SECURITY_KRB5_DEBUG,
-        Boolean.toString(krb5Debug));
-      System.setProperty(SUN_SECURITY_SPNEGO_DEBUG,
-        Boolean.toString(spnegoDebug));
-    }
-  }
-
-  /**
-   * Fail fast on a JVM without JCE installed.
-   *
-   * This is a recurrent problem
-   * (that is: it keeps creeping back with JVM updates);
-   * a fast failure is the best tactic
-   * @throws NoSuchAlgorithmException
-   */
-
-  protected void validateKeyLength() throws NoSuchAlgorithmException {
-    int aesLen = Cipher.getMaxAllowedKeyLength("AES");
-    println("Maximum AES encryption key length %d bits", aesLen);
-    failif (aesLen < minKeyLength,
-        CAT_JVM,
-        "Java Cryptography Extensions are not installed on this JVM."
-        +" Maximum supported key length %s - minimum required %d",
-        aesLen, minKeyLength);
-  }
-
-  /**
-   * Get the default realm.
-   * <p>
-   * Not having a default realm may be harmless, so is noted at info.
-   * All other invocation failures are downgraded to warn, as
-   * follow-on actions may still work.
-   * failure to invoke the method via introspection is rejected,
-   * as it's a sign of JVM compatibility issues that may have other
-   * consequences
-   */
-  protected void printDefaultRealm() {
-    try {
-      println("Default Realm = %s",
-          getDefaultRealm());
-    } catch (ClassNotFoundException
-        | IllegalAccessException
-        | NoSuchMethodException e) {
-
-      throw new KerberosDiagsFailure(CAT_JVM, e,
-          "Failed to invoke krb5.Config.getDefaultRealm: %s", e);
-    } catch (InvocationTargetException e) {
-      Throwable cause = e.getCause() != null ? e.getCause() : e;
-      if (cause.toString().contains(NO_DEFAULT_REALM)) {
-        // exception raised if there is no default realm. This is not
-        // always a problem, so downgrade to a message.
-        println("Host has no default realm");
-        LOG.debug(cause.toString(), cause);
-      } else {
-        println("Kerberos.getDefaultRealm() failed: %s\n%s",
-            cause,
-            org.apache.hadoop.util.StringUtils.stringifyException(cause));
-      }
-    }
-  }
-
-  /**
-   * Locate the krb5.conf file and dump it.
-   * No-op on windows.
-   * @throws IOException
-   */
-  private void validateKrb5File() throws IOException {
-    if (!Shell.WINDOWS) {
-      title("Locating Kerberos configuration file");
-      String krbPath = "/etc/krb5.conf";
-      String jvmKrbPath = System.getProperty(JAVA_SECURITY_KRB5_CONF);
-      if (jvmKrbPath != null) {
-        println("Setting kerberos path from sysprop %s: %s",
-          JAVA_SECURITY_KRB5_CONF, jvmKrbPath);
-        krbPath = jvmKrbPath;
-      }
-
-      String krb5name = System.getenv(KRB5_CCNAME);
-      if (krb5name != null) {
-        println("Setting kerberos path from environment variable %s: %s",
-          KRB5_CCNAME, krb5name);
-        krbPath = krb5name;
-        if (jvmKrbPath != null) {
-          println("Warning - both %s and %s were set - %s takes priority",
-            JAVA_SECURITY_KRB5_CONF, KRB5_CCNAME, KRB5_CCNAME);
-        }
-      }
-
-      File krbFile = new File(krbPath);
-      println("Kerberos configuration file = %s", krbFile);
-      failif(!krbFile.exists(),
-          CAT_KERBEROS,
-          "Kerberos configuration file %s not found", krbFile);
-      dump(krbFile);
-    }
-  }
-
-  /**
-   * Dump a keytab: list all principals.
-   * @param keytabFile the keytab file
-   * @throws IOException IO problems
-   */
-  public void dumpKeytab(File keytabFile) throws IOException {
-    title("Examining keytab %s", keytabFile);
-    File kt = keytabFile.getCanonicalFile();
-    failif(!kt.exists(), CAT_CONFIG, "Keytab not found: %s", kt);
-    failif(!kt.isFile(), CAT_CONFIG, "Keytab is not a valid file: %s", kt);
-
-    String[] names = getPrincipalNames(keytabFile.getCanonicalPath(),
-        Pattern.compile(".*"));
-    println("keytab entry count: %d", names.length);
-    for (String name : names) {
-      println("    %s", name);
-    }
-    println("-----");
-  }
-
-  /**
-   * Log in from a keytab, dump the UGI, validate it, then try and log in 
again.
-   * That second-time login catches JVM/Hadoop compatibility problems.
-   * @throws IOException
-   */
-  private void loginFromKeytab() throws IOException {
-    UserGroupInformation ugi;
-    String identity;
-    if (keytab != null) {
-      File kt = keytab.getCanonicalFile();
-      println("Using keytab %s principal %s", kt, principal);
-      identity = principal;
-
-      failif(StringUtils.isEmpty(principal), CAT_KERBEROS,
-          "No principal defined");
-      ugi = loginUserFromKeytabAndReturnUGI(principal, kt.getPath());
-      dumpUGI(identity, ugi);
-      validateUGI(principal, ugi);
-
-      title("Attempting to log in from keytab again");
-      // package scoped -hence the reason why this class must be in the
-      // hadoop.security package
-      setShouldRenewImmediatelyForTests(true);
-      // attempt a new login
-      ugi.reloginFromKeytab();
-    } else {
-      println("No keytab: logging is as current user");
-    }
-  }
-
-  /**
-   * Dump a UGI.
-   * @param title title of this section
-   * @param ugi UGI to dump
-   * @throws IOException
-   */
-  private void dumpUGI(String title, UserGroupInformation ugi)
-    throws IOException {
-    title(title);
-    println("UGI instance = %s", ugi);
-    println("Has kerberos credentials: %b", ugi.hasKerberosCredentials());
-    println("Authentication method: %s", ugi.getAuthenticationMethod());
-    println("Real Authentication method: %s",
-      ugi.getRealAuthenticationMethod());
-    title("Group names");
-    for (String name : ugi.getGroupNames()) {
-      println(name);
-    }
-    title("Credentials");
-    Credentials credentials = ugi.getCredentials();
-    List<Text> secretKeys = credentials.getAllSecretKeys();
-    title("Secret keys");
-    if (!secretKeys.isEmpty()) {
-      for (Text secret: secretKeys) {
-        println("%s", secret);
-      }
-    } else {
-      println("(none)");
-    }
-
-    dumpTokens(ugi);
-  }
-
-  /**
-   * Validate the UGI: verify it is kerberized.
-   * @param messagePrefix message in exceptions
-   * @param user user to validate
-   */
-  private void validateUGI(String messagePrefix, UserGroupInformation user) {
-    failif(!user.hasKerberosCredentials(),
-        CAT_LOGIN, "%s: No kerberos credentials for %s", messagePrefix, user);
-    failif(user.getAuthenticationMethod() == null,
-        CAT_LOGIN, "%s: Null AuthenticationMethod for %s", messagePrefix, 
user);
-  }
-
-  /**
-   * A cursory look at the {@code kinit} executable.
-   * If it is an absolute path: it must exist with a size > 0.
-   * If it is just a command, it has to be on the path. There's no check
-   * for that -but the PATH is printed out.
-   */
-  private void validateKinitExecutable() {
-    String kinit = conf.getTrimmed(KERBEROS_KINIT_COMMAND, "");
-    if (!kinit.isEmpty()) {
-      File kinitPath = new File(kinit);
-      println("%s = %s", KERBEROS_KINIT_COMMAND, kinitPath);
-      if (kinitPath.isAbsolute()) {
-        failif(!kinitPath.exists(), CAT_KERBEROS,
-            "%s executable does not exist: %s",
-            KERBEROS_KINIT_COMMAND, kinitPath);
-        failif(!kinitPath.isFile(), CAT_KERBEROS,
-            "%s path does not refer to a file: %s",
-            KERBEROS_KINIT_COMMAND, kinitPath);
-        failif(kinitPath.length() == 0, CAT_KERBEROS,
-            "%s file is empty: %s",
-            KERBEROS_KINIT_COMMAND, kinitPath);
-      } else {
-        println("Executable %s is relative -must be on the PATH", kinit);
-        printEnv("PATH");
-      }
-    }
-  }
-
-  /**
-   * Try to load the SASL resolver.
-   * @param saslPropsResolverKey key for the SASL resolver
-   */
-  private void validateSasl(String saslPropsResolverKey) {
-    title("Resolving SASL property %s", saslPropsResolverKey);
-    String saslPropsResolver = conf.getTrimmed(saslPropsResolverKey);
-    try {
-      Class<? extends SaslPropertiesResolver> resolverClass = conf.getClass(
-          saslPropsResolverKey,
-          SaslPropertiesResolver.class, SaslPropertiesResolver.class);
-      println("Resolver is %s", resolverClass);
-    } catch (RuntimeException e) {
-      throw new KerberosDiagsFailure(CAT_SASL, e,
-          "Failed to load %s class %s",
-          saslPropsResolverKey, saslPropsResolver);
-    }
-  }
-
-  /**
-   * Validate any JAAS entry referenced in the {@link #SUN_SECURITY_JAAS_FILE}
-   * property.
-   */
-  private void validateJAAS() {
-    String jaasFilename = System.getProperty(SUN_SECURITY_JAAS_FILE);
-    if (jaasFilename != null) {
-      title("JAAS");
-      File jaasFile = new File(jaasFilename);
-      println("JAAS file is defined in %s: %s",
-          SUN_SECURITY_JAAS_FILE, jaasFile);
-      failif(!jaasFile.exists(), CAT_JAAS,
-          "JAAS file does not exist: %s", jaasFile);
-      failif(!jaasFile.isFile(), CAT_JAAS,
-          "Specified JAAS file is not a file: %s", jaasFile);
-    }
-  }
-
-  /**
-   * Dump all tokens of a user
-   * @param user user
-   */
-  public void dumpTokens(UserGroupInformation user) {
-    Collection<Token<? extends TokenIdentifier>> tokens
-      = user.getCredentials().getAllTokens();
-    title("Token Count: %d", tokens.size());
-    for (Token<? extends TokenIdentifier> token : tokens) {
-      println("Token %s", token.getKind());
-    }
-  }
-
-  /**
-   * Set the System property to true; return the old value for caching
-   * @param sysprop property
-   * @return the previous value
-   */
-  private boolean getAndSet(String sysprop) {
-    boolean old = Boolean.getBoolean(sysprop);
-    System.setProperty(sysprop, "true");
-    return old;
-  }
-
-  /**
-   * Flush all active output channels, including {@Code System.err},
-   * so as to stay in sync with any JRE log messages.
-   */
-  private void flush() {
-    if (out != null) {
-      out.flush();
-    } else {
-      System.out.flush();
-    }
-    System.err.flush();
-  }
-
-  /**
-   * Format and print a line of output.
-   * This goes to any output file, or
-   * is logged at info. The output is flushed before and after, to
-   * try and stay in sync with JRE logging.
-   * @param format format string
-   * @param args any arguments
-   */
-  @VisibleForTesting
-  public void println(String format, Object... args) {
-    println(format(format, args));
-  }
-
-  /**
-   * Print a line of output. This goes to any output file, or
-   * is logged at info. The output is flushed before and after, to
-   * try and stay in sync with JRE logging.
-   * @param msg message string
-   */
-  @VisibleForTesting
-  private void println(String msg) {
-    flush();
-    if (out != null) {
-      out.println(msg);
-    } else {
-      LOG.info(msg);
-    }
-    flush();
-  }
-
-  /**
-   * Print a title entry
-   * @param format format string
-   * @param args any arguments
-   */
-  private void title(String format, Object... args) {
-    println("");
-    println("");
-    String msg = "== " + format(format, args) + " ==";
-    println(msg);
-    println("");
-  }
-
-  /**
-   * Print a system property, or {@link #UNSET} if unset.
-   * @param property property to print
-   */
-  private void printSysprop(String property) {
-    println("%s = \"%s\"", property,
-        System.getProperty(property, UNSET));
-  }
-
-  /**
-   * Print a configuration option, or {@link #UNSET} if unset.
-   * @param option option to print
-   */
-  private void printConfOpt(String option) {
-    println("%s = \"%s\"", option, conf.get(option, UNSET));
-  }
-
-  /**
-   * Print an environment variable's name and value; printing
-   * {@link #UNSET} if it is not set
-   * @param variable environment variable
-   */
-  private void printEnv(String variable) {
-    String env = System.getenv(variable);
-    println("%s = \"%s\"", variable, env != null ? env : UNSET);
-  }
-
-  /**
-   * Dump any file to standard out; add a trailing newline
-   * @param file file to dump
-   * @throws IOException IO problems
-   */
-  public void dump(File file) throws IOException {
-    try (FileInputStream in = new FileInputStream(file)) {
-      for (String line : IOUtils.readLines(in)) {
-        println("%s", line);
-      }
-    }
-    println("");
-  }
-
-  /**
-   * Format and raise a failure
-   *
-   * @param category category for exception
-   * @param message string formatting message
-   * @param args any arguments for the formatting
-   * @throws KerberosDiagsFailure containing the formatted text
-   */
-  private void fail(String category, String message, Object... args)
-    throws KerberosDiagsFailure {
-    throw new KerberosDiagsFailure(category, message, args);
-  }
-
-  /**
-   * Conditional failure with string formatted arguments
-   * @param condition failure condition
-   * @param category category for exception
-   * @param message string formatting message
-   * @param args any arguments for the formatting
-   * @throws KerberosDiagsFailure containing the formatted text
-   *         if the condition was met
-   */
-  private void failif(boolean condition,
-      String category,
-      String message,
-      Object... args)
-    throws KerberosDiagsFailure {
-    if (condition) {
-      fail(category, message, args);
-    }
-  }
-
-  /**
-   * Format a string, treating a call where there are no varags values
-   * as a string to pass through unformatted.
-   * @param message message, which is either a format string + args, or
-   * a general string
-   * @param args argument array
-   * @return a string for printing.
-   */
-  public static String format(String message, Object... args) {
-    if (args.length == 0) {
-      return message;
-    } else {
-      return String.format(message, args);
-    }
-  }
-
-  /**
-   * Diagnostics failures return the exit code 41, "unauthorized".
-   *
-   * They have a category, initially for testing: the category can be
-   * validated without having to match on the entire string.
-   */
-  public static class KerberosDiagsFailure extends ExitUtil.ExitException {
-    private final String category;
-
-    public KerberosDiagsFailure(String category, String message) {
-      super(41, category + ": " + message);
-      this.category = category;
-    }
-
-    public KerberosDiagsFailure(String category, String message, Object... 
args) {
-      this(category, format(message, args));
-    }
-
-    public KerberosDiagsFailure(String category, Throwable throwable,
-        String message, Object... args) {
-      this(category, message, args);
-      initCause(throwable);
-    }
-
-    public String getCategory() {
-      return category;
-    }
-  }
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to