http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/conf/command_template.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/conf/command_template.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/conf/command_template.json
deleted file mode 100644
index da06c13..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/conf/command_template.json
+++ /dev/null
@@ -1,168 +0,0 @@
-{
-  "roleCommand": "{{COMMAND}}",
-  "clusterName": "{{CLUSTER_NAME}}",
-  "hostname": "{{HOST_NAME}}",
-  "hostLevelParams": {
-    "java_home": "/usr/jdk64/jdk1.7.0_45"
-  },
-  "commandType": "EXECUTION_COMMAND",
-  "roleParams": {},
-  "serviceName": "{{SERVICE_NAME}}",
-  "role": "{{ROLE_NAME}}",
-  "commandParams": {},
-  "taskId": "{{TASK_ID}}",
-  "public_hostname": "{{HOST_NAME}}",
-  "configurations": {
-    "hbase-log4j": {
-      "log4j.threshold": "ALL",
-      "log4j.rootLogger": "${hbase.root.logger}",
-      "log4j.logger.org.apache.zookeeper": "INFO",
-      "log4j.logger.org.apache.hadoop.hbase": "DEBUG",
-      "log4j.logger.org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher": 
"INFO",
-      "log4j.logger.org.apache.hadoop.hbase.zookeeper.ZKUtil": "INFO",
-      "log4j.category.SecurityLogger": "${hbase.security.logger}",
-      "log4j.appender.console": "org.apache.log4j.ConsoleAppender",
-      "log4j.appender.console.target": "System.err",
-      "log4j.appender.console.layout": "org.apache.log4j.PatternLayout",
-      "log4j.appender.console.layout.ConversionPattern": "%d{ISO8601} %-5p 
[%t] %c{2}: %m%n",
-      "log4j.appender.RFAS": "org.apache.log4j.RollingFileAppender",
-      "log4j.appender.RFAS.layout": "org.apache.log4j.PatternLayout",
-      "log4j.appender.RFAS.layout.ConversionPattern": "%d{ISO8601} %p %c: 
%m%n",
-      "log4j.appender.RFAS.MaxFileSize": "${hbase.security.log.maxfilesize}",
-      "log4j.appender.RFAS.MaxBackupIndex": 
"${hbase.security.log.maxbackupindex}",
-      "log4j.appender.RFAS.File": 
"${hbase.log.dir}/${hbase.security.log.file}",
-      "log4j.appender.RFA": "org.apache.log4j.RollingFileAppender",
-      "log4j.appender.RFA.layout": "org.apache.log4j.PatternLayout",
-      "log4j.appender.RFA.layout.ConversionPattern": "%d{ISO8601} %-5p [%t] 
%c{2}: %m%n",
-      "log4j.appender.RFA.MaxFileSize": "${hbase.log.maxfilesize}",
-      "log4j.appender.RFA.MaxBackupIndex": "${hbase.log.maxbackupindex}",
-      "log4j.appender.RFA.File": "${hbase.log.dir}/${hbase.log.file}",
-      "log4j.appender.NullAppender": "org.apache.log4j.varia.NullAppender",
-      "log4j.appender.DRFA": "org.apache.log4j.DailyRollingFileAppender",
-      "log4j.appender.DRFA.layout": "org.apache.log4j.PatternLayout",
-      "log4j.appender.DRFA.layout.ConversionPattern": "%d{ISO8601} %-5p [%t] 
%c{2}: %m%n",
-      "log4j.appender.DRFA.File": "${hbase.log.dir}/${hbase.log.file}",
-      "log4j.appender.DRFA.DatePattern": ".yyyy-MM-dd",
-      "log4j.additivity.SecurityLogger": "false",
-      "hbase.security.logger": "INFO,console",
-      "hbase.security.log.maxfilesize": "256MB",
-      "hbase.security.log.maxbackupindex": "20",
-      "hbase.security.log.file": "SecurityAuth.audit",
-      "hbase.root.logger": "INFO,console",
-      "hbase.log.maxfilesize": "256MB",
-      "hbase.log.maxbackupindex": "20",
-      "hbase.log.file": "hbase.log",
-      "hbase.log.dir": "."
-    },
-    "global": {
-      "hbase_root": "{{HBASE_HOME}}",
-      "hbase_pid_dir": "{{PID_DIR}}",
-      "proxyuser_group": "users",
-      "syncLimit": "5",
-      "hbase_regionserver_heapsize": "{{REGION_SERVER_HEAP_SIZE}}",
-      "rca_enabled": "false",
-      "tickTime": "2000",
-      "hbase_master_heapsize": "{{MASTER_HEAP_SIZE}}",
-      "initLimit": "10",
-      "user_group": "{{GROUP_NAME}}",
-      "hbase_user": "{{USER_NAME}}",
-      "hbase_log_dir": "{{LOG_DIR}}"
-    },
-    "hdfs-site": {
-      "dfs.namenode.checkpoint.period": "21600",
-      "dfs.namenode.avoid.write.stale.datanode": "true",
-      "dfs.namenode.checkpoint.txns": "1000000",
-      "dfs.block.access.token.enable": "true",
-      "dfs.support.append": "true",
-      "dfs.datanode.address": "0.0.0.0:${ambari.dfs.datanode.port}",
-      "dfs.cluster.administrators": " hdfs",
-      "dfs.replication": "3",
-      "ambari.dfs.datanode.http.port": "50075",
-      "dfs.datanode.balance.bandwidthPerSec": "6250000",
-      "dfs.namenode.safemode.threshold-pct": "1.0f",
-      "dfs.namenode.checkpoint.edits.dir": "${dfs.namenode.checkpoint.dir}",
-      "dfs.permissions.enabled": "true",
-      "dfs.client.read.shortcircuit": "true",
-      "dfs.namenode.https-address": "{{NAMENODE_HTTPS_ADDRESS}}",
-      "dfs.journalnode.edits.dir": "/grid/0/hdfs/journal",
-      "dfs.blocksize": "134217728",
-      "dfs.datanode.max.transfer.threads": "1024",
-      "dfs.datanode.du.reserved": "1073741824",
-      "dfs.webhdfs.enabled": "true",
-      "dfs.namenode.handler.count": "100",
-      "dfs.namenode.checkpoint.dir": "/hadoop/hdfs/namesecondary",
-      "fs.permissions.umask-mode": "022",
-      "dfs.datanode.http.address": "0.0.0.0:${ambari.dfs.datanode.http.port}",
-      "dfs.datanode.ipc.address": "0.0.0.0:8010",
-      "dfs.datanode.data.dir": "/hadoop/hdfs/data",
-      "dfs.namenode.http-address": "{{NAMENODE_HTTP_ADDRESS}}",
-      "dfs.blockreport.initialDelay": "120",
-      "dfs.datanode.failed.volumes.tolerated": "0",
-      "dfs.namenode.accesstime.precision": "0",
-      "ambari.dfs.datanode.port": "50010",
-      "dfs.namenode.avoid.read.stale.datanode": "true",
-      "dfs.namenode.secondary.http-address": "c6402.ambari.apache.org:50090",
-      "dfs.namenode.stale.datanode.interval": "30000",
-      "dfs.heartbeat.interval": "3",
-      "dfs.client.read.shortcircuit.streams.cache.size": "4096",
-      "dfs.permissions.superusergroup": "hdfs",
-      "dfs.https.port": "50470",
-      "dfs.journalnode.http-address": "0.0.0.0:8480",
-      "dfs.domain.socket.path": "/var/lib/hadoop-hdfs/dn_socket",
-      "dfs.namenode.write.stale.datanode.ratio": "1.0f",
-      "dfs.hosts.exclude": "/etc/hadoop/conf/dfs.exclude",
-      "dfs.datanode.data.dir.perm": "750",
-      "dfs.namenode.name.dir.restore": "true",
-      "dfs.replication.max": "50",
-      "dfs.namenode.name.dir": "/hadoop/hdfs/namenode"
-    },
-    "hbase-site": {
-      "hbase.hstore.flush.retries.number": "120",
-      "hbase.client.keyvalue.maxsize": "10485760",
-      "hbase.hstore.compactionThreshold": "3",
-      "hbase.rootdir": "{{HBASE_ROOT_DIR}}",
-      "hbase.stagingdir": "{{HBASE_STAGING_DIR}}",
-      "hbase.regionserver.handler.count": "60",
-      "hbase.regionserver.global.memstore.lowerLimit": "0.38",
-      "hbase.hregion.memstore.block.multiplier": "2",
-      "hbase.hregion.memstore.flush.size": "134217728",
-      "hbase.superuser": "{{HBASE_SUPERUSER}}",
-      "hbase.zookeeper.property.clientPort": "{{ZK_CLIENT_PORT}}",
-      "hbase.regionserver.global.memstore.upperLimit": "0.4",
-      "zookeeper.session.timeout": "30000",
-      "hbase.tmp.dir": "/hadoop/hbase",
-      "hbase.hregion.max.filesize": "10737418240",
-      "hfile.block.cache.size": "0.40",
-      "hbase.security.authentication": "simple",
-      "hbase.defaults.for.version.skip": "true",
-      "hbase.zookeeper.quorum": "{{ZK_HOSTS}}",
-      "zookeeper.znode.parent": "{{ZK_NODE_PARENT}}",
-      "hbase.hstore.blockingStoreFiles": "10",
-      "hbase.hregion.majorcompaction": "86400000",
-      "hbase.security.authorization": "false",
-      "hbase.cluster.distributed": "true",
-      "hbase.hregion.memstore.mslab.enabled": "true",
-      "hbase.client.scanner.caching": "100",
-      "hbase.zookeeper.useMulti": "true",
-      "hbase.regionserver.info.port": "{{REGION_SERVER_INFO_PORT}}",
-      "hbase.master.info.port": "{{MASTER_INFO_PORT}}"
-    },
-    "core-site": {
-      "io.serializations": 
"org.apache.hadoop.io.serializer.WritableSerialization",
-      "gluster.daemon.user": "null",
-      "fs.trash.interval": "360",
-      "hadoop.security.authentication": "simple",
-      "io.compression.codecs": 
"org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.DefaultCodec",
-      "mapreduce.jobtracker.webinterface.trusted": "false",
-      "fs.AbstractFileSystem.glusterfs.impl": "null",
-      "fs.defaultFS": "{{DEFAULT_FS}}",
-      "ipc.client.connect.max.retries": "50",
-      "ipc.client.idlethreshold": "8000",
-      "io.file.buffer.size": "131072",
-      "hadoop.security.authorization": "false",
-      "hadoop.security.auth_to_local": "\n        
RULE:[2:$1@$0]([rn]m@.*)s/.*/yarn/\n        
RULE:[2:$1@$0](jhs@.*)s/.*/mapred/\n        
RULE:[2:$1@$0]([nd]n@.*)s/.*/hdfs/\n        RULE:[2:$1@$0](hm@.*)s/.*/hbase/\n  
      RULE:[2:$1@$0](rs@.*)s/.*/hbase/\n        DEFAULT",
-      "ipc.client.connection.maxidletime": "30000"
-    }
-  },
-  "commandId": "{{COMMAND_ID}}"
-}
\ No newline at end of file

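The {{NAME}} tokens in the deleted template above are placeholders that the
agent provider fills in before handing the command JSON to an agent. A minimal
sketch of that substitution in Python; fill_template and the values dict are
illustrative, not Slider's actual API:

import json
import re

def fill_template(text, values):
    # Replace every {{TOKEN}} with its value; an unknown token raises KeyError.
    return re.sub(r"\{\{([A-Z_]+)\}\}", lambda m: values[m.group(1)], text)

template = '{"roleCommand": "{{COMMAND}}", "clusterName": "{{CLUSTER_NAME}}"}'
command = json.loads(fill_template(template, {
    "COMMAND": "START",
    "CLUSTER_NAME": "hbase1",
}))
print(command["roleCommand"])  # -> START
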
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/role-node.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/role-node.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/role-node.xml
deleted file mode 100644
index aff1e05..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/agent/role-node.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~  or more contributor license agreements.  See the NOTICE file
-  ~  distributed with this work for additional information
-  ~  regarding copyright ownership.  The ASF licenses this file
-  ~  to you under the Apache License, Version 2.0 (the
-  ~  "License"); you may not use this file except in compliance
-  ~  with the License.  You may obtain a copy of the License at
-  ~
-  ~       http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~  Unless required by applicable law or agreed to in writing, software
-  ~  distributed under the License is distributed on an "AS IS" BASIS,
-  ~  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~  See the License for the specific language governing permissions and
-  ~  limitations under the License.
-  -->
-
-  <!--
-  Role options for an agent-managed node
-  -->
-<configuration>
-  <property>
-    <name>role.name</name>
-    <value>node</value>
-  </property>
-  
-  <property>
-    <name>role.instances</name>
-    <value>1</value>
-  </property>
-    
-  <property>
-    <name>role.priority</name>
-    <value>1</value>
-  </property>
-      
-  <property>
-    <name>role.placement.policy</name>
-    <value>2</value>
-  </property>
-  
-  <property>
-    <name>yarn.memory</name>
-    <value>256</value>
-  </property>
-  
-  <property>
-    <name>yarn.vcores</name>
-    <value>1</value>
-  </property>
-  
-  <property>
-    <name>jvm.heapsize</name>
-    <value>256M</value>
-  </property>
-  
-  <property>
-    <name>env.MALLOC_ARENA_MAX</name>
-    <value>4</value>
-  </property>
-
-</configuration>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/dynamic/application.properties
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/dynamic/application.properties b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/dynamic/application.properties
deleted file mode 100644
index d9b42de..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/dynamic/application.properties
+++ /dev/null
@@ -1,25 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#  
-#       http://www.apache.org/licenses/LICENSE-2.0
-#  
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-# gets updated at build time
-application.name=${pom.name}
-application.version=${pom.version}
-application.build=${buildNumber}
-application.build.java.version=${java.version}
-application.build.user=${user.name}
-application.build.info=${pom.name}-${pom.version} Built against commit# ${buildNumber} on Java ${java.version} by ${user.name}
-hadoop.build.info=${hadoop.version}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/appconf.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/appconf.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/appconf.json
deleted file mode 100644
index 81239a2..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/appconf.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
-  "schema": "http://example.org/specification/v2.0.0";,
-
-  "metadata": {
-
-
-  },
-
-  "global": {
-    "env.MALLOC_ARENA_MAX": "4"
-  },
-
-  "components": {
-    "slider-appmaster" : {
-      "jvm.heapsize": "256M"
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/internal.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/internal.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/internal.json
deleted file mode 100644
index 2367d8f..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/internal.json
+++ /dev/null
@@ -1,17 +0,0 @@
-{
-  "schema": "http://example.org/specification/v2.0.0";,
-
-  "metadata": {
-  },
-
-  "global": {
-    "internal.container.failure.shortlife": "60000",
-    "internal.container.failure.threshold": "5",
-    "slider.cluster.directory.permissions": "0770",
-    "slider.data.directory.permissions": "0770"
-  },
-
-  "components": {
-
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/resources.json
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/resources.json b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/resources.json
deleted file mode 100644
index 478ab7e..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/org/apache/slider/providers/slideram/instance/resources.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "schema": "http://example.org/specification/v2.0.0";,
-
-  "metadata": {
- 
-  },
-
-  "global": {
-  },
-
-  "components": {
-    "slider-appmaster": {
-      "yarn.component.instances": "1",
-      "yarn.vcores": "1",
-      "yarn.memory": "1024"
-    }
-  }
-}
\ No newline at end of file

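The three slideram instance files above (appconf.json, internal.json,
resources.json) share one shape: a "schema" URI, "metadata", cluster-wide
"global" options, and per-component options under "components". A minimal
sketch of reading the AM entry back out of resources.json; the local path is
illustrative:

import json

with open("resources.json") as f:  # assumes a local copy of the file above
    resources = json.load(f)

am = resources["components"]["slider-appmaster"]
print(am["yarn.memory"], am["yarn.vcores"], am["yarn.component.instances"])
# -> 1024 1 1
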
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/webapps/slideram/.keep
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/webapps/slideram/.keep b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/resources/webapps/slideram/.keep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/slider_keytabs.sh
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/slider_keytabs.sh b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/slider_keytabs.sh
deleted file mode 100644
index f0a8fc2..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/slider_keytabs.sh
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# This script exists to create the keytab set for a node on the cluster
-# including hbase and ZK alongside then YARN cores.
-
-# usage
-# keytabs <realm> <hostname>
-# validate the args
-
-num_vars=$#
-if [[ $num_vars < 2 ]]
-then
-  echo "Usage: $0 <realm> <hostname>"
-  exit -2
-fi
-
-realm="$1"
-hostname="$2"
-dest="."
-
-kadmin=kadmin.local
-
-${kadmin} <<EOF
-addprinc -randkey hdfs/${hostname}@${realm}
-addprinc -randkey yarn/${hostname}@${realm}
-addprinc -randkey HTTP/${hostname}@${realm}
-addprinc -randkey hbase/${hostname}@${realm}
-addprinc -randkey zookeeper/${hostname}@${realm}
-
-ktadd -norandkey -k ${dest}/hdfs.keytab  \
-  hdfs/${hostname}@${realm} \
-  HTTP/${hostname}@${realm}
-
-ktadd -norandkey -k ${dest}/yarn.keytab  \
-  yarn/${hostname}@${realm} \
-  HTTP/${hostname}@${realm}
-
-ktadd -norandkey -k ${dest}/hbase.keytab  \
-  hbase/${hostname}@${realm} 
-
-ktadd -norandkey -k ${dest}/zookeeper.keytab  \
-  zookeeper/${hostname}@${realm} 
-EOF
-
-exitcode=$?
-if  [[ $exitcode != 0 ]]
-then
-  echo "keytab generation from ${kadmin} failed with exit code $exitcode"
-  exit $exitcode
-else
-  echo "keytab files for ${hostname}@${realm} created"
-fi

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/yarnservice.py
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/yarnservice.py b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/yarnservice.py
deleted file mode 100644
index 1208c28..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/scripts/yarnservice.py
+++ /dev/null
@@ -1,383 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-"""Launches a yarn service
-
-WORK IN PROGRESS, IGNORE
-
-This is as work in progress project to build as new launcher script for
-any Hadoop service
-A key feature here is that the configs are defined in JSON files -
-files that are read in the order passed down, and merged into each other.
-
-The final merged file is used to define the java command to execute
--and hadoop XML files.
-
-
-It uses a JSON config file 
-  --jfile configuration file (JSON format)
-  -class classname
-  -Dname=value -arbitrary value to pass down to the JVM
-  --java: any JVM arg
-  -javaX: javaX value
-
-
- after an -- , all following commands are passed straight down to the invoked process.
-  # -xJ name=value JVM options. No: this is just another param
-  -xF file  file to load next. Files are loaded in order. 
-  -xD name=value again, values are loaded in order
-  -xU undefine
-  -xX main class, 'eXecute'
-
-  --  end of arguments
-  
-
-"""
-
-import sys
-# see : http://simplejson.readthedocs.org/en/latest/
-# and install w/ easy_install simplejson
-import simplejson
-
-KEY_JFILE = "-xF"
-KEY_DEF = "-xD"
-KEY_UNDEF = "-xU"
-KEY_EXEC = "-xX"
-KEY_ARGS = "--"
-
-COMMANDS = [KEY_JFILE, KEY_DEF, KEY_EXEC]
-
-#
-
-def debug(string) :
-  print string
-
-
-def pop_required_arg(arglist, previousArg) :
-  """
-  Pop the first element off the list and return it.
-  If the list is empty, raise an exception about a missing argument after the $previousArgument
-  """
-  if not len(arglist) :
-    raise Exception, "Missing required parameter after %s" % previousArg
-  head = arglist[0]
-  del arglist[0]
-  return head
-
-
-def parse_one_jfile(filename) :
-  """
-  read in the given config file
-  """
-  parsed = simplejson.load(open(filename, "r"))
-  return parsed
-
-# hand down sys.argv:
-def extract_jfiles(args) :
-  """ takes a list of arg strings and separates them into jfile references
-  and other arguments.
-  """
-  l = len(args)
-  stripped = []
-  jfiles = []
-  index = 0
-  while index < l :
-    elt = args[index]
-    index += 1
-    if KEY_JFILE == elt :
-      # a match
-      if index == l :
-        #overshoot
-        raise Exception("Missing filename after " + KEY_JFILE)
-      filename = args[index]
-      debug("jfile " + filename)
-      jfiles.append(filename)
-      index += 1
-    else :
-      stripped.append(elt)
-  return jfiles, stripped
-
-
-def extract_args(args) :
-  """
-  Take a list of args, parse them or fail, generating a dictionary of actions
-  Return: dictionary and all leftover arguments
-  """
-  jfiles = []
-  execs = []
-  defs = []
-  remainder = []
-  while len(args) :
-    # the next call cannot fail, because of the len(args)
-    arg = pop_required_arg(args, "")
-    if KEY_JFILE == arg :
-      jfiles.append(pop_required_arg(args, KEY_JFILE))
-    elif KEY_DEF == arg :
-      defs.append((KEY_DEF, pop_required_arg(args, KEY_DEF)))
-    elif KEY_UNDEF == arg :
-      defs.append((KEY_UNDEF, pop_required_arg(args, KEY_UNDEF)))
-    elif KEY_EXEC == arg :
-      execs.append(pop_required_arg(args, KEY_EXEC))
-    elif KEY_ARGS == arg :
-      remainder += args
-      args = []
-    else :
-      remainder.append(arg)
-      #build the action list
-  actions = {
-    KEY_JFILE : jfiles,
-    KEY_EXEC : execs,
-    KEY_DEF : defs,
-    KEY_ARGS : remainder
-  }
-  #end of the run, there's a dictionary and a list of unparsed values
-  return actions
-
-
-def get(conf, key, defVal) :
-  if conf.has_key(key) :
-    return conf[key]
-  else :
-    return defVal
-
-
-def merge_json(conf, json) :
-  """ merge in a json dict with the existing one
-  in: configuration dict, json dict
-  out: configuration'
-  """
-  for (key, val) in json.items() :
-    if key in conf :
-      #there's a match, do a more detailed merge
-      oldval = conf[key]
-      if type(oldval) == dict and type(val) == dict :
-      # two dictionary instances -merge
-        merge_json(oldval, val)
-      else :
-        conf[key] = val
-    else :
-      conf[key] = val
-  return conf
-
-
-def merge_jfile(conf, filename) :
-  json = parse_one_jfile(filename)
-  return merge_json(conf, json)
-
-
-def merge_jfile_list(conf, jfiles) :
-  """ merge a list of jfiles on top of a conf dict
-  """
-  for jfile in jfiles :
-    conf = merge_jfile(conf, jfile)
-  return conf
-
-
-def split_to_keyval_tuple(param) :
-  """
-  Split a key=value string into the (key,value) tuple
-  * an exception is raised on any string "=value"
-  * if there is no string: exception.
-  * a key only definition maps to (key, None)
-  * a "key=" definition maps to (key, "")
-  """
-  if not len(param) :
-    raise Exception, "Empty string cannot be a key=value definition"
-  equalsPos = param.find("=")
-  if equalsPos < 0 :
-    return param, None
-  elif not equalsPos :
-    raise Exception, "no key in argument %s" % param
-  else :
-    key = param[:(equalsPos - 1)]
-    value = param[(equalsPos + 1) :]
-    return key, value
-
-
-def recursive_define(conf, path, value) :
-  if not len(path) :
-    #fallen off the end of the world
-    return
-  entry = path[0]
-  if len(path) == 1 :
-    #end of list, apply it.
-    conf[entry] = value
-  else :
-    #there's 1+ elements below, yet there's a subdir here.
-    if conf.has_key(entry) and type(conf[entry]) == dict :
-      #it's a subdir, simple: recurse.
-      recursive_define(conf[entry], path[1 :], value)
-    else :
-      #either there is an entry that isn't a conf, or its not there. Same outcome.
-      subconf = {}
-      conf[entry] = subconf
-      recursive_define(subconf, path[1 :], value)
-
-def recursive_undef(conf, path) :
-  if not len(path) :
-    #fallen off the end of the world
-    return
-  entry = path[0]
-  if len(path) == 1 :
-    #end of list, apply it.
-    del conf[entry]
-  else :
-    #there's 1+ elements below, yet there's a subdir here.
-    if conf.has_key(entry) and type(conf[entry]) == dict :
-      #it's a subdir, simple: recurse.
-      recursive_undef(conf[entry], path[1 :])
-    else :
-      #either there is an entry that isn't a conf, or its not there. Same outcome.
-      pass
-
-def apply_action(conf, action, key, value) :
-  """
-  Apply either a def or undef action; splitting the key into a path and running through it.
-  """
-  keypath = key.split("/")
-  #now have a split key,
-  if KEY_DEF == action :
-    recursive_define(conf, keypath, value)
-  elif KEY_UNDEF == action :
-    recursive_undef(conf, keypath)
-
-
-def apply_local_definitions(conf, definitions) :
-  """
-  Run through the definition actions and apply them one by one
-  """
-  for defn in definitions :
-    # split into key=value; no value -> empty string
-    (action, param) = defn
-    if KEY_DEF == action :
-      (key, val) = split_to_keyval_tuple(param)
-      apply_action(conf, KEY_DEF, key, val)
-
-  return conf
-
-
-#def parse_args(conf, args) :
-#  """
-#   split an arg string, parse the jfiles & merge over the conf
-#  (configuration, args[]) -> (conf', stripped, jfiles[])
-#  """
-#  (jfiles, stripped) = extract_jfiles(args)
-#
-#  actions = extract_args(args)
-#  jfiles = actions[KEY_JFILE]
-#  conf = merge_jfile_list(conf, jfiles)
-#  return conf, actions
-
-
-def print_conf(conf) :
-  """ dump the configuration to the console
-  """
-  print "{"
-  for (key, val) in conf.items() :
-    if type(val) == dict :
-      print key
-      print_conf(val)
-    else :
-      print "" + key + " => " + str(val)
-  print "}"
-
-
-def list_to_str(l, spacer) :
-  result = ""
-  for elt in l :
-    if len(result) > 0 :
-      result += spacer
-    result += elt
-  return result
-
-
-def list_to_hxml_str(l) :
-  return list_to_str(l, ",")
-
-
-def export_kv_xml(output, key, value) :
-  line = "<property><name>" + key + "</name><value>" + str(value) + 
"</value>\n"
-  print line
-  output.write(line)
-
-
-def export_to_hadoop_xml(output, conf) :
-  """ export the conf to hadoop XML
-  dictionaries are skipped.
-  """
-  output.write("<configuration>\n")
-  for (key, value) in conf.items() :
-    if type(value) is list :
-      # list print
-      export_kv_xml(output, key, list_to_hxml_str(value))
-    else :
-      if type(value) is dict :
-        print "skipping dict " + key
-      else :
-        export_kv_xml(output, key, value)
-  output.write("</configuration>\n")
-
-
-def start(conf, stripped_args) :
-  """
-  start the process by grabbing exec/args for the arguments
-  """
-  ex = conf["exec"]
-  args = []
-  jsonargs = get(ex, "args", [])
-  args.extend(jsonargs)
-  args.extend(stripped_args)
-  classname = get(ex, "classname", "")
-  if not len(classname) :
-    raise Exception, "No classname supplied"
-  classname = get(ex, "classname", "")
-  commandline = ["java"]
-  classpath = []
-  jvmargs = []
-  commandline.extend(jvmargs)
-  commandline.append("-classpath")
-  commandline.append(list_to_str(classpath, ":"))
-  commandline.append("org.apache.hadoop.yarn.service.launcher.ServiceLauncher")
-  commandline.append(classname)
-  commandline.extend(args)
-  print "ready to exec : %s" % commandline
-
-
-def main() :
-#  (conf, stripped, jfiles) = parse_args({}, sys.argv[1 :])
-  actions = extract_args(sys.argv[1 :])
-  jfiles = actions[KEY_JFILE]
-  conf = merge_jfile_list({}, jfiles)
-  apply_local_definitions(conf, actions[KEY_DEF])
-  exec_args = actions[KEY_ARGS]
-
-  print_conf(conf)
-  #  if len(stripped) > 0 :
-  #got an output file
-  #    filename = stripped[0]
-  #    print "Writing XML configuration to " + filename
-  #    output = open(filename, "w")
-  #    export_to_hadoop_xml(output, conf["site"])
-  start(conf, exec_args)
-
-
-if __name__ == "__main__" :
-  main()
-
-

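The docstring above describes the launcher's config model: -xF files are
merged in order (later files win, nested dicts merge recursively) and
-xD name=value definitions descend a /-separated key path. A short sketch of
those merge semantics in Python 3, independent of the (Python 2 / simplejson)
script; the dict literals are illustrative:

def merge_json(conf, overlay):
    # Later configs win key-by-key; nested dicts merge instead of replacing.
    for key, val in overlay.items():
        if isinstance(conf.get(key), dict) and isinstance(val, dict):
            merge_json(conf[key], val)
        else:
            conf[key] = val
    return conf

def recursive_define(conf, path, value):
    # -xD a/b/c=v walks the path, overwriting non-dict entries on the way.
    head = path[0]
    if len(path) == 1:
        conf[head] = value
        return
    if not isinstance(conf.get(head), dict):
        conf[head] = {}
    recursive_define(conf[head], path[1:], value)

conf = {"site": {"fs.defaultFS": "hdfs://a", "io.file.buffer.size": "131072"}}
merge_json(conf, {"site": {"fs.defaultFS": "hdfs://b"}})   # a later -xF file
recursive_define(conf, "site/dfs.replication".split("/"), "3")
assert conf["site"] == {"fs.defaultFS": "hdfs://b",
                        "io.file.buffer.size": "131072",
                        "dfs.replication": "3"}
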
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/site/site.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/site/site.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/site/site.xml
deleted file mode 100644
index 3b5df7a..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/site/site.xml
+++ /dev/null
@@ -1,26 +0,0 @@
-<?xml version="1.0"?>
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~  or more contributor license agreements.  See the NOTICE file
-  ~  distributed with this work for additional information
-  ~  regarding copyright ownership.  The ASF licenses this file
-  ~  to you under the Apache License, Version 2.0 (the
-  ~  "License"); you may not use this file except in compliance
-  ~  with the License.  You may obtain a copy of the License at
-  ~
-  ~       http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~  Unless required by applicable law or agreed to in writing, software
-  ~  distributed under the License is distributed on an "AS IS" BASIS,
-  ~  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~  See the License for the specific language governing permissions and
-  ~  limitations under the License.
-  -->
-
-<project name="Slider">
-
-  <version position="right"/>
-  <body>
-    <menu ref="reports"/>
-  </body>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/MockServiceAM.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/MockServiceAM.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/MockServiceAM.java
index 9746d33..4fa81ee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/MockServiceAM.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/MockServiceAM.java
@@ -38,14 +38,13 @@ import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
 import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
 import org.apache.hadoop.yarn.client.api.impl.AMRMClientImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.proto.*;
 import org.apache.hadoop.yarn.proto.ClientAMProtocol;
+import org.apache.hadoop.yarn.service.api.records.Application;
 import org.apache.hadoop.yarn.service.component.Component;
 import org.apache.hadoop.yarn.service.component.ComponentState;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.exceptions.BadClusterStateException;
-import org.apache.slider.server.services.yarnregistry.YarnRegistryViewForProviders;
+import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
+import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 
 import java.io.IOException;
 import java.util.Collections;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
index ea75a90..73172bf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/ServiceTestUtils.java
@@ -18,12 +18,18 @@
 
 package org.apache.hadoop.yarn.service;
 
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.Resource;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Resource;
+import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
+import org.codehaus.jackson.map.PropertyNamingStrategy;
 
 public class ServiceTestUtils {
 
+  public static final JsonSerDeser<Application> JSON_SER_DESER =
+      new JsonSerDeser<>(Application.class,
+          PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
+
   // Example service definition
   // 2 components, each of which has 2 containers.
   protected Application createExampleApplication() {

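The JSON_SER_DESER added above binds Application with a snake_case naming
strategy, so Java camelCase fields are read from and written as
lower_case_with_underscores JSON keys. The equivalent mapping, sketched in
Python (launchCommand is an illustrative field name):

import re

def to_snake(name):
    # camelCase -> lower_case_with_underscores, as the naming strategy does.
    return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()

assert to_snake("launchCommand") == "launch_command"
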
http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
index d99e30e..1a22875 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestServiceApiUtil.java
@@ -21,15 +21,14 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.registry.client.api.RegistryConstants;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.Resource;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.persist.JsonSerDeser;
-import org.apache.slider.util.RestApiConstants;
-import org.apache.slider.util.RestApiErrorMessages;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Resource;
+import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -41,15 +40,10 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
 
-import static org.apache.slider.util.RestApiConstants.DEFAULT_COMPONENT_NAME;
-import static org.apache.slider.util.RestApiConstants.DEFAULT_UNLIMITED_LIFETIME;
-import static org.apache.slider.util.RestApiErrorMessages.*;
-import static org.apache.slider.util.RestApiErrorMessages.ERROR_CONTAINERS_COUNT_INVALID;
-import static org.apache.slider.util.RestApiErrorMessages.ERROR_RESOURCE_PROFILE_NOT_SUPPORTED_YET;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
+import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_COMPONENT_NAME;
+import static org.apache.hadoop.yarn.service.conf.RestApiConstants.DEFAULT_UNLIMITED_LIFETIME;
+import static org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages.*;
+import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
 
@@ -136,7 +130,7 @@ public class TestServiceApiUtil {
     } catch (IllegalArgumentException e) {
       assertEquals(String.format(
           RestApiErrorMessages.ERROR_RESOURCE_FOR_COMP_INVALID,
-          RestApiConstants.DEFAULT_COMPONENT_NAME), e.getMessage());
+          DEFAULT_COMPONENT_NAME), e.getMessage());
     }
 
     // memory not specified
@@ -148,7 +142,7 @@ public class TestServiceApiUtil {
     } catch (IllegalArgumentException e) {
       assertEquals(String.format(
           RestApiErrorMessages.ERROR_RESOURCE_MEMORY_FOR_COMP_INVALID,
-          RestApiConstants.DEFAULT_COMPONENT_NAME), e.getMessage());
+          DEFAULT_COMPONENT_NAME), e.getMessage());
     }
 
     // invalid no of cpus
@@ -161,7 +155,7 @@ public class TestServiceApiUtil {
     } catch (IllegalArgumentException e) {
       assertEquals(String.format(
           RestApiErrorMessages.ERROR_RESOURCE_CPUS_FOR_COMP_INVALID_RANGE,
-          RestApiConstants.DEFAULT_COMPONENT_NAME), e.getMessage());
+          DEFAULT_COMPONENT_NAME), e.getMessage());
     }
 
     // number of containers not specified
@@ -183,7 +177,7 @@ public class TestServiceApiUtil {
     } catch (IllegalArgumentException e) {
       assertEquals(String.format(RestApiErrorMessages
               .ERROR_RESOURCE_PROFILE_MULTIPLE_VALUES_FOR_COMP_NOT_SUPPORTED,
-          RestApiConstants.DEFAULT_COMPONENT_NAME),
+          DEFAULT_COMPONENT_NAME),
           e.getMessage());
     }
 
@@ -476,8 +470,7 @@ public class TestServiceApiUtil {
   @Test
   public void testInvalidComponent() throws IOException {
     SliderFileSystem sfs = initMock(null);
-    testComponent(sfs, false);
-    testComponent(sfs, true);
+    testComponent(sfs);
   }
 
   @Test
@@ -496,17 +489,15 @@ public class TestServiceApiUtil {
     }
   }
 
-  private static void testComponent(SliderFileSystem sfs, boolean unique)
+  private static void testComponent(SliderFileSystem sfs)
       throws IOException {
     int maxLen = RegistryConstants.MAX_FQDN_LABEL_LENGTH;
-    if (unique) {
-      assertEquals(19, Long.toString(Long.MAX_VALUE).length());
-      maxLen = maxLen - Long.toString(Long.MAX_VALUE).length();
-    }
+    assertEquals(19, Long.toString(Long.MAX_VALUE).length());
+    maxLen = maxLen - Long.toString(Long.MAX_VALUE).length();
+
     String compName = LEN_64_STR.substring(0, maxLen + 1);
     Application app = createValidApplication(null);
-    app.addComponent(createValidComponent(compName).uniqueComponentSupport(
-        unique));
+    app.addComponent(createValidComponent(compName));
 
     // invalid component name fails if dns is enabled
     try {
@@ -526,8 +517,7 @@ public class TestServiceApiUtil {
 
     compName = LEN_64_STR.substring(0, maxLen);
     app = createValidApplication(null);
-    app.addComponent(createValidComponent(compName).uniqueComponentSupport(
-        unique));
+    app.addComponent(createValidComponent(compName));
 
     // does not fail
     try {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
index 28105b2..a36e0b4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/TestYarnNativeServices.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.yarn.service;
 
-import com.google.common.base.Supplier;
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -36,17 +35,16 @@ import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
-import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Container;
+import org.apache.hadoop.yarn.service.api.records.ContainerState;
 import org.apache.hadoop.yarn.service.client.ServiceClient;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
 import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
-import org.apache.slider.api.InternalKeys;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.Container;
-import org.apache.slider.api.resource.ContainerState;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.exceptions.SliderException;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -71,8 +69,8 @@ import java.util.concurrent.TimeoutException;
 import static org.apache.hadoop.registry.client.api.RegistryConstants.KEY_REGISTRY_ZK_QUORUM;
 import static org.apache.hadoop.yarn.api.records.YarnApplicationState.FINISHED;
 import static org.apache.hadoop.yarn.conf.YarnConfiguration.*;
-import static org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys.KEY_AM_RESOURCE_MEM;
-import static org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys.KEY_SLIDER_BASE_PATH;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.AM_RESOURCE_MEM;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
 
 /**
 * End to end tests to test deploying services with MiniYarnCluster and a in-JVM
@@ -122,8 +120,8 @@ public class TestYarnNativeServices extends ServiceTestUtils{
     conf.setBoolean(TIMELINE_SERVICE_ENABLED, false);
     conf.setInt(YarnConfiguration.NM_MAX_PER_DISK_UTILIZATION_PERCENTAGE, 100);
     conf.setLong(DEBUG_NM_DELETE_DELAY_SEC, 60000);
-    conf.setLong(KEY_AM_RESOURCE_MEM, 526);
-    conf.setLong(InternalKeys.MONITOR_INTERVAL, 5);
+    conf.setLong(AM_RESOURCE_MEM, 526);
+    conf.setLong(YarnServiceConf.READINESS_CHECK_INTERVAL, 5);
     // Disable vmem check to disallow NM killing the container
     conf.setBoolean(NM_VMEM_CHECK_ENABLED, false);
     conf.setBoolean(NM_PMEM_CHECK_ENABLED, false);
@@ -143,7 +141,7 @@ public class TestYarnNativeServices extends ServiceTestUtils{
       basedir.mkdirs();
     }
 
-    conf.set(KEY_SLIDER_BASE_PATH, basedir.getAbsolutePath());
+    conf.set(YARN_SERVICE_BASE_PATH, basedir.getAbsolutePath());
 
     if (yarnCluster == null) {
       yarnCluster =
@@ -267,7 +265,7 @@ public class TestYarnNativeServices extends ServiceTestUtils{
 
     // stop the service
     LOG.info("Stop the service");
-    client.actionStop(exampleApp.getName());
+    client.actionStop(exampleApp.getName(), true);
     ApplicationReport report = client.getYarnClient()
         .getApplicationReport(ApplicationId.fromString(exampleApp.getId()));
     // AM unregisters with RM successfully
@@ -303,7 +301,7 @@ public class TestYarnNativeServices extends ServiceTestUtils{
     // check that containers for compa are launched before containers for compb
     checkContainerLaunchDependencies(client, exampleApp, "compa", "compb");
 
-    client.actionStop(exampleApp.getName());
+    client.actionStop(exampleApp.getName(), true);
     client.actionDestroy(exampleApp.getName());
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestBuildExternalComponents.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestBuildExternalComponents.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestBuildExternalComponents.java
index 4bc9f26..a22c000 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestBuildExternalComponents.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestBuildExternalComponents.java
@@ -20,11 +20,11 @@ package org.apache.hadoop.yarn.service.client;
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.api.records.Component;
 import org.apache.hadoop.yarn.service.conf.ExampleAppJson;
-import org.apache.slider.api.resource.Component;
 import org.apache.hadoop.yarn.service.client.params.ClientArgs;
-import org.apache.slider.common.tools.SliderFileSystem;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -37,7 +37,7 @@ import java.util.List;
 import java.util.Set;
 
 import static org.apache.hadoop.yarn.service.client.params.Arguments.ARG_APPDEF;
-import static org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys.KEY_SLIDER_BASE_PATH;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
 
 /**
  * Test for building / resolving components of type APPLICATION.
@@ -87,7 +87,7 @@ public class TestBuildExternalComponents {
     } else {
       basedir.mkdirs();
     }
-    conf.set(KEY_SLIDER_BASE_PATH, basedir.getAbsolutePath());
+    conf.set(YARN_SERVICE_BASE_PATH, basedir.getAbsolutePath());
   }
 
   @After

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
index 1f07301..20c06ab 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/client/TestServiceCLI.java
@@ -20,14 +20,15 @@ package org.apache.hadoop.yarn.service.client;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.service.ClientAMProtocol;
+import org.apache.hadoop.yarn.service.api.records.Component;
 import org.apache.hadoop.yarn.service.client.params.ClientArgs;
 import org.apache.hadoop.yarn.service.conf.ExampleAppJson;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.common.tools.SliderFileSystem;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -38,7 +39,7 @@ import java.io.IOException;
 import java.util.List;
 
 import static org.apache.hadoop.yarn.service.client.params.Arguments.ARG_APPDEF;
-import static org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys.KEY_SLIDER_BASE_PATH;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.YARN_SERVICE_BASE_PATH;
 import static org.mockito.Mockito.mock;
 
 public class TestServiceCLI {
@@ -59,7 +60,7 @@ public class TestServiceCLI {
   @Before
   public void setup() throws Throwable {
     basedir = new File("target", "apps");
-    conf.set(KEY_SLIDER_BASE_PATH, basedir.getAbsolutePath());
+    conf.set(YARN_SERVICE_BASE_PATH, basedir.getAbsolutePath());
     fs = new SliderFileSystem(conf);
     if (basedir.exists()) {
       FileUtils.deleteDirectory(basedir);
@@ -71,7 +72,11 @@ public class TestServiceCLI {
     cli = new ServiceCLI() {
       @Override protected void createServiceClient() {
         client = new ServiceClient() {
-          @Override protected ClientAMProtocol connectToAM(String appName)
+          @Override protected ClientAMProtocol getAMProxy(String appName,
+              ApplicationReport report) throws IOException {
+            return mock(ClientAMProtocol.class);
+          }
+          @Override protected ClientAMProtocol getAMProxy(String appName)
               throws IOException, YarnException {
             return mock(ClientAMProtocol.class);
           }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/ExampleAppJson.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/ExampleAppJson.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/ExampleAppJson.java
index 9791976..9e13200 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/ExampleAppJson.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/ExampleAppJson.java
@@ -18,13 +18,14 @@
 
 package org.apache.hadoop.yarn.service.conf;
 
-import org.apache.slider.api.resource.Application;
+
+import org.apache.hadoop.yarn.service.api.records.Application;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import static org.apache.slider.utils.SliderTestUtils.JSON_SER_DESER;
+import static org.apache.hadoop.yarn.service.ServiceTestUtils.JSON_SER_DESER;
 
 /**
  * Names of the example configs.
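
As a quick illustration of the relocated Application record and serializer: loading one of the example definitions now reads as below; loadResource and APP_JSON appear elsewhere in this patch, the standalone call is a sketch.

    // Sketch: deserialize an example app definition through the relocated
    // ServiceTestUtils.JSON_SER_DESER (wired up inside loadResource).
    Application app = ExampleAppJson.loadResource(ExampleAppJson.APP_JSON);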

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
index 66939a1..954d117 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestAppJsonResolve.java
@@ -21,14 +21,13 @@ package org.apache.hadoop.yarn.service.conf;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.slider.api.resource.ConfigFile.TypeEnum;
-import org.apache.slider.api.resource.Configuration;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.persist.JsonSerDeser;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.api.records.Configuration;
+import org.apache.hadoop.yarn.service.utils.JsonSerDeser;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
 import org.junit.Assert;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -41,18 +40,9 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 
-import static org.apache.slider.api.InternalKeys.CHAOS_MONKEY_INTERVAL;
-import static org.apache.slider.api.InternalKeys.DEFAULT_CHAOS_MONKEY_INTERVAL_DAYS;
-import static org.apache.slider.api.InternalKeys.DEFAULT_CHAOS_MONKEY_INTERVAL_HOURS;
-import static org.apache.slider.api.InternalKeys.DEFAULT_CHAOS_MONKEY_INTERVAL_MINUTES;
-import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.APP_JSON;
-import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.EXTERNAL_JSON_1;
-import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.OVERRIDE_JSON;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
-import static org.easymock.EasyMock.reset;
+import static org.apache.hadoop.yarn.service.conf.ExampleAppJson.*;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.*;
+import static org.easymock.EasyMock.*;
 
 /**
  * Test global configuration resolution.
@@ -115,9 +105,9 @@ public class TestAppJsonResolve extends Assert {
     Map<String, String> props = new HashMap<>();
     props.put("k1", "overridden");
     props.put("k2", "v2");
-    files.add(new ConfigFile().destFile("file1").type(TypeEnum
+    files.add(new ConfigFile().destFile("file1").type(ConfigFile.TypeEnum
         .PROPERTIES).props(props));
-    files.add(new ConfigFile().destFile("file2").type(TypeEnum
+    files.add(new ConfigFile().destFile("file2").type(ConfigFile.TypeEnum
         .XML).props(Collections.singletonMap("k3", "v3")));
     assertTrue(files.contains(simple.getFiles().get(0)));
     assertTrue(files.contains(simple.getFiles().get(1)));
@@ -132,9 +122,9 @@ public class TestAppJsonResolve extends Assert {
 
     props.put("k1", "v1");
     files.clear();
-    files.add(new ConfigFile().destFile("file1").type(TypeEnum
+    files.add(new ConfigFile().destFile("file1").type(ConfigFile.TypeEnum
         .PROPERTIES).props(props));
-    files.add(new ConfigFile().destFile("file2").type(TypeEnum
+    files.add(new ConfigFile().destFile("file2").type(ConfigFile.TypeEnum
         .XML).props(Collections.singletonMap("k3", "v3")));
 
     assertTrue(files.contains(master.getFiles().get(0)));
@@ -208,7 +198,7 @@ public class TestAppJsonResolve extends Assert {
     assertEquals("a", simple.getProperty("g1"));
     assertEquals("b", simple.getProperty("g2"));
     assertEquals("60",
-        simple.getProperty("internal.chaos.monkey.interval.seconds"));
+        simple.getProperty("yarn.service.failure-count-reset.window"));
 
     master = orig.getComponent("master").getConfiguration();
     assertEquals(5, master.getProperties().size());
@@ -217,7 +207,7 @@ public class TestAppJsonResolve extends Assert {
     assertEquals("b", master.getProperty("g2"));
     assertEquals("is-overridden", master.getProperty("g3"));
     assertEquals("60",
-        simple.getProperty("internal.chaos.monkey.interval.seconds"));
+        simple.getProperty("yarn.service.failure-count-reset.window"));
 
     Configuration worker = orig.getComponent("worker").getConfiguration();
     LOG.info("worker = {}", worker);
@@ -226,27 +216,9 @@ public class TestAppJsonResolve extends Assert {
     assertEquals("overridden-by-worker", worker.getProperty("g1"));
     assertEquals("b", worker.getProperty("g2"));
     assertEquals("60",
-        worker.getProperty("internal.chaos.monkey.interval.seconds"));
+        worker.getProperty("yarn.service.failure-count-reset.window"));
 
     other = orig.getComponent("other").getConfiguration();
     assertEquals(0, other.getProperties().size());
   }
-
-  @Test
-  public void testTimeIntervalLoading() throws Throwable {
-    Application orig = ExampleAppJson.loadResource(APP_JSON);
-
-    Configuration conf = orig.getConfiguration();
-    long s = conf.getPropertyLong(
-        CHAOS_MONKEY_INTERVAL + SliderUtils.SECONDS,
-        0);
-    assertEquals(60, s);
-    long monkeyInterval = SliderUtils.getTimeRange(conf,
-        CHAOS_MONKEY_INTERVAL,
-        DEFAULT_CHAOS_MONKEY_INTERVAL_DAYS,
-        DEFAULT_CHAOS_MONKEY_INTERVAL_HOURS,
-        DEFAULT_CHAOS_MONKEY_INTERVAL_MINUTES,
-        0);
-    assertEquals(60L, monkeyInterval);
-  }
-}
+}
\ No newline at end of file
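
The property rename running through these assertions is mechanical; in sketch form, with the component lookup assumed from the surrounding test:

    // Sketch: the chaos-monkey interval key is gone; the same "60" value
    // is now carried by the failure-count-reset window property.
    Configuration simple = orig.getComponent("simple").getConfiguration(); // lookup assumed
    assertEquals("60", simple.getProperty("yarn.service.failure-count-reset.window"));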

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
index b304901..8310530 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
@@ -21,9 +21,9 @@ package org.apache.hadoop.yarn.service.conf;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.common.tools.SliderFileSystem;
+import org.apache.hadoop.yarn.service.api.records.Application;
 import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -32,11 +32,8 @@ import org.junit.runners.Parameterized;
 import java.util.Arrays;
 import java.util.Collection;
 
-import static org.apache.slider.utils.SliderTestUtils.JSON_SER_DESER;
-import static org.easymock.EasyMock.anyObject;
-import static org.easymock.EasyMock.createNiceMock;
-import static org.easymock.EasyMock.expect;
-import static org.easymock.EasyMock.replay;
+import static org.apache.hadoop.yarn.service.ServiceTestUtils.JSON_SER_DESER;
+import static org.easymock.EasyMock.*;
 
 /**
  * Test loading example resources.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
index 9a8dbee..98c78d3 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.yarn.service.conf;
 
-import org.apache.slider.common.tools.SliderUtils;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
 import org.junit.Assert;
 import org.junit.Test;
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
index 568a066..5b24a1d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestAbstractClientProvider.java
@@ -19,10 +19,9 @@ package org.apache.hadoop.yarn.service.providers;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
 import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.slider.api.resource.ConfigFile.TypeEnum;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -70,7 +69,7 @@ public class TestAbstractClientProvider {
     } catch (IllegalArgumentException e) {
     }
 
-    configFile.setType(TypeEnum.TEMPLATE);
+    configFile.setType(ConfigFile.TypeEnum.TEMPLATE);
     try {
       clientProvider.validateConfigFiles(configFiles, mockFs);
       Assert.fail(EXCEPTION_PREFIX + "empty src_file for type template");
@@ -92,7 +91,7 @@ public class TestAbstractClientProvider {
     }
 
     configFile = new ConfigFile();
-    configFile.setType(TypeEnum.JSON);
+    configFile.setType(ConfigFile.TypeEnum.JSON);
     configFile.setSrcFile(null);
     configFile.setDestFile("path/destfile2");
     configFiles.add(configFile);
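
Taken together, the nested-enum change in this file amounts to the following; validateConfigFiles and the setters come from the hunks above, the wiring around them is assumed:

    // Sketch: TypeEnum is now addressed through ConfigFile rather than
    // imported as a top-level name.
    ConfigFile configFile = new ConfigFile();
    configFile.setType(ConfigFile.TypeEnum.JSON);   // was TypeEnum.JSON
    configFile.setSrcFile(null);
    configFile.setDestFile("path/destfile2");
    configFiles.add(configFile);
    clientProvider.validateConfigFiles(configFiles, mockFs);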

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestProviderFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestProviderFactory.java
index 5cb5793..489578d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestProviderFactory.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/providers/TestProviderFactory.java
@@ -18,6 +18,8 @@
 
 package org.apache.hadoop.yarn.service.providers;
 
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Artifact.TypeEnum;
 import org.apache.hadoop.yarn.service.provider.ProviderFactory;
 import org.apache.hadoop.yarn.service.provider.defaultImpl.DefaultClientProvider;
 import org.apache.hadoop.yarn.service.provider.defaultImpl.DefaultProviderFactory;
@@ -28,8 +30,7 @@ import org.apache.hadoop.yarn.service.provider.docker.DockerProviderService;
 import org.apache.hadoop.yarn.service.provider.tarball.TarballClientProvider;
 import org.apache.hadoop.yarn.service.provider.tarball.TarballProviderFactory;
 import org.apache.hadoop.yarn.service.provider.tarball.TarballProviderService;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.Artifact.TypeEnum;
+
 import org.junit.Test;
 
 import static org.junit.Assert.assertTrue;
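
The same nesting applies to Artifact; a sketch under the assumption that the record exposes a plain setType setter like its sibling ConfigFile does in this patch:

    // Sketch: select a provider type via the relocated Artifact record.
    Artifact artifact = new Artifact();
    artifact.setType(TypeEnum.DOCKER); // TypeEnum here is Artifact.TypeEnum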

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/servicemonitor/TestServiceMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/servicemonitor/TestServiceMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/servicemonitor/TestServiceMonitor.java
index db83cb6..6f5653f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/servicemonitor/TestServiceMonitor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/servicemonitor/TestServiceMonitor.java
@@ -20,33 +20,14 @@
 package org.apache.hadoop.yarn.service.servicemonitor;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
-import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
-import org.apache.hadoop.yarn.client.api.impl.AMRMClientImpl;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.service.MockServiceAM;
 import org.apache.hadoop.yarn.service.ServiceTestUtils;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
-import org.apache.slider.api.InternalKeys;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.exceptions.BadClusterStateException;
+
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -54,15 +35,7 @@ import org.junit.Test;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
-import java.util.List;
-
-import static org.mockito.Matchers.anyFloat;
-import static org.mockito.Matchers.anyInt;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Mockito.*;
 
 public class TestServiceMonitor extends ServiceTestUtils {
 
@@ -77,7 +50,7 @@ public class TestServiceMonitor extends ServiceTestUtils {
     } else {
       basedir.mkdirs();
     }
-    conf.setLong(InternalKeys.MONITOR_INTERVAL, 2);
+    conf.setLong(YarnServiceConf.READINESS_CHECK_INTERVAL, 2);
   }
 
   @After
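
In sketch form, the key swap above (YarnServiceConf.READINESS_CHECK_INTERVAL is taken from the hunk; treating the value as seconds follows the old monitor-interval semantics and is an assumption):

    // Sketch: drive the service monitor from the new readiness-check key.
    YarnConfiguration conf = new YarnConfiguration();
    conf.setLong(YarnServiceConf.READINESS_CHECK_INTERVAL, 2); // was InternalKeys.MONITOR_INTERVAL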

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
index 476727a..a891df8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
@@ -19,8 +19,8 @@
 package org.apache.hadoop.yarn.service.timelineservice;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier;
 import org.apache.hadoop.yarn.client.api.TimelineV2Client;
@@ -28,19 +28,16 @@ import org.apache.hadoop.yarn.client.api.impl.TimelineV2ClientImpl;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.hadoop.yarn.service.ServiceScheduler;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.ApplicationState;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.Container;
-import org.apache.slider.api.resource.ContainerState;
-import org.apache.slider.api.resource.PlacementPolicy;
-import org.apache.slider.api.resource.Resource;
-import org.apache.slider.server.appmaster.actions.ActionStopSlider;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.ApplicationState;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Container;
+import org.apache.hadoop.yarn.service.api.records.ContainerState;
+import org.apache.hadoop.yarn.service.api.records.PlacementPolicy;
+import org.apache.hadoop.yarn.service.api.records.Resource;
 import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
 import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceId;
-import org.apache.slider.server.appmaster.state.AppState;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -103,16 +100,9 @@ public class TestServiceTimelinePublisher {
 
   @Test
   public void testServiceAttemptEntity() {
-    AppState appState = createMockAppState();
-    int exitCode = 0;
-    String message = "Stopped by user";
-    ActionStopSlider stopAction = mock(ActionStopSlider.class);
-    when(stopAction.getExitCode()).thenReturn(exitCode);
-    when(stopAction.getFinalApplicationStatus())
-        .thenReturn(FinalApplicationStatus.SUCCEEDED);
-    when(stopAction.getMessage()).thenReturn(message);
-
-    serviceTimelinePublisher.serviceAttemptRegistered(appState.getClusterStatus());
+    Application application = createMockApplication();
+    serviceTimelinePublisher
+        .serviceAttemptRegistered(application, new YarnConfiguration());
 
     Collection<TimelineEntity> lastPublishedEntities =
         ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
@@ -123,17 +113,21 @@ public class TestServiceTimelinePublisher {
           .toString()) {
         verifyComponentTimelineEntity(timelineEntity);
       } else {
-        verifyServiceAttemptTimelineEntity(timelineEntity, 0, null, true);
+        verifyServiceAttemptTimelineEntity(timelineEntity, null, true);
       }
     }
 
-    serviceTimelinePublisher.serviceAttemptUnregistered(appState, stopAction);
+    ServiceContext context = new ServiceContext();
+    context.attemptId = ApplicationAttemptId
+        .newInstance(ApplicationId.fromString(application.getId()), 1);
+    String exitDiags = "service killed";
+    serviceTimelinePublisher.serviceAttemptUnregistered(context, exitDiags);
     lastPublishedEntities =
         ((DummyTimelineClient) timelineClient).getLastPublishedEntities();
     for (TimelineEntity timelineEntity : lastPublishedEntities) {
       if (timelineEntity.getType() == ServiceTimelineEntityType.SERVICE_ATTEMPT
           .toString()) {
-        verifyServiceAttemptTimelineEntity(timelineEntity, exitCode, message,
+        verifyServiceAttemptTimelineEntity(timelineEntity, exitDiags,
             false);
       }
     }
@@ -180,7 +174,7 @@ public class TestServiceTimelinePublisher {
   }
 
   private void verifyServiceAttemptTimelineEntity(TimelineEntity timelineEntity,
-      int exitCode, String message, boolean isRegistedEntity) {
+      String message, boolean isRegistedEntity) {
     assertEquals(SERVICEID, timelineEntity.getId());
     assertEquals(SERVICE_NAME,
         timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.NAME));
@@ -190,13 +184,10 @@ public class TestServiceTimelinePublisher {
       assertEquals(ServiceTimelineEvent.SERVICE_ATTEMPT_REGISTERED.toString(),
           timelineEntity.getEvents().iterator().next().getId());
     } else {
-      assertEquals("SUCCEEDED",
-          timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.STATE));
-      assertEquals(exitCode, timelineEntity.getInfo()
-          .get(ServiceTimelineMetricsConstants.EXIT_STATUS_CODE));
+      assertEquals("ENDED",
+          timelineEntity.getInfo().get(ServiceTimelineMetricsConstants.STATE).toString());
       assertEquals(message, timelineEntity.getInfo()
-          .get(ServiceTimelineMetricsConstants.EXIT_REASON));
-
+          .get(ServiceTimelineMetricsConstants.DIAGNOSTICS_INFO));
       assertEquals(2, timelineEntity.getEvents().size());
       assertEquals(ServiceTimelineEvent.SERVICE_ATTEMPT_UNREGISTERED.toString(),
           timelineEntity.getEvents().iterator().next().getId());
@@ -218,23 +209,20 @@ public class TestServiceTimelinePublisher {
     assertEquals("sleep 1",
         info.get(ServiceTimelineMetricsConstants.LAUNCH_COMMAND));
     assertEquals("false",
-        info.get(ServiceTimelineMetricsConstants.UNIQUE_COMPONENT_SUPPORT));
-    assertEquals("false",
         info.get(ServiceTimelineMetricsConstants.RUN_PRIVILEGED_CONTAINER));
     assertEquals("label",
         info.get(ServiceTimelineMetricsConstants.PLACEMENT_POLICY));
   }
 
-  private static AppState createMockAppState() {
-    AppState appState = mock(AppState.class);
+  private static Application createMockApplication() {
     Application application = mock(Application.class);
 
     when(application.getId()).thenReturn(SERVICEID);
     when(application.getLaunchTime()).thenReturn(new Date());
     when(application.getState()).thenReturn(ApplicationState.STARTED);
     when(application.getName()).thenReturn(SERVICE_NAME);
-    when(application.getConfiguration())
-        .thenReturn(new org.apache.slider.api.resource.Configuration());
+    when(application.getConfiguration()).thenReturn(
+        new org.apache.hadoop.yarn.service.api.records.Configuration());
 
     Component component = mock(Component.class);
     Artifact artifact = new Artifact();
@@ -250,19 +238,13 @@ public class TestServiceTimelinePublisher {
     PlacementPolicy placementPolicy = new PlacementPolicy();
     placementPolicy.setLabel("label");
     when(component.getPlacementPolicy()).thenReturn(placementPolicy);
-    when(component.getConfiguration())
-        .thenReturn(new org.apache.slider.api.resource.Configuration());
+    when(component.getConfiguration()).thenReturn(
+        new org.apache.hadoop.yarn.service.api.records.Configuration());
     List<Component> components = new ArrayList<Component>();
     components.add(component);
 
     when(application.getComponents()).thenReturn(components);
-    when(appState.getClusterStatus()).thenReturn(application);
-    return appState;
-  }
-
-  public static void main(String[] args) {
-    Application application = createMockAppState().getClusterStatus();
-    System.out.println(application.getConfiguration());
+    return application;
   }
 
   protected static class DummyTimelineClient extends TimelineV2ClientImpl {
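
Pulling the new unregister path together (all names from the hunks above; the standalone form is a sketch):

    // Sketch: publish the attempt-unregistered event with a ServiceContext
    // carrying the attempt id, replacing the old AppState/ActionStopSlider pair.
    ServiceContext context = new ServiceContext();
    context.attemptId = ApplicationAttemptId
        .newInstance(ApplicationId.fromString(application.getId()), 1);
    serviceTimelinePublisher.serviceAttemptUnregistered(context, "service killed");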

http://git-wip-us.apache.org/repos/asf/hadoop/blob/43277ffd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/api/TestRPCBinding.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/api/TestRPCBinding.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/api/TestRPCBinding.java
deleted file mode 100644
index 0c2a2aa..0000000
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/api/TestRPCBinding.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.api;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.slider.server.appmaster.rpc.RpcBinder;
-import org.apache.slider.server.appmaster.rpc.SliderClusterProtocolPB;
-import org.junit.Test;
-
-import java.net.InetSocketAddress;
-
-import static org.junit.Assert.assertTrue;
-
-/**
- * Tests RPC work.
- */
-public class TestRPCBinding {
-
-  //@Test
-  public void testRegistration() throws Throwable {
-    Configuration conf = new Configuration();
-    RpcBinder.registerSliderAPI(conf);
-    assertTrue(RpcBinder.verifyBondedToProtobuf(conf,
-        SliderClusterProtocolPB.class));
-  }
-
-  //@Test
-  public void testGetProxy() throws Throwable {
-    Configuration conf = new Configuration();
-    InetSocketAddress saddr = new InetSocketAddress("127.0.0.1", 9000);
-    SliderClusterProtocol proxy =
-        RpcBinder.connectToServer(saddr, null, conf, 1000);
-  }
-}

