http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
index 3b937b0..57d0d72 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/mapreduce2_client.py
@@ -28,16 +28,13 @@ from yarn import yarn
 
 class MapReduce2Client(Script):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
   def install(self, env):

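The pattern above repeats in the remaining BigInsights and HDP scripts in this commit: the per-component get_component_name() override and the hard-coded stack_select.select("<package>", version) call give way to a single stack_select.select_packages(params.version). A rough sketch of what that call amounts to, pieced together from the stack_select.get_packages / stack_select.select calls that appear later in this commit; the function body and scope handling below are illustrative, not the actual library implementation:

from resource_management.libraries.functions import stack_select

def select_packages_sketch(version):
  # Resolve the <stack-selector-tool> packages mapped to the current command's
  # service/role; the mapping is driven by the new stack_select_packages.json.
  # PACKAGE_SCOPE_INSTALL is the only scope constant visible in this diff.
  packages = stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
  if packages is None:
    return

  # Run "<stack-selector-tool> set <package> <version>" for each mapped package.
  for package in packages:
    stack_select.select(package, version)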
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
index 1d959e1..6b09b13 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/nodemanager.py
@@ -35,9 +35,6 @@ from service import service
 
 class Nodemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-nodemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -53,7 +50,7 @@ class Nodemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-nodemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-nodemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
index 46d825a..274100d 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/resourcemanager.py
@@ -33,9 +33,6 @@ from setup_ranger_yarn import setup_ranger_yarn
 
 class Resourcemanager(Script):
 
-  def get_component_name(self):
-    return "hadoop-yarn-resourcemanager"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -52,7 +49,7 @@ class Resourcemanager(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-yarn-resourcemanager", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-yarn-resourcemanager {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
index e20e483..e3b9125 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/YARN/package/scripts/yarn_client.py
@@ -40,16 +40,13 @@ class YarnClient(Script):
   def status(self, env):
     raise ClientComponentHasNoStatus()
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set hadoop-client {version}"))
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
index cea5d64..9253f43 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper.py
@@ -74,7 +74,7 @@ def zookeeper(type = None, upgrade_type=None):
     # This path may be missing after Ambari upgrade. We need to create it.
     if (upgrade_type == "rolling") and (not os.path.exists("/usr/iop/current/zookeeper-server")) and params.current_version:
       conf_select(params.stack_name, "zookeeper", params.current_version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 
   if (params.log4j_props != None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
index f579168..f5408aa 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_client.py
@@ -30,9 +30,6 @@ from zookeeper import zookeeper
 
 class ZookeeperClient(Script):
 
-  def get_component_name(self):
-    return "zookeeper-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -50,7 +47,7 @@ class ZookeeperClient(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-client", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-client {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
index e0ce881..fb92b2e 100755
--- a/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
+++ b/ambari-server/src/main/resources/stacks/BigInsights/4.2/services/ZOOKEEPER/package/scripts/zookeeper_server.py
@@ -40,9 +40,6 @@ from zookeeper_service import zookeeper_service
 
 class ZookeeperServer(Script):
 
-  def get_component_name(self):
-    return "zookeeper-server"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,7 +56,7 @@ class ZookeeperServer(Script):
 
     if params.version and compare_versions(format_stack_version(params.version), '4.0.0.0') >= 0:
       conf_select.select(params.stack_name, "zookeeper", params.version)
-      stack_select.select("zookeeper-server", params.version)
+      stack_select.select_packages(params.version)
       #Execute(format("iop-select set zookeeper-server {version}"))
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
index c6b091d..2d797bb 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/configuration/cluster-env.xml
@@ -262,6 +262,21 @@ gpgcheck=0</value>
     </value-attributes>
     <on-ambari-upgrade add="true"/>
   </property>
+  <!-- Define stack_select_packages property in the base stack. DO NOT override this property for each stack version -->
+  <property>
+    <name>stack_select_packages</name>
+    <value/>
+    <description>Associations between component and stack-select tools.</description>
+    <property-type>VALUE_FROM_PROPERTY_FILE</property-type>
+    <value-attributes>
+      <property-file-name>stack_select_packages.json</property-file-name>
+      <property-file-type>json</property-file-type>
+      <read-only>true</read-only>
+      <overridable>false</overridable>
+      <visible>false</visible>
+    </value-attributes>
+    <on-ambari-upgrade add="true"/>
+  </property>
   <property>
     <name>stack_root</name>
     <value>{"HDP":"/usr/hdp"}</value>

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
index 8a583b3..8bae9e6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/hook.py
@@ -28,7 +28,7 @@ class AfterInstallHook(Hook):
     import params
 
     env.set_params(params)
-    setup_stack_symlinks()
+    setup_stack_symlinks(self.stroutfile)
     setup_config()
 
     link_configs(self.stroutfile)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 1782298..24f9076 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -41,9 +41,6 @@ sudo = AMBARI_SUDO_BINARY
 stack_version_unformatted = config['hostLevelParams']['stack_version']
 stack_version_formatted = format_stack_version(stack_version_unformatted)
 
-# current host stack version
-current_version = default("/hostLevelParams/current_version", None)
-
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
 hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index e9f2283..ce106d2 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -23,13 +23,12 @@ from resource_management.core.logger import Logger
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
 from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.script import Script
 
 
-def setup_stack_symlinks():
+def setup_stack_symlinks(struct_out_file):
   """
   Invokes <stack-selector-tool> set all against a calculated fully-qualified, "normalized" version based on a
   stack version, such as "2.3". This should always be called after a component has been
@@ -38,18 +37,30 @@ def setup_stack_symlinks():
   :return:
   """
   import params
-  if params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0:
-    # try using the exact version first, falling back in just the stack if it's not defined
-    # which would only be during an intial cluster installation
-    version = params.current_version if params.current_version is not None else params.stack_version_unformatted
-
-    if not params.upgrade_suspended:
-      if params.host_sys_prepped:
-        Logger.warning("Skipping running stack-selector-tool for stack {0} as its a sys_prepped host. This may cause symlink pointers not to be created for HDP componets installed later on top of an already sys_prepped host.".format(version))
-        return
-      # On parallel command execution this should be executed by a single process at a time.
-      with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-        stack_select.select_all(version)
+  if params.upgrade_suspended:
+    Logger.warning("Skipping running stack-selector-tool because there is a 
suspended upgrade")
+    return
+
+  if params.host_sys_prepped:
+    Logger.warning("Skipping running stack-selector-tool becase this is a 
sys_prepped host. This may cause symlink pointers not to be created for HDP 
componets installed later on top of an already sys_prepped host.")
+    return
+
+  # get the packages which the stack-select tool should be used on
+  stack_select_packages = 
stack_select.get_packages(stack_select.PACKAGE_SCOPE_INSTALL)
+  if stack_select_packages is None:
+    return
+
+  json_version = load_version(struct_out_file)
+
+  if not json_version:
+    Logger.info("There is no advertised version for this component stored in 
{0}".format(struct_out_file))
+    return
+
+  # On parallel command execution this should be executed by a single process 
at a time.
+  with FcntlBasedProcessLock(params.stack_select_lock_file, enabled = 
params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+    for package in stack_select_packages:
+      stack_select.select(package, json_version)
+
 
 def setup_config():
   import params

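The reworked setup_stack_symlinks() receives the command's structured-output file (passed in from hook.py above) and asks load_version() for the version the component advertised. That helper is not part of this hunk; a minimal sketch of what it presumably does, assuming the structured output is a JSON file and that the advertised version sits under a "version" key (both assumptions, not shown in the diff):

import json
import os

def load_version(struct_out_file):
  # Return the advertised component version from the structured-out file,
  # or None if the file is missing or unreadable. The "version" key name is
  # an assumption; only the file handle itself appears in this commit.
  if not os.path.exists(struct_out_file):
    return None
  try:
    with open(struct_out_file, "r") as fp:
      return json.load(fp).get("version")
  except (IOError, ValueError):
    return None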
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
new file mode 100644
index 0000000..2747188
--- /dev/null
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/properties/stack_select_packages.json
@@ -0,0 +1,952 @@
+{
+  "HDP": {
+    "stack-select": {
+      "ACCUMULO": {
+        "ACCUMULO_CLIENT": {
+          "STACK-SELECT-PACKAGE": "accumulo-client",
+          "INSTALL": [
+            "accumulo-client"
+          ],
+          "PATCH": [
+            "accumulo-client"
+          ],
+          "STANDARD": [
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_GC": {
+          "STACK-SELECT-PACKAGE": "accumulo-gc",
+          "INSTALL": [
+            "accumulo-gc"
+          ],
+          "PATCH": [
+            "accumulo-gc"
+          ],
+          "STANDARD": [
+            "accumulo-gc",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MASTER": {
+          "STACK-SELECT-PACKAGE": "accumulo-master",
+          "INSTALL": [
+            "accumulo-master"
+          ],
+          "PATCH": [
+            "accumulo-master"
+          ],
+          "STANDARD": [
+            "accumulo-master",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_MONITOR": {
+          "STACK-SELECT-PACKAGE": "accumulo-monitor",
+          "INSTALL": [
+            "accumulo-monitor"
+          ],
+          "PATCH": [
+            "accumulo-monitor"
+          ],
+          "STANDARD": [
+            "accumulo-monitor",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TRACER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tracer",
+          "INSTALL": [
+            "accumulo-tracer"
+          ],
+          "PATCH": [
+            "accumulo-tracer"
+          ],
+          "STANDARD": [
+            "accumulo-tracer",
+            "accumulo-client"
+          ]
+        },
+        "ACCUMULO_TSERVER": {
+          "STACK-SELECT-PACKAGE": "accumulo-tablet",
+          "INSTALL": [
+            "accumulo-tablet"
+          ],
+          "PATCH": [
+            "accumulo-tablet"
+          ],
+          "STANDARD": [
+            "accumulo-tablet",
+            "accumulo-client"
+          ]
+        }
+      },
+      "ATLAS": {
+        "ATLAS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "atlas-client",
+          "INSTALL": [
+            "atlas-client"
+          ],
+          "PATCH": [
+            "atlas-client"
+          ],
+          "STANDARD": [
+            "atlas-client"
+          ]
+        },
+        "ATLAS_SERVER": {
+          "STACK-SELECT-PACKAGE": "atlas-server",
+          "INSTALL": [
+            "atlas-server"
+          ],
+          "PATCH": [
+            "atlas-server"
+          ],
+          "STANDARD": [
+            "atlas-server"
+          ]
+        }
+      },
+      "DRUID": {
+        "DRUID_COORDINATOR": {
+          "STACK-SELECT-PACKAGE": "druid-coordinator",
+          "INSTALL": [
+            "druid-coordinator"
+          ],
+          "PATCH": [
+            "druid-coordinator"
+          ],
+          "STANDARD": [
+            "druid-coordinator"
+          ]
+        },
+        "DRUID_OVERLORD": {
+          "STACK-SELECT-PACKAGE": "druid-overlord",
+          "INSTALL": [
+            "druid-overlord"
+          ],
+          "PATCH": [
+            "druid-overlord"
+          ],
+          "STANDARD": [
+            "druid-overlord"
+          ]
+        },
+        "DRUID_HISTORICAL": {
+          "STACK-SELECT-PACKAGE": "druid-historical",
+          "INSTALL": [
+            "druid-historical"
+          ],
+          "PATCH": [
+            "druid-historical"
+          ],
+          "STANDARD": [
+            "druid-historical"
+          ]
+        },
+        "DRUID_BROKER": {
+          "STACK-SELECT-PACKAGE": "druid-broker",
+          "INSTALL": [
+            "druid-broker"
+          ],
+          "PATCH": [
+            "druid-broker"
+          ],
+          "STANDARD": [
+            "druid-broker"
+          ]
+        },
+        "DRUID_MIDDLEMANAGER": {
+          "STACK-SELECT-PACKAGE": "druid-middlemanager",
+          "INSTALL": [
+            "druid-middlemanager"
+          ],
+          "PATCH": [
+            "druid-middlemanager"
+          ],
+          "STANDARD": [
+            "druid-middlemanager"
+          ]
+        },
+        "DRUID_ROUTER": {
+          "STACK-SELECT-PACKAGE": "druid-router",
+          "INSTALL": [
+            "druid-router"
+          ],
+          "PATCH": [
+            "druid-router"
+          ],
+          "STANDARD": [
+            "druid-router"
+          ]
+        },
+        "DRUID_SUPERSET": {
+          "STACK-SELECT-PACKAGE": "druid-superset",
+          "INSTALL": [
+            "druid-superset"
+          ],
+          "PATCH": [
+            "druid-superset"
+          ],
+          "STANDARD": [
+            "druid-superset"
+          ]
+        }
+      },
+      "FALCON": {
+        "FALCON_CLIENT": {
+          "STACK-SELECT-PACKAGE": "falcon-client",
+          "INSTALL": [
+            "falcon-client"
+          ],
+          "PATCH": [
+            "falcon-client"
+          ],
+          "STANDARD": [
+            "falcon-client"
+          ]
+        },
+        "FALCON_SERVER": {
+          "STACK-SELECT-PACKAGE": "falcon-server",
+          "INSTALL": [
+            "falcon-server"
+          ],
+          "PATCH": [
+            "falcon-server"
+          ],
+          "STANDARD": [
+            "falcon-server"
+          ]
+        }
+      },
+      "FLUME": {
+        "FLUME_HANDLER": {
+          "STACK-SELECT-PACKAGE": "flume-server",
+          "INSTALL": [
+            "flume-server"
+          ],
+          "PATCH": [
+            "flume-server"
+          ],
+          "STANDARD": [
+            "flume-server"
+          ]
+        }
+      },
+      "HBASE": {
+        "HBASE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hbase-client",
+          "INSTALL": [
+            "hbase-client"
+          ],
+          "PATCH": [
+            "hbase-client"
+          ],
+          "STANDARD": [
+            "hbase-client",
+            "phoenix-client",
+            "hadoop-client"
+          ]
+        },
+        "HBASE_MASTER": {
+          "STACK-SELECT-PACKAGE": "hbase-master",
+          "INSTALL": [
+            "hbase-master"
+          ],
+          "PATCH": [
+            "hbase-master"
+          ],
+          "STANDARD": [
+            "hbase-master"
+          ]
+        },
+        "HBASE_REGIONSERVER": {
+          "STACK-SELECT-PACKAGE": "hbase-regionserver",
+          "INSTALL": [
+            "hbase-regionserver"
+          ],
+          "PATCH": [
+            "hbase-regionserver"
+          ],
+          "STANDARD": [
+            "hbase-regionserver"
+          ]
+        },
+        "PHOENIX_QUERY_SERVER": {
+          "STACK-SELECT-PACKAGE": "phoenix-server",
+          "INSTALL": [
+            "phoenix-server"
+          ],
+          "PATCH": [
+            "phoenix-server"
+          ],
+          "STANDARD": [
+            "phoenix-server"
+          ]
+        }
+      },
+      "HDFS": {
+        "DATANODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
+          "INSTALL": [
+            "hadoop-hdfs-datanode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-datanode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-datanode"
+          ]
+        },
+        "HDFS_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-namenode",
+          "INSTALL": [
+            "hadoop-hdfs-namenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-namenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-namenode"
+          ]
+        },
+        "NFS_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-nfs3",
+          "INSTALL": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-nfs3"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-nfs3"
+          ]
+        },
+        "JOURNALNODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-journalnode",
+          "INSTALL": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-journalnode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-journalnode"
+          ]
+        },
+        "SECONDARY_NAMENODE": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-secondarynamenode",
+          "INSTALL": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-secondarynamenode"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-secondarynamenode"
+          ]
+        },
+        "ZKFC": {
+          "STACK-SELECT-PACKAGE": "hadoop-hdfs-zkfc",
+          "INSTALL": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "PATCH": [
+            "hadoop-hdfs-zkfc"
+          ],
+          "STANDARD": [
+            "hadoop-hdfs-zkfc"
+          ]
+        }
+      },
+      "HIVE": {
+        "HCAT": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        },
+        "HIVE_METASTORE": {
+          "STACK-SELECT-PACKAGE": "hive-metastore",
+          "INSTALL": [
+            "hive-metastore"
+          ],
+          "PATCH": [
+            "hive-metastore"
+          ],
+          "STANDARD": [
+            "hive-metastore"
+          ]
+        },
+        "HIVE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-server2",
+          "INSTALL": [
+            "hive-server2"
+          ],
+          "PATCH": [
+            "hive-server2"
+          ],
+          "STANDARD": [
+            "hive-server2"
+          ]
+        },
+        "HIVE_SERVER_INTERACTIVE": {
+          "STACK-SELECT-PACKAGE": "hive-server2-hive2",
+          "INSTALL": [
+            "hive-server2-hive2"
+          ],
+          "PATCH": [
+            "hive-server2-hive2"
+          ],
+          "STANDARD": [
+            "hive-server2-hive2"
+          ]
+        },
+        "HIVE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        },
+        "WEBHCAT_SERVER": {
+          "STACK-SELECT-PACKAGE": "hive-webhcat",
+          "INSTALL": [
+            "hive-webhcat"
+          ],
+          "PATCH": [
+            "hive-webhcat"
+          ],
+          "STANDARD": [
+            "hive-webhcat"
+          ]
+        }
+      },
+      "KAFKA": {
+        "KAFKA_BROKER": {
+          "STACK-SELECT-PACKAGE": "kafka-broker",
+          "INSTALL": [
+            "kafka-broker"
+          ],
+          "PATCH": [
+            "kafka-broker"
+          ],
+          "STANDARD": [
+            "kafka-broker"
+          ]
+        }
+      },
+      "KNOX": {
+        "KNOX_GATEWAY": {
+          "STACK-SELECT-PACKAGE": "knox-server",
+          "INSTALL": [
+            "knox-server"
+          ],
+          "PATCH": [
+            "knox-server"
+          ],
+          "STANDARD": [
+            "knox-server"
+          ]
+        }
+      },
+      "MAHOUT": {
+        "MAHOUT": {
+          "STACK-SELECT-PACKAGE": "mahout-client",
+          "INSTALL": [
+            "mahout-client"
+          ],
+          "PATCH": [
+            "mahout-client"
+          ],
+          "STANDARD": [
+            "mahout-client"
+          ]
+        }
+      },
+      "MAPREDUCE2": {
+        "HISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-mapreduce-historyserver",
+          "INSTALL": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-historyserver"
+          ],
+          "STANDARD": [
+            "hadoop-mapreduce-historyserver"
+          ]
+        },
+        "MAPREDUCE2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "hadoop-mapreduce-INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "OOZIE": {
+        "OOZIE_CLIENT": {
+          "STACK-SELECT-PACKAGE": "oozie-client",
+          "INSTALL": [
+            "oozie-client"
+          ],
+          "PATCH": [
+            "oozie-client"
+          ],
+          "STANDARD": [
+            "oozie-client"
+          ]
+        },
+        "OOZIE_SERVER": {
+          "STACK-SELECT-PACKAGE": "oozie-server",
+          "INSTALL": [
+            "oozie-client",
+            "oozie-server"
+          ],
+          "PATCH": [
+            "oozie-server"
+          ],
+          "STANDARD": [
+            "oozie-client",
+            "oozie-server"
+          ]
+        }
+      },
+      "PIG": {
+        "PIG": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "R4ML": {
+        "R4ML": {
+          "STACK-SELECT-PACKAGE": "r4ml-client",
+          "INSTALL": [
+            "r4ml-client"
+          ],
+          "PATCH": [
+            "r4ml-client"
+          ],
+          "STANDARD": [
+            "r4ml-client"
+          ]
+        }
+      },
+      "RANGER": {
+        "RANGER_ADMIN": {
+          "STACK-SELECT-PACKAGE": "ranger-admin",
+          "INSTALL": [
+            "ranger-admin"
+          ],
+          "PATCH": [
+            "ranger-admin"
+          ],
+          "STANDARD": [
+            "ranger-admin"
+          ]
+        },
+        "RANGER_TAGSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-tagsync",
+          "INSTALL": [
+            "ranger-tagsync"
+          ],
+          "PATCH": [
+            "ranger-tagsync"
+          ],
+          "STANDARD": [
+            "ranger-tagsync"
+          ]
+        },
+        "RANGER_USERSYNC": {
+          "STACK-SELECT-PACKAGE": "ranger-usersync",
+          "INSTALL": [
+            "ranger-usersync"
+          ],
+          "PATCH": [
+            "ranger-usersync"
+          ],
+          "STANDARD": [
+            "ranger-usersync"
+          ]
+        }
+      },
+      "RANGER_KMS": {
+        "RANGER_KMS_SERVER": {
+          "STACK-SELECT-PACKAGE": "ranger-kms",
+          "INSTALL": [
+            "ranger-kms"
+          ],
+          "PATCH": [
+            "ranger-kms"
+          ],
+          "STANDARD": [
+            "ranger-kms"
+          ]
+        }
+      },
+      "SLIDER": {
+        "SLIDER": {
+          "STACK-SELECT-PACKAGE": "slider-client",
+          "INSTALL": [
+            "slider-client"
+          ],
+          "PATCH": [
+            "slider-client"
+          ],
+          "STANDARD": [
+            "slider-client",
+            "hadoop-client"
+          ]
+        }
+      },
+      "SPARK": {
+        "LIVY_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy-server",
+          "INSTALL": [
+            "livy-server"
+          ],
+          "PATCH": [
+            "livy-server"
+          ],
+          "STANDARD": [
+            "livy-server"
+          ]
+        },
+        "SPARK_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark-client",
+          "INSTALL": [
+            "spark-client"
+          ],
+          "PATCH": [
+            "spark-client"
+          ],
+          "STANDARD": [
+            "spark-client"
+          ]
+        },
+        "SPARK_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-historyserver",
+          "INSTALL": [
+            "spark-historyserver"
+          ],
+          "PATCH": [
+            "spark-historyserver"
+          ],
+          "STANDARD": [
+            "spark-historyserver"
+          ]
+        },
+        "SPARK_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark-thriftserver",
+          "INSTALL": [
+            "spark-thriftserver"
+          ],
+          "PATCH": [
+            "spark-thriftserver"
+          ],
+          "STANDARD": [
+            "spark-thriftserver"
+          ]
+        }
+      },
+      "SPARK2": {
+        "LIVY2_SERVER": {
+          "STACK-SELECT-PACKAGE": "livy2-server",
+          "INSTALL": [
+            "livy2-server"
+          ],
+          "PATCH": [
+            "livy2-server"
+          ],
+          "STANDARD": [
+            "livy2-server"
+          ]
+        },
+        "SPARK2_CLIENT": {
+          "STACK-SELECT-PACKAGE": "spark2-client",
+          "INSTALL": [
+            "spark2-client"
+          ],
+          "PATCH": [
+            "spark2-client"
+          ],
+          "STANDARD": [
+            "spark2-client"
+          ]
+        },
+        "SPARK2_JOBHISTORYSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-historyserver",
+          "INSTALL": [
+            "spark2-historyserver"
+          ],
+          "PATCH": [
+            "spark2-historyserver"
+          ],
+          "STANDARD": [
+            "spark2-historyserver"
+          ]
+        },
+        "SPARK2_THRIFTSERVER": {
+          "STACK-SELECT-PACKAGE": "spark2-thriftserver",
+          "INSTALL": [
+            "spark2-thriftserver"
+          ],
+          "PATCH": [
+            "spark2-thriftserver"
+          ],
+          "STANDARD": [
+            "spark2-thriftserver"
+          ]
+        }
+      },
+      "SQOOP": {
+        "SQOOP": {
+          "STACK-SELECT-PACKAGE": "sqoop-client",
+          "INSTALL": [
+            "sqoop-client"
+          ],
+          "PATCH": [
+            "sqoop-client"
+          ],
+          "STANDARD": [
+            "sqoop-client"
+          ]
+        }
+      },
+      "STORM": {
+        "NIMBUS": {
+          "STACK-SELECT-PACKAGE": "storm-nimbus",
+          "INSTALL": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "PATCH": [
+            "storm-client",
+            "storm-nimbus"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-nimbus"
+          ]
+        },
+        "SUPERVISOR": {
+          "STACK-SELECT-PACKAGE": "storm-supervisor",
+          "INSTALL": [
+            "storm-supervisor"
+          ],
+          "PATCH": [
+            "storm-supervisor"
+          ],
+          "STANDARD": [
+            "storm-client",
+            "storm-supervisor"
+          ]
+        },
+        "DRPC_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        },
+        "STORM_UI_SERVER": {
+          "STACK-SELECT-PACKAGE": "storm-client",
+          "INSTALL": [
+            "storm-client"
+          ],
+          "PATCH": [
+            "storm-client"
+          ],
+          "STANDARD": [
+            "storm-client"
+          ]
+        }
+      },
+      "SYSTEMML": {
+        "SYSTEMML": {
+          "STACK-SELECT-PACKAGE": "systemml-client",
+          "INSTALL": [
+            "systemml-client"
+          ],
+          "PATCH": [
+            "systemml-client"
+          ],
+          "STANDARD": [
+            "systemml-client"
+          ]
+        }
+      },
+      "TEZ": {
+        "TEZ_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "TITAN": {
+        "TITAN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "titan-client",
+          "INSTALL": [
+            "titan-client"
+          ],
+          "PATCH": [
+            "titan-client"
+          ],
+          "STANDARD": [
+            "titan-client"
+          ]
+        },
+        "TITAN_SERVER": {
+          "STACK-SELECT-PACKAGE": "titan-server",
+          "INSTALL": [
+            "titan-server"
+          ],
+          "PATCH": [
+            "titan-server"
+          ],
+          "STANDARD": [
+            "titan-server"
+          ]
+        }
+      },
+      "YARN": {
+        "APP_TIMELINE_SERVER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-timelineserver",
+          "INSTALL": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "PATCH": [
+            "hadoop-yarn-timelineserver"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-timelineserver"
+          ]
+        },
+        "NODEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-nodemanager",
+          "INSTALL": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-nodemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-nodemanager"
+          ]
+        },
+        "RESOURCEMANAGER": {
+          "STACK-SELECT-PACKAGE": "hadoop-yarn-resourcemanager",
+          "INSTALL": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "PATCH": [
+            "hadoop-yarn-resourcemanager"
+          ],
+          "STANDARD": [
+            "hadoop-yarn-resourcemanager"
+          ]
+        },
+        "YARN_CLIENT": {
+          "STACK-SELECT-PACKAGE": "hadoop-client",
+          "INSTALL": [
+            "hadoop-client"
+          ],
+          "PATCH": [
+            "INVALID"
+          ],
+          "STANDARD": [
+            "hadoop-client"
+          ]
+        }
+      },
+      "ZEPPELIN": {
+        "ZEPPELIN_MASTER": {
+          "STACK-SELECT-PACKAGE": "zeppelin-server",
+          "INSTALL": [
+            "zeppelin-server"
+          ],
+          "PATCH": [
+            "zeppelin-server"
+          ],
+          "STANDARD": [
+            "zeppelin-server"
+          ]
+        }
+      },
+      "ZOOKEEPER": {
+        "ZOOKEEPER_CLIENT": {
+          "STACK-SELECT-PACKAGE": "zookeeper-client",
+          "INSTALL": [
+            "zookeeper-client"
+          ],
+          "PATCH": [
+            "zookeeper-client"
+          ],
+          "STANDARD": [
+            "zookeeper-client"
+          ]
+        },
+        "ZOOKEEPER_SERVER": {
+          "STACK-SELECT-PACKAGE": "zookeeper-server",
+          "INSTALL": [
+            "zookeeper-server"
+          ],
+          "PATCH": [
+            "zookeeper-server"
+          ],
+          "STANDARD": [
+            "zookeeper-server"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file

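The new stack_select_packages.json keys everything by stack name, then "stack-select", then service, then component; each component entry carries a STACK-SELECT-PACKAGE name plus INSTALL, PATCH and STANDARD package lists (entries containing "INVALID" appear to mark components that should not be switched in that scope, such as the hadoop-client style clients during a PATCH upgrade). A small sketch of a lookup against this structure; the function name and signature are illustrative, not Ambari's API:

import json

def lookup_stack_select_packages(json_text, stack, service, component, scope):
  # scope is one of "INSTALL", "PATCH" or "STANDARD".
  data = json.loads(json_text)
  entry = data[stack]["stack-select"][service][component]
  # Filter out "INVALID" placeholders; the interpretation that these mark
  # non-selectable components is an inference from the mapping above.
  return [p for p in entry.get(scope, []) if "INVALID" not in p]

# Against the mapping above:
#   lookup_stack_select_packages(text, "HDP", "HDFS", "DATANODE", "STANDARD")
#   would yield ["hadoop-hdfs-datanode"].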
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
index 02810e2..dc21159 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/FLUME/test_flume.py
@@ -27,7 +27,9 @@ import os
 class TestFlumeHandler(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "FLUME/1.4.0.2.0/package"
   STACK_VERSION = "2.0.6"
-  
+
+  CONFIG_OVERRIDES = {"serviceName":"FLUME", "role":"FLUME_HANDLER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/flume_handler.py",
                        classname = "FlumeHandler",
@@ -560,6 +562,7 @@ class TestFlumeHandler(RMFTestCase):
                        classname = "FlumeHandler",
                        command = "pre_upgrade_restart",
                        config_file="flume_22.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
 

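The test updates that follow all add a CONFIG_OVERRIDES dict carrying serviceName and role, since package resolution is now keyed by the command's service and role rather than a hard-coded get_component_name(). A hedged sketch of how such overrides could be folded into a command configuration before a test run; the actual RMFTestCase merge logic is not shown in this commit:

import copy

def apply_config_overrides(config_dict, config_overrides):
  # Return a copy of the command configuration with top-level keys such as
  # "serviceName" and "role" replaced by the supplied overrides.
  # Illustrative only; the real test-harness plumbing lives in RMFTestCase.
  merged = copy.deepcopy(config_dict)
  merged.update(config_overrides or {})
  return merged

# e.g. apply_config_overrides(json_content, {"serviceName": "FLUME", "role": "FLUME_HANDLER"})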
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
index cffec06..7c5c7f5 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_client.py
@@ -29,6 +29,8 @@ class TestHBaseClient(RMFTestCase):
   STACK_VERSION = "2.0.6"
   TMP_PATH = '/hadoop'
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_CLIENT"}
+
   def test_configure_secured(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_client.py",
                    classname = "HbaseClient",
@@ -239,6 +241,7 @@ class TestHBaseClient(RMFTestCase):
                        classname = "HbaseClient",
                        command = "restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],
@@ -258,7 +261,7 @@ class TestHBaseClient(RMFTestCase):
        mocks_dict['call'].call_args_list[0][0][0])
     self.assertEquals(
       ('ambari-python-wrap', '/usr/bin/conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
-       mocks_dict['checked_call'].call_args_list[4][0][0])
+       mocks_dict['checked_call'].call_args_list[5][0][0])
     self.assertEquals(
       ('ambari-python-wrap', '/usr/bin/conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
        mocks_dict['call'].call_args_list[1][0][0])

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index e85757b..a47bda3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -29,6 +29,8 @@ class TestHBaseMaster(RMFTestCase):
   TMP_PATH = "/hadoop"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_MASTER"}
+
   def test_install_hbase_master_default_no_phx(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
                        classname = "HbaseMaster",
@@ -747,6 +749,7 @@ class TestHBaseMaster(RMFTestCase):
                        classname = "HbaseMaster",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        mocks_dict = mocks_dict)
@@ -770,6 +773,7 @@ class TestHBaseMaster(RMFTestCase):
                        classname = "HbaseMaster",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index 93f5d19..6a2d8fb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -30,6 +30,8 @@ class TestHbaseRegionServer(RMFTestCase):
   STACK_VERSION = "2.0.6"
   TMP_PATH = '/hadoop'
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"HBASE_REGIONSERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
                    classname = "HbaseRegionServer",
@@ -540,6 +542,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        classname = "HbaseRegionServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -580,6 +583,7 @@ class TestHbaseRegionServer(RMFTestCase):
                        classname = "HbaseRegionServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
index 1b324d4..973e274 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_phoenix_queryserver.py
@@ -32,6 +32,8 @@ class TestPhoenixQueryServer(RMFTestCase):
   STACK_VERSION = "2.3"
   TMP_PATH = "/hadoop"
 
+  CONFIG_OVERRIDES = {"serviceName":"HBASE", "role":"PHOENIX_QUERY_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(
       self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/phoenix_queryserver.py",
@@ -432,6 +434,7 @@ class TestPhoenixQueryServer(RMFTestCase):
       classname = "PhoenixQueryServer",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       call_mocks = [(0, "/etc/hbase/2.3.0.0-1234/0", ''), (0, None, None), (0, None, None)],
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index d2968f8..0f31ad2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -32,6 +32,8 @@ class TestDatanode(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"DATANODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
                        classname = "DataNode",
@@ -484,6 +486,7 @@ class TestDatanode(RMFTestCase):
                        classname = "DataNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -504,6 +507,7 @@ class TestDatanode(RMFTestCase):
                        classname = "DataNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
index bcd9c80..680c984 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_hdfs_client.py
@@ -37,6 +37,8 @@ class Test(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HDFS/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"HDFS_CLIENT"}
+
   def test_generate_configs_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hdfs_client.py",
                        classname = "HdfsClient",
@@ -81,6 +83,7 @@ class Test(RMFTestCase):
                    classname = "HdfsClient",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES)
 
@@ -101,6 +104,7 @@ class Test(RMFTestCase):
                        classname = "HdfsClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -128,6 +132,7 @@ class Test(RMFTestCase):
                        classname = "HdfsClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalledIgnoreEarlier('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-client', version), sudo=True,)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index ff8f92e..06c5fdd 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -29,6 +29,8 @@ class TestJournalnode(RMFTestCase):
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"JOURNALNODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/journalnode.py",
                        classname = "JournalNode",
@@ -388,6 +390,7 @@ class TestJournalnode(RMFTestCase):
                        classname = "JournalNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'hadoop-hdfs-journalnode', version), sudo=True,)
@@ -406,6 +409,7 @@ class TestJournalnode(RMFTestCase):
                        classname = "JournalNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 862a17e..0d27d15 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -35,6 +35,8 @@ class TestNamenode(RMFTestCase):
   STACK_VERSION = "2.0.6"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', '/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"NAMENODE"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
                        classname = "NameNode",
@@ -1401,6 +1403,7 @@ class TestNamenode(RMFTestCase):
                        classname = "NameNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -1422,6 +1425,7 @@ class TestNamenode(RMFTestCase):
                        classname = "NameNode",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, None), (0, None), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
index de425cd..773d3fe 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_nfsgateway.py
@@ -34,6 +34,8 @@ class TestNFSGateway(RMFTestCase):
   STACK_VERSION = "2.0.6"
   UPGRADE_STACK_VERSION = "2.2"
 
+  CONFIG_OVERRIDES = {"serviceName":"HDFS", "role":"NFS_GATEWAY"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/nfsgateway.py",
                        classname = "NFSGateway",
@@ -291,6 +293,7 @@ class TestNFSGateway(RMFTestCase):
                        classname = "NFSGateway",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None), (0, None), (0, 
None)])

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
index ff7e728..457fccf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hcat_client.py
@@ -25,6 +25,8 @@ class TestHcatClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"HCAT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/hcat_client.py",
                        classname = "HCatClient",
@@ -117,6 +119,7 @@ class TestHcatClient(RMFTestCase):
       classname = "HCatClient",
       command = "pre_upgrade_restart",
       config_dict = json_content,
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       call_mocks = [(0, None, ''), (0, None, ''), (0, None, ''), (0, None, 
'')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
index 6afc298..3bc597e 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_client.py
@@ -25,11 +25,14 @@ class TestHiveClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = { "serviceName" : "HIVE", "role" : "HIVE_CLIENT" }
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/hive_client.py",
                        classname = "HiveClient",
                        command = "configure",
                        config_file="default_client.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -110,6 +113,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "configure",
                        config_file="secured_client.json",
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -198,6 +202,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -219,6 +224,7 @@ class TestHiveClient(RMFTestCase):
                        classname = "HiveClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, ''), (0, None, 
''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
index 03dd391..a6a4fa0 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_webhcat_server.py
@@ -29,6 +29,8 @@ class TestWebHCatServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "HIVE/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"HIVE", "role":"WEBHCAT_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/webhcat_server.py",
                        classname = "WebHCatServer",
@@ -286,6 +288,7 @@ class TestWebHCatServer(RMFTestCase):
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -308,6 +311,7 @@ class TestWebHCatServer(RMFTestCase):
                        classname = "WebHCatServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
index f7e4410..31d54ae 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_client.py
@@ -28,6 +28,8 @@ class TestOozieClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "OOZIE/4.0.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"OOZIE", "role":"OOZIE_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/oozie_client.py",
                        classname = "OozieClient",
@@ -243,6 +245,7 @@ class TestOozieClient(RMFTestCase):
                        classname = "OozieClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -264,6 +267,7 @@ class TestOozieClient(RMFTestCase):
                        classname = "OozieClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
index 5ef6ad9..17b8abf 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/OOZIE/test_oozie_server.py
@@ -39,6 +39,8 @@ class TestOozieServer(RMFTestCase):
   UPGRADE_STACK_VERSION = "2.2"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', 
'/mr-history/done', '/app-logs', '/tmp']
 
+  CONFIG_OVERRIDES = {"serviceName":"OOZIE", "role":"OOZIE_SERVER"}
+
   def setUp(self):
     self.maxDiff = None
 
@@ -1194,6 +1196,7 @@ class TestOozieServer(RMFTestCase):
 
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/oozie_server.py",
      classname = "OozieServer", command = "pre_upgrade_restart", config_file = 
"oozie-upgrade.json",
+     config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
      call_mocks = [(0, prepare_war_stdout)])
@@ -1206,6 +1209,9 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(glob_mock.call_count,1)
     
glob_mock.assert_called_with('/usr/hdp/2.2.1.0-2135/hadoop/lib/hadoop-lzo*.jar')
 
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', u'2.2.1.0-2135'),
+      sudo = True )
+
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.1.0-2135'),
       sudo = True )
 
@@ -1250,6 +1256,7 @@ class TestOozieServer(RMFTestCase):
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/oozie_server.py",
      classname = "OozieServer", command = "pre_upgrade_restart", config_dict = 
json_content,
+     config_overrides = self.CONFIG_OVERRIDES,
      stack_version = self.UPGRADE_STACK_VERSION,
      target = RMFTestCase.TARGET_COMMON_SERVICES,
      call_mocks = [(0, None, ''), (0, prepare_war_stdout)],
@@ -1266,6 +1273,8 @@ class TestOozieServer(RMFTestCase):
     self.assertResourceCalled('Link', '/etc/oozie/conf',
                               to = '/usr/hdp/current/oozie-client/conf',
     )
+
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Directory', 
'/usr/hdp/current/oozie-server/libext', mode = 0777)
@@ -1313,6 +1322,7 @@ class TestOozieServer(RMFTestCase):
     self.assertEqual(isfile_mock.call_count,1)
     isfile_mock.assert_called_with('/usr/share/HDP-oozie/ext-2.2.zip')
 
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', u'2.2.0.0-0000'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', u'2.2.0.0-0000'), sudo = True)
 
     self.assertResourceCalled('Directory', 
'/usr/hdp/current/oozie-server/libext',mode = 0777)
@@ -1507,6 +1517,7 @@ class TestOozieServer(RMFTestCase):
     
glob_mock.assert_called_with('/usr/hdp/2.3.0.0-1234/hadoop/lib/hadoop-lzo*.jar')
 
     self.assertResourceCalled('Link', '/etc/oozie/conf', to = 
'/usr/hdp/current/oozie-client/conf')
+    self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-client', '2.3.0.0-1234'), sudo = True)
     self.assertResourceCalled('Execute', ('ambari-python-wrap', 
'/usr/bin/hdp-select', 'set', 'oozie-server', '2.3.0.0-1234'), sudo = True)
 
     self.assertResourceCalled('Directory', 
'/usr/hdp/current/oozie-server/libext', mode = 0777)
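
Note on the three new oozie-client assertions above: with stack_select.select_packages, the Oozie Server upgrade path now selects every package mapped to the OOZIE_SERVER role, so the tests expect an "hdp-select set oozie-client <version>" call immediately before the existing oozie-server one. A loose sketch of that behaviour follows; ROLE_TO_PACKAGES and run_hdp_select are illustrative names, not the Ambari API, and the real role-to-package mapping lives in the stack metadata rather than in code like this.

import subprocess

# Hypothetical role-to-packages mapping for illustration only.
ROLE_TO_PACKAGES = {
    "OOZIE_SERVER": ["oozie-client", "oozie-server"],
    "JOURNALNODE": ["hadoop-hdfs-journalnode"],
}

def run_hdp_select(role, version, runner=subprocess.check_call):
    """Issue one 'hdp-select set <package> <version>' per package mapped to the role."""
    for package in ROLE_TO_PACKAGES.get(role, []):
        runner(["ambari-python-wrap", "/usr/bin/hdp-select", "set", package, version])

# Example: run_hdp_select("OOZIE_SERVER", "2.3.0.0-1234") would set oozie-client
# first and oozie-server second, matching the order asserted in the tests above.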

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
index 804abe7..63076f9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
@@ -25,6 +25,8 @@ class TestPigClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "PIG/0.12.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"PIG", "role":"PIG"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/pig_client.py",
                        classname = "PigClient",
@@ -143,6 +145,7 @@ class TestPigClient(RMFTestCase):
                        classname = "PigClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -161,6 +164,7 @@ class TestPigClient(RMFTestCase):
                        classname = "PigClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, '')],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py 
b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
index 87b8fec..4622ae3 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/SQOOP/test_sqoop.py
@@ -25,6 +25,8 @@ class TestSqoop(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "SQOOP/1.4.4.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"SQOOP", "role":"SQOOP"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/sqoop_client.py",
                        classname = "SqoopClient",
@@ -136,6 +138,7 @@ class TestSqoop(RMFTestCase):
                        classname = "SqoopClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
index ea5b468..0cfc6df 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_historyserver.py
@@ -34,7 +34,9 @@ class TestHistoryServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
   DEFAULT_IMMUTABLE_PATHS = ['/apps/hive/warehouse', '/apps/falcon', 
'/mr-history/done', '/app-logs', '/tmp']
-  
+
+  CONFIG_OVERRIDES = {"serviceName":"MAPREDUCE2", "role":"HISTORYSERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/historyserver.py",
                        classname="HistoryServer",
@@ -767,6 +769,7 @@ class TestHistoryServer(RMFTestCase):
                        classname = "HistoryServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None, None), (0, None, 
None), (0, None, None), (0, None, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
index 7e06969..5898355 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_mapreduce2_client.py
@@ -34,6 +34,8 @@ class TestMapReduce2Client(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"MAPREDUCE2", "role":"MAPREDUCE2_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/mapreduce2_client.py",
                        classname = "MapReduce2Client",
@@ -390,6 +392,7 @@ class TestMapReduce2Client(RMFTestCase):
                    classname = "MapReduce2Client",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -410,6 +413,7 @@ class TestMapReduce2Client(RMFTestCase):
                        classname = "MapReduce2Client",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -439,6 +443,7 @@ class TestMapReduce2Client(RMFTestCase):
                        classname = "MapReduce2Client",
                        command = "stack_upgrade_save_new_config",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
index ed8fb27..d132e73 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_nodemanager.py
@@ -34,6 +34,8 @@ class TestNodeManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"NODEMANAGER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/nodemanager.py",
                        classname="Nodemanager",
@@ -577,6 +579,7 @@ class TestNodeManager(RMFTestCase):
       classname = "Nodemanager",
       command = "post_upgrade_restart",
       config_file = "default.json",
+      config_overrides = self.CONFIG_OVERRIDES,
       stack_version = self.STACK_VERSION,
       target = RMFTestCase.TARGET_COMMON_SERVICES,
       checked_call_mocks = [(0, process_output)],
@@ -605,6 +608,7 @@ class TestNodeManager(RMFTestCase):
                          classname="Nodemanager",
                          command = "post_upgrade_restart",
                          config_file="default.json",
+                         config_overrides = self.CONFIG_OVERRIDES,
                          stack_version = self.STACK_VERSION,
                          target = RMFTestCase.TARGET_COMMON_SERVICES,
                          call_mocks = [(0, process_output)],
@@ -628,6 +632,7 @@ class TestNodeManager(RMFTestCase):
                          classname="Nodemanager",
                          command = "post_upgrade_restart",
                          config_file="default.json",
+                         config_overrides = self.CONFIG_OVERRIDES,
                          stack_version = self.STACK_VERSION,
                          target = RMFTestCase.TARGET_COMMON_SERVICES,
                          call_mocks = [(999, process_output)],
@@ -651,6 +656,7 @@ class TestNodeManager(RMFTestCase):
                        classname = "Nodemanager",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
index e8b5f78..82d3a1c 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_resourcemanager.py
@@ -37,6 +37,8 @@ class TestResourceManager(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"RESOURCEMANAGER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/resourcemanager.py",
                        classname="Resourcemanager",
@@ -549,6 +551,7 @@ class TestResourceManager(RMFTestCase):
                        classname = "Resourcemanager",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
index f71c93a..09a6278 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/YARN/test_yarn_client.py
@@ -34,6 +34,8 @@ class TestYarnClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "YARN/2.1.0.2.0/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"YARN", "role":"YARN_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/yarn_client.py",
                        classname = "YarnClient",
@@ -556,6 +558,7 @@ class TestYarnClient(RMFTestCase):
                    classname = "YarnClient",
                    command = "restart",
                    config_file="client-upgrade.json",
+                   config_overrides = self.CONFIG_OVERRIDES,
                    stack_version = self.STACK_VERSION,
                    target = RMFTestCase.TARGET_COMMON_SERVICES
     )
@@ -577,6 +580,7 @@ class TestYarnClient(RMFTestCase):
                        classname = "YarnClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py 
b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
index e4c6fbd..f074036 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_client.py
@@ -27,6 +27,8 @@ class TestZookeeperClient(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "ZOOKEEPER/3.4.5/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"ZOOKEEPER", "role":"ZOOKEEPER_CLIENT"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/zookeeper_client.py",
                        classname = "ZookeeperClient",
@@ -170,6 +172,7 @@ class TestZookeeperClient(RMFTestCase):
                        classname = "ZookeeperClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -191,6 +194,7 @@ class TestZookeeperClient(RMFTestCase):
                        classname = "ZookeeperClient",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py 
b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
index 6d38a67..bc58e56 100644
--- 
a/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
+++ 
b/ambari-server/src/test/python/stacks/2.0.6/ZOOKEEPER/test_zookeeper_server.py
@@ -28,6 +28,8 @@ class TestZookeeperServer(RMFTestCase):
   COMMON_SERVICES_PACKAGE_DIR = "ZOOKEEPER/3.4.5/package"
   STACK_VERSION = "2.0.6"
 
+  CONFIG_OVERRIDES = {"serviceName":"ZOOKEEPER", "role":"ZOOKEEPER_SERVER"}
+
   def test_configure_default(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + 
"/scripts/zookeeper_server.py",
                        classname = "ZookeeperServer",
@@ -257,6 +259,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES)
     self.assertResourceCalled('Execute',
@@ -278,6 +281,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "pre_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [(0, None, ''), (0, None)],
@@ -313,6 +317,7 @@ class TestZookeeperServer(RMFTestCase):
                        classname = "ZookeeperServer",
                        command = "post_upgrade_restart",
                        config_dict = json_content,
+                       config_overrides = self.CONFIG_OVERRIDES,
                        stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
                        call_mocks = [
