AMBARI-21580 - Replace Hard Coded stack-select Structures (jonathanhurley)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/56d2ade2
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/56d2ade2
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/56d2ade2

Branch: refs/heads/branch-feature-AMBARI-21450
Commit: 56d2ade2b78adf170a92a99dbbd8d5d1a074389a
Parents: 863343a
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Wed Jul 26 15:01:33 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Thu Jul 27 15:29:07 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/stack_select.py         | 171 +++-
 .../libraries/script/script.py                  |  23 +-
 .../BlueprintConfigurationProcessor.java        |   5 +-
 .../ambari/server/state/ConfigHelper.java       |   1 +
 .../server/upgrade/FinalUpgradeCatalog.java     |   4 +-
 .../package/scripts/accumulo_client.py          |   5 +-
 .../package/scripts/accumulo_script.py          |  34 +-
 .../0.1.0.2.3/package/scripts/atlas_client.py   |   5 +-
 .../package/scripts/metadata_server.py          |   6 +-
 .../DRUID/0.9.2/package/scripts/druid_node.py   |   6 +-
 .../DRUID/0.9.2/package/scripts/superset.py     |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_client.py  |   5 +-
 .../0.5.0.2.1/package/scripts/falcon_server.py  |   5 +-
 .../1.4.0.2.0/package/scripts/flume_handler.py  |   5 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   7 +-
 .../0.96.0.2.0/package/scripts/hbase_master.py  |   5 +-
 .../package/scripts/hbase_regionserver.py       |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |   4 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   5 +-
 .../2.1.0.2.0/package/scripts/journalnode.py    |   5 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |   7 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |   6 +-
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |   5 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |   9 +-
 .../0.12.0.2.0/package/scripts/hcat_client.py   |   8 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |   5 +-
 .../package/scripts/hive_metastore.py           |   6 +-
 .../0.12.0.2.0/package/scripts/hive_server.py   |   5 +-
 .../package/scripts/hive_server_interactive.py  |   6 +-
 .../package/scripts/webhcat_server.py           |   5 +-
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |   5 +-
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |   5 +-
 .../1.0.0.2.3/package/scripts/mahout_client.py  |   7 +-
 .../4.0.0.2.0/package/scripts/oozie_client.py   |   5 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |   8 +-
 .../0.12.0.2.0/package/scripts/pig_client.py    |   5 +-
 .../R4ML/0.8.0/package/scripts/r4ml_client.py   |   5 +-
 .../0.4.0/package/scripts/ranger_admin.py       |   5 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |   7 +-
 .../0.4.0/package/scripts/ranger_usersync.py    |   3 -
 .../RANGER/0.4.0/package/scripts/upgrade.py     |   2 +-
 .../0.5.0.2.3/package/scripts/kms_server.py     |   3 -
 .../0.5.0.2.3/package/scripts/upgrade.py        |   2 +-
 .../0.60.0.2.2/package/scripts/slider_client.py |   6 +-
 .../1.2.1/package/scripts/job_history_server.py |   5 +-
 .../SPARK/1.2.1/package/scripts/livy_server.py  |   5 +-
 .../SPARK/1.2.1/package/scripts/spark_client.py |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../2.0.0/package/scripts/job_history_server.py |   5 +-
 .../2.0.0/package/scripts/livy2_server.py       |   5 +-
 .../2.0.0/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   5 +-
 .../1.4.4.2.0/package/scripts/service_check.py  |   3 -
 .../1.4.4.2.0/package/scripts/sqoop_client.py   |   5 +-
 .../STORM/0.9.1/package/scripts/drpc_server.py  |   5 +-
 .../STORM/0.9.1/package/scripts/nimbus.py       |   7 +-
 .../STORM/0.9.1/package/scripts/nimbus_prod.py  |   6 +-
 .../STORM/0.9.1/package/scripts/pacemaker.py    |   5 +-
 .../STORM/0.9.1/package/scripts/rest_api.py     |   3 -
 .../STORM/0.9.1/package/scripts/supervisor.py   |   6 +-
 .../0.9.1/package/scripts/supervisor_prod.py    |   6 +-
 .../STORM/0.9.1/package/scripts/ui_server.py    |   5 +-
 .../0.10.0/package/scripts/systemml_client.py   |   5 +-
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |   5 +-
 .../TITAN/1.0.0/package/scripts/titan_client.py |   5 +-
 .../TITAN/1.0.0/package/scripts/titan_server.py |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  |   5 +-
 .../package/scripts/mapreduce2_client.py        |   5 +-
 .../2.1.0.2.0/package/scripts/nodemanager.py    |   5 +-
 .../package/scripts/resourcemanager.py          |   5 +-
 .../2.1.0.2.0/package/scripts/yarn_client.py    |   5 +-
 .../0.6.0.2.5/package/scripts/master.py         |   5 +-
 .../3.4.5/package/scripts/zookeeper_client.py   |   4 +-
 .../3.4.5/package/scripts/zookeeper_server.py   |   5 +-
 .../4.0/properties/stack_select_packages.json   | 775 +++++++++++++++
 .../FLUME/package/scripts/flume_handler.py      |   5 +-
 .../HBASE/package/scripts/hbase_client.py       |   6 +-
 .../HBASE/package/scripts/hbase_master.py       |   6 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../services/HDFS/package/scripts/datanode.py   |   5 +-
 .../HDFS/package/scripts/hdfs_client.py         |   5 +-
 .../HDFS/package/scripts/journalnode.py         |   5 +-
 .../services/HDFS/package/scripts/namenode.py   |   5 +-
 .../services/HDFS/package/scripts/nfsgateway.py |   5 +-
 .../services/HDFS/package/scripts/snamenode.py  |   5 +-
 .../HIVE/package/scripts/hcat_client.py         |   4 +-
 .../HIVE/package/scripts/hive_client.py         |   7 +-
 .../HIVE/package/scripts/hive_metastore.py      |   5 +-
 .../HIVE/package/scripts/hive_server.py         |   5 +-
 .../HIVE/package/scripts/webhcat_server.py      |   5 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   5 +-
 .../services/KAFKA/package/scripts/upgrade.py   |   4 +-
 .../KNOX/package/scripts/knox_gateway.py        |   6 +-
 .../OOZIE/package/scripts/oozie_client.py       |   5 +-
 .../OOZIE/package/scripts/oozie_server.py       |   8 +-
 .../services/PIG/package/scripts/pig_client.py  |   5 +-
 .../SLIDER/package/scripts/slider_client.py     |   6 +-
 .../SOLR/package/scripts/solr_server.py         |   2 +-
 .../SPARK/package/scripts/job_history_server.py |   5 +-
 .../SPARK/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   6 +-
 .../SQOOP/package/scripts/service_check.py      |   3 -
 .../SQOOP/package/scripts/sqoop_client.py       |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../YARN/package/scripts/historyserver.py       |   5 +-
 .../YARN/package/scripts/mapreduce2_client.py   |   5 +-
 .../YARN/package/scripts/nodemanager.py         |   5 +-
 .../YARN/package/scripts/resourcemanager.py     |   5 +-
 .../YARN/package/scripts/yarn_client.py         |   5 +-
 .../ZOOKEEPER/package/scripts/zookeeper.py      |   2 +-
 .../package/scripts/zookeeper_client.py         |   5 +-
 .../package/scripts/zookeeper_server.py         |   5 +-
 .../scripts/shared_initialization.py            |  19 +-
 .../HBASE/package/scripts/hbase_client.py       |   8 +-
 .../HBASE/package/scripts/hbase_master.py       |   5 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   6 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../FLUME/package/scripts/flume_handler.py      |   5 +-
 .../HBASE/package/scripts/hbase_client.py       |   7 +-
 .../HBASE/package/scripts/hbase_master.py       |   5 +-
 .../HBASE/package/scripts/hbase_regionserver.py |   5 +-
 .../package/scripts/hbase_restgatewayserver.py  |   5 +-
 .../package/scripts/phoenix_queryserver.py      |   7 +-
 .../services/HBASE/package/scripts/upgrade.py   |   4 +-
 .../services/HDFS/package/scripts/datanode.py   |   7 +-
 .../HDFS/package/scripts/hdfs_client.py         |   5 +-
 .../HDFS/package/scripts/journalnode.py         |   5 +-
 .../services/HDFS/package/scripts/namenode.py   |   7 +-
 .../services/HDFS/package/scripts/nfsgateway.py |   5 +-
 .../services/HDFS/package/scripts/snamenode.py  |   5 +-
 .../HIVE/package/scripts/hcat_client.py         |   4 +-
 .../HIVE/package/scripts/hive_client.py         |   7 +-
 .../HIVE/package/scripts/hive_metastore.py      |   5 +-
 .../HIVE/package/scripts/hive_server.py         |   5 +-
 .../HIVE/package/scripts/webhcat_server.py      |   5 +-
 .../KAFKA/package/scripts/kafka_broker.py       |   5 +-
 .../KNOX/package/scripts/knox_gateway.py        |   7 +-
 .../OOZIE/package/scripts/oozie_client.py       |   5 +-
 .../OOZIE/package/scripts/oozie_server.py       |  10 +-
 .../services/PIG/package/scripts/pig_client.py  |   5 +-
 .../RANGER/package/scripts/ranger_admin.py      |   5 +-
 .../RANGER/package/scripts/ranger_usersync.py   |   4 -
 .../services/RANGER/package/scripts/upgrade.py  |   2 +-
 .../RANGER_KMS/package/scripts/kms_server.py    |   3 -
 .../RANGER_KMS/package/scripts/upgrade.py       |   2 +-
 .../SLIDER/package/scripts/slider_client.py     |   6 +-
 .../SOLR/package/scripts/solr_server.py         |   2 +-
 .../SPARK/package/scripts/job_history_server.py |   5 +-
 .../SPARK/package/scripts/spark_client.py       |   5 +-
 .../package/scripts/spark_thrift_server.py      |   6 +-
 .../SQOOP/package/scripts/service_check.py      |   3 -
 .../SQOOP/package/scripts/sqoop_client.py       |   5 +-
 .../SYSTEMML/package/scripts/systemml_client.py |   5 +-
 .../TITAN/package/scripts/titan_client.py       |   5 +-
 .../scripts/application_timeline_server.py      |   5 +-
 .../YARN/package/scripts/historyserver.py       |   5 +-
 .../YARN/package/scripts/mapreduce2_client.py   |   5 +-
 .../YARN/package/scripts/nodemanager.py         |   5 +-
 .../YARN/package/scripts/resourcemanager.py     |   5 +-
 .../YARN/package/scripts/yarn_client.py         |   5 +-
 .../ZOOKEEPER/package/scripts/zookeeper.py      |   2 +-
 .../package/scripts/zookeeper_client.py         |   5 +-
 .../package/scripts/zookeeper_server.py         |   5 +-
 .../HDP/2.0.6/configuration/cluster-env.xml     |  15 +
 .../2.0.6/hooks/after-INSTALL/scripts/hook.py   |   2 +-
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   3 -
 .../scripts/shared_initialization.py            |  39 +-
 .../2.0.6/properties/stack_select_packages.json | 952 +++++++++++++++++++
 .../python/stacks/2.0.6/FLUME/test_flume.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   5 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   4 +
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   4 +
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |   3 +
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   4 +
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   5 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   4 +
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   4 +
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   3 +
 .../stacks/2.0.6/HIVE/test_hcat_client.py       |   3 +
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   6 +
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   4 +
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  11 +
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   4 +
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   3 +
 .../stacks/2.0.6/YARN/test_historyserver.py     |   5 +-
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   5 +
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   6 +
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |   3 +
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   4 +
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   5 +
 .../hooks/after-INSTALL/test_after_install.py   |  43 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |   4 +
 .../stacks/2.1/FALCON/test_falcon_server.py     |   5 +
 .../stacks/2.1/HIVE/test_hive_metastore.py      |  26 +-
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   4 +
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   3 +
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   3 +
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   4 +
 .../2.1/STORM/test_storm_supervisor_prod.py     |   4 +
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   5 +-
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   4 +
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   3 +
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   4 +
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   4 +
 .../stacks/2.2/SLIDER/test_slider_client.py     |   6 +
 .../stacks/2.2/SPARK/test_job_history_server.py |   3 +
 .../stacks/2.2/SPARK/test_spark_client.py       |   4 +
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   4 +
 .../2.3/SPARK/test_spark_thrift_server.py       |   3 +
 .../src/test/python/stacks/utils/RMFTestCase.py |  12 +
 219 files changed, 2320 insertions(+), 807 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 265e7df..9e28907 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -22,6 +22,7 @@ limitations under the License.
 import os
 import sys
 import re
+import ambari_simplejson as json
 
 # Local Imports
 from resource_management.core.logger import Logger
@@ -40,50 +41,6 @@ from resource_management.libraries.functions import StackFeature
 
 STACK_SELECT_PREFIX = 'ambari-python-wrap'
 
-# a mapping of Ambari server role to <stack-selector-tool> component name for all
-# non-clients
-SERVER_ROLE_DIRECTORY_MAP = {
-  'ACCUMULO_MASTER' : 'accumulo-master',
-  'ACCUMULO_MONITOR' : 'accumulo-monitor',
-  'ACCUMULO_GC' : 'accumulo-gc',
-  'ACCUMULO_TRACER' : 'accumulo-tracer',
-  'ACCUMULO_TSERVER' : 'accumulo-tablet',
-  'ATLAS_SERVER' : 'atlas-server',
-  'FLUME_HANDLER' : 'flume-server',
-  'FALCON_SERVER' : 'falcon-server',
-  'NAMENODE' : 'hadoop-hdfs-namenode',
-  'DATANODE' : 'hadoop-hdfs-datanode',
-  'SECONDARY_NAMENODE' : 'hadoop-hdfs-secondarynamenode',
-  'NFS_GATEWAY' : 'hadoop-hdfs-nfs3',
-  'JOURNALNODE' : 'hadoop-hdfs-journalnode',
-  'HBASE_MASTER' : 'hbase-master',
-  'HBASE_REGIONSERVER' : 'hbase-regionserver',
-  'HIVE_METASTORE' : 'hive-metastore',
-  'HIVE_SERVER' : 'hive-server2',
-  'HIVE_SERVER_INTERACTIVE' : 'hive-server2-hive2',
-  'WEBHCAT_SERVER' : 'hive-webhcat',
-  'KAFKA_BROKER' : 'kafka-broker',
-  'KNOX_GATEWAY' : 'knox-server',
-  'OOZIE_SERVER' : 'oozie-server',
-  'RANGER_ADMIN' : 'ranger-admin',
-  'RANGER_USERSYNC' : 'ranger-usersync',
-  'RANGER_TAGSYNC' : 'ranger-tagsync',
-  'RANGER_KMS' : 'ranger-kms',
-  'SPARK_JOBHISTORYSERVER' : 'spark-historyserver',
-  'SPARK_THRIFTSERVER' : 'spark-thriftserver',
-  'NIMBUS' : 'storm-nimbus',
-  'SUPERVISOR' : 'storm-supervisor',
-  'HISTORYSERVER' : 'hadoop-mapreduce-historyserver',
-  'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver',
-  'NODEMANAGER' : 'hadoop-yarn-nodemanager',
-  'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager',
-  'ZOOKEEPER_SERVER' : 'zookeeper-server',
-
-  # ZKFC is tied to NN since it doesn't have its own componnet in <stack-selector-tool> and there is
-  # a requirement that the ZKFC is installed on each NN
-  'ZKFC' : 'hadoop-hdfs-namenode'
-}
-
 # mapping of service check to <stack-selector-tool> component
 SERVICE_CHECK_DIRECTORY_MAP = {
   "HDFS_SERVICE_CHECK" : "hadoop-client",
@@ -113,6 +70,110 @@ HADOOP_DIR_DEFAULTS = {
   "lib": "/usr/lib/hadoop/lib"
 }
 
+PACKAGE_SCOPE_INSTALL = "INSTALL"
+PACKAGE_SCOPE_STANDARD = "STANDARD"
+PACKAGE_SCOPE_PATCH = "PATCH"
+PACKAGE_SCOPE_STACK_SELECT = "STACK-SELECT-PACKAGE"
+_PACKAGE_SCOPES = (PACKAGE_SCOPE_INSTALL, PACKAGE_SCOPE_STANDARD, PACKAGE_SCOPE_PATCH, PACKAGE_SCOPE_STACK_SELECT)
+
+
+def get_package_name(default_package = None):
+  """
+  Gets the stack-select package name for the service name and
+  component from the current command. Not all services/components are used with the
+  stack-select tools, so those will return no packages.
+
+  :return:  the stack-select package name for the command's component or None
+  """
+  config = Script.get_config()
+
+  if 'role' not in config or 'serviceName' not in config:
+    raise Fail("Both the role and the service name must be included in the 
command in order to determine which packages to use with the stack-select tool")
+
+  service_name = config['serviceName']
+  component_name = config['role']
+
+  # should return a single item
+  try:
+    package = get_packages(PACKAGE_SCOPE_STACK_SELECT, service_name, component_name)
+    if package is None:
+      package = default_package
+
+    return package
+  except:
+    if default_package is not None:
+      return default_package
+    else:
+      raise
+
+
+
+def get_packages(scope, service_name = None, component_name = None):
+  """
+  Gets the packages which should be used with the stack's stack-select tool for the
+  specified service/component. Not all services/components are used with the stack-select tools,
+  so those will return no packages.
+
+  :param scope: the scope of the command
+  :param service_name:  the service name, such as ZOOKEEPER
+  :param component_name: the component name, such as ZOOKEEPER_SERVER
+  :return:  the packages to use with stack-select or None
+  """
+  from resource_management.libraries.functions.default import default
+
+  import time
+
+  if scope not in _PACKAGE_SCOPES:
+    raise Fail("The specified scope of {0} is not valid".format(scope))
+
+  config = Script.get_config()
+
+  if service_name is None or component_name is None:
+    if 'role' not in config or 'serviceName' not in config:
+      raise Fail("Both the role and the service name must be included in the 
command in order to determine which packages to use with the stack-select tool")
+
+    service_name = config['serviceName']
+    component_name = config['role']
+
+
+  stack_name = default("/hostLevelParams/stack_name", None)
+  if stack_name is None:
+    raise Fail("The stack name is not present in the command. Packages for 
stack-select tool cannot be loaded.")
+
+  stack_select_packages_config = default("/configurations/cluster-env/stack_select_packages", None)
+  if stack_select_packages_config is None:
+    raise Fail("The stack packages are not defined on the command. Unable to 
load packages for the stack-select tool")
+
+  data = json.loads(stack_select_packages_config)
+
+  if stack_name not in data:
+    raise Fail(
+      "Cannot find stack-select packages for the {0} stack".format(stack_name))
+
+  stack_select_key = "stack-select"
+  data = data[stack_name]
+  if stack_select_key not in data:
+    raise Fail(
+      "There are no stack-select packages defined for this command for the {0} 
stack".format(stack_name))
+
+  # this should now be the dictionary of role name to package name
+  data = data[stack_select_key]
+  service_name = service_name.upper()
+  component_name = component_name.upper()
+
+  if service_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the 
stack-select package structure.".format(service_name))
+    return None
+
+  data = data[service_name]
+
+  if component_name not in data:
+    Logger.info("Skipping stack-select on {0} because it does not exist in the 
stack-select package structure.".format(component_name))
+    return None
+
+  return data[component_name][scope]
+
+
 def select_all(version_to_select):
   """
  Executes <stack-selector-tool> on every component for the specified version. If the value passed in is a
@@ -135,6 +196,20 @@ def select_all(version_to_select):
   Execute(command, only_if = only_if_command)
 
 
+def select_packages(version):
+  """
+  Uses the command's service and role to determine the stack-select packages which need to be invoked.
+  :param version: the version to select
+  :return: None
+  """
+  stack_select_packages = get_packages(PACKAGE_SCOPE_STANDARD)
+  if stack_select_packages is None:
+    return
+
+  for stack_select_package_name in stack_select_packages:
+    select(stack_select_package_name, version)
+
+
 def select(component, version):
   """
  Executes <stack-selector-tool> on the specific component and version. Some global
@@ -170,15 +245,15 @@ def get_role_component_current_stack_version():
   Gets the current HDP version of the component that this role command is for.
   :return:  the current HDP version of the specified component or None
   """
-  stack_select_component = None
   role = default("/role", "")
   role_command =  default("/roleCommand", "")
+
  stack_selector_name = stack_tools.get_stack_tool_name(stack_tools.STACK_SELECTOR_NAME)
 
-  if role in SERVER_ROLE_DIRECTORY_MAP:
-    stack_select_component = SERVER_ROLE_DIRECTORY_MAP[role]
-  elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
+  if role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP:
     stack_select_component = SERVICE_CHECK_DIRECTORY_MAP[role]
+  else:
+    stack_select_component = get_package_name()
 
   if stack_select_component is None:
     return None

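The lookup above assumes that cluster-env's stack_select_packages value is a JSON document keyed by stack name, then a "stack-select" section, then service and component names, with one entry per scope (INSTALL, PATCH, STANDARD, STACK-SELECT-PACKAGE). The sketch below is illustrative only (the DATANODE entry is a hypothetical example, not a copy of the stack_select_packages.json files added by this commit), but it mirrors the traversal that get_packages() performs:

import json

# Illustrative structure only: stack -> "stack-select" -> SERVICE -> COMPONENT -> scope.
stack_select_packages_config = json.dumps({
  "HDP": {
    "stack-select": {
      "HDFS": {
        "DATANODE": {
          "STACK-SELECT-PACKAGE": "hadoop-hdfs-datanode",
          "INSTALL": ["hadoop-hdfs-datanode"],
          "PATCH": ["hadoop-hdfs-datanode"],
          "STANDARD": ["hadoop-hdfs-datanode", "hadoop-client"]
        }
      }
    }
  }
})

def lookup(scope, stack_name, service_name, component_name):
  """Mirrors the dictionary walk performed by stack_select.get_packages()."""
  data = json.loads(stack_select_packages_config)[stack_name]["stack-select"]
  component = data.get(service_name.upper(), {}).get(component_name.upper())
  return None if component is None else component[scope]

print(lookup("STANDARD", "HDP", "HDFS", "DATANODE"))              # ['hadoop-hdfs-datanode', 'hadoop-client']
print(lookup("STACK-SELECT-PACKAGE", "HDP", "HDFS", "DATANODE"))  # hadoop-hdfs-datanode

Per the functions above, select_packages(version) iterates the STANDARD list and calls select() once per entry, while get_package_name() returns the single STACK-SELECT-PACKAGE value for the command's role.
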
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index cce3ac4..abbada4 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -182,12 +182,6 @@ class Script(object):
     except IOError, err:
      Script.structuredOut.update({"errMsg" : "Unable to write to " + self.stroutfile})
 
-  def get_component_name(self):
-    """
-    To be overridden by subclasses.
-     Returns a string with the component name used in selecting the version.
-    """
-    pass
 
  def get_config_dir_during_stack_upgrade(self, env, base_dir, conf_select_name):
     """
@@ -217,11 +211,13 @@ class Script(object):
     :param stack_name: One of HDP, HDPWIN, PHD, BIGTOP.
     :return: Append the version number to the structured out.
     """
+    from resource_management.libraries.functions import stack_select
+
     stack_name = Script.get_stack_name()
-    component_name = self.get_component_name()
+    stack_select_package_name = stack_select.get_package_name()
 
-    if component_name and stack_name:
-      component_version = get_component_version(stack_name, component_name)
+    if stack_select_package_name and stack_name:
+      component_version = get_component_version(stack_name, stack_select_package_name)
 
       if component_version:
         self.put_structured_out({"version": component_version})
@@ -426,11 +422,12 @@ class Script(object):
 
     :return: stack version including the build number. e.g.: 2.3.4.0-1234.
     """
+    from resource_management.libraries.functions import stack_select
+
     # preferred way is to get the actual selected version of current component
-    component_name = self.get_component_name()
-    if not Script.stack_version_from_distro_select and component_name:
-      from resource_management.libraries.functions import stack_select
-      Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(component_name)
+    stack_select_package_name = stack_select.get_package_name()
+    if not Script.stack_version_from_distro_select and stack_select_package_name:
+      Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(stack_select_package_name)
 
    # If <stack-selector-tool> has not yet been done (situations like first install),
     # we can use <stack-selector-tool> version itself.

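With get_component_name() gone, Script.save_component_version_to_structured_out() and get_stack_version_before_packages_installed() resolve the stack-select package name from the command's serviceName/role instead of a per-script override. The following is a self-contained paraphrase of that flow; the two helpers are stand-ins for stack_select.get_package_name() and get_component_version(), which need a live agent command and a stack-select tool to run for real:

def get_package_name():
  # stand-in for stack_select.get_package_name(): resolves the command's
  # serviceName/role through the STACK-SELECT-PACKAGE scope
  return "hadoop-hdfs-datanode"

def get_component_version(stack_name, package_name):
  # stand-in: the real helper asks <stack-selector-tool> which version the
  # given package currently points at
  return "2.3.4.0-1234"

structured_out = {}

def save_component_version_to_structured_out(stack_name):
  package_name = get_package_name()
  if package_name and stack_name:
    version = get_component_version(stack_name, package_name)
    if version:
      structured_out["version"] = version

save_component_version_to_structured_out("HDP")
print(structured_out)  # {'version': '2.3.4.0-1234'}
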
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 50cea9e..ec8b38c 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -2953,14 +2953,15 @@ public class BlueprintConfigurationProcessor {
 
     Set<String> properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
-        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY);
+        ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
+        ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY);
 
     try {
       Map<String, Map<String, String>> defaultStackProperties = configHelper.getDefaultStackProperties(stackId);
       Map<String,String> clusterEnvDefaultProperties = defaultStackProperties.get(CLUSTER_ENV_CONFIG_TYPE_NAME);
 
       for( String property : properties ){
-        if (defaultStackProperties.containsKey(property)) {
+        if (clusterEnvDefaultProperties.containsKey(property)) {
           configuration.setProperty(CLUSTER_ENV_CONFIG_TYPE_NAME, property,
               clusterEnvDefaultProperties.get(property));
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 974ad4f..8270370 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -92,6 +92,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features";
   public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools";
   public static final String CLUSTER_ENV_STACK_ROOT_PROPERTY = "stack_root";
+  public static final String CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY = "stack_select_packages";
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
index dad0ecf..55f9665 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
@@ -63,6 +63,7 @@ public class FinalUpgradeCatalog extends AbstractFinalUpgradeCatalog {
    * <ul>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_FEATURES_PROPERTY} from stack</li>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_TOOLS_PROPERTY} from stack</li>
+   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY} from stack</li>
    * </ul>
    *
   * Note: Config properties stack_features and stack_tools should always be updated to latest values as defined
@@ -84,7 +85,8 @@ public class FinalUpgradeCatalog extends AbstractFinalUpgradeCatalog {
       List<PropertyInfo> properties = stackInfo.getProperties();
       for(PropertyInfo property : properties) {
         if(property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY) ||
-            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY)) {
+            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY) ||
+            property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY)) {
           propertyMap.put(property.getName(), property.getValue());
         }
       }

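Every per-service script change below follows the same pattern: the hard-coded component name that used to be passed to stack_select.select() is dropped, and pre_upgrade_restart() calls stack_select.select_packages(version), which resolves the packages from the command's service and role. The sketch below paraphrases that before/after; conf_select and stack_select are tiny stand-ins so the snippet runs outside an Ambari agent:

class conf_select(object):
  @staticmethod
  def select(stack_name, package, version):
    # unchanged by this commit: conf-select still gets an explicit package name
    print("conf-select {0} {1} {2}".format(stack_name, package, version))

class stack_select(object):
  @staticmethod
  def select_packages(version):
    # The real function looks up the command's service/role in
    # stack_select_packages and runs <stack-selector-tool> once per package.
    print("stack-select set <mapped packages> {0}".format(version))

def pre_upgrade_restart(stack_name, version):
  # before this commit: stack_select.select("hadoop-hdfs-datanode", version)
  conf_select.select(stack_name, "hadoop", version)
  stack_select.select_packages(version)

pre_upgrade_restart("HDP", "2.3.4.0-1234")

The only per-script difference in the diffs below is the conf-select package name ("accumulo", "hadoop", "hbase", and so on), which this commit leaves alone.
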
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
index 67ca525..856446c 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
@@ -30,9 +30,6 @@ from accumulo_configuration import setup_conf_dir
 
 
 class AccumuloClient(Script):
-  def get_component_name(self):
-    return "accumulo-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -60,7 +57,7 @@ class AccumuloClient(Script):
 
     Logger.info("Executing Accumulo Client Upgrade pre-restart")
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select("accumulo-client", params.version)
+    stack_select.select_packages(params.version)
 
 if __name__ == "__main__":
   AccumuloClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index ebd418d..a0519b2 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -37,33 +37,10 @@ from accumulo_service import accumulo_service
 
 class AccumuloScript(Script):
 
-  # a mapping between the component named used by these scripts and the name
-  # which is used by <stack-selector-tool>
-  COMPONENT_TO_STACK_SELECT_MAPPING = {
-    "gc" : "accumulo-gc",
-    "master" : "accumulo-master",
-    "monitor" : "accumulo-monitor",
-    "tserver" : "accumulo-tablet",
-    "tracer" : "accumulo-tracer"
-  }
-
   def __init__(self, component):
     self.component = component
 
 
-  def get_component_name(self):
-    """
-    Gets the <stack-selector-tool> component name given the script component
-    :return:  the name of the component on the stack which is used by
-              <stack-selector-tool>
-    """
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      return None
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
-    return stack_component
-
-
   def install(self, env):
     self.install_packages(env)
 
@@ -107,19 +84,12 @@ class AccumuloScript(Script):
    if not (params.stack_version_formatted and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version_formatted)):
       return
 
-    if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING:
-      Logger.info("Unable to execute an upgrade for unknown component 
{0}".format(self.component))
-      raise Fail("Unable to execute an upgrade for unknown component 
{0}".format(self.component))
-
-    stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component]
+    stack_component = stack_select.get_package_name()
 
     Logger.info("Executing Accumulo Upgrade pre-restart for 
{0}".format(stack_component))
     conf_select.select(params.stack_name, "accumulo", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
-    # some accumulo components depend on the client, so update that too
-    stack_select.select("accumulo-client", params.version)
-      
   def get_log_folder(self):
     import params
     return params.log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
index d01ff84..6e4a168 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
@@ -30,16 +30,13 @@ from metadata import metadata
 
 class AtlasClient(Script):
 
-  def get_component_name(self):
-    return "atlas-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
    if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-client", params.version)
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 38f9a41..ff7bf5f 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -38,10 +38,6 @@ from setup_ranger_atlas import setup_ranger_atlas
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class MetadataServer(Script):
-
-  def get_component_name(self):
-    return "atlas-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -63,7 +59,7 @@ class MetadataServer(Script):
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
       conf_select.select(params.stack_name, "atlas", params.version)
-      stack_select.select("atlas-server", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
index 7c6bf39..20623f7 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
@@ -35,10 +35,6 @@ class DruidBase(Script):
   def __init__(self, nodeType=None):
     self.nodeType = nodeType
 
-  def get_component_name(self):
-    node_type_lower = self.nodeType.lower()
-    return format("druid-{node_type_lower}")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -55,7 +51,7 @@ class DruidBase(Script):
     env.set_params(params)
 
    if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.stack_version)
    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "druid", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
index b837b24..36dab51 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
@@ -36,9 +36,6 @@ from resource_management.libraries.resources.properties_file import PropertiesFi
 
 class Superset(Script):
 
-  def get_component_name(self):
-    return format("druid-superset")
-
   def install(self, env):
     self.install_packages(env)
 
@@ -98,7 +95,7 @@ class Superset(Script):
     env.set_params(params)
 
    if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
-      stack_select.select(self.get_component_name(), params.stack_version)
+      stack_select.select_packages(params.version)
    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
       conf_select.select(params.stack_name, "superset", params.stack_version)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
index 365f661..85d80ba 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
@@ -38,9 +38,6 @@ class FalconClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconClientLinux(FalconClient):
-  def get_component_name(self):
-    return "falcon-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -57,7 +54,7 @@ class FalconClientLinux(FalconClient):
 
     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-client", params.version)
+    stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class FalconClientWindows(FalconClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
index 5b2db44..c7ba556 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
@@ -60,9 +60,6 @@ class FalconServer(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FalconServerLinux(FalconServer):
-  def get_component_name(self):
-    return "falcon-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -85,7 +82,7 @@ class FalconServerLinux(FalconServer):
 
     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "falcon", params.version)
-    stack_select.select("falcon-server", params.version)
+    stack_select.select_packages(params.version)
 
     falcon_server_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
index 107ce6d..f57fae7 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
@@ -40,9 +40,6 @@ class FlumeHandler(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class FlumeHandlerLinux(FlumeHandler):
-  def get_component_name(self):
-    return "flume-server"
-
   def install(self, env):
     import params
     self.install_packages(env)
@@ -90,7 +87,7 @@ class FlumeHandlerLinux(FlumeHandler):
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "flume", params.version)
-    stack_select.select("flume-server", params.version)
+    stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
index f18a96a..4820d24 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
@@ -52,20 +52,16 @@ class HbaseClientWindows(HbaseClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseClientDefault(HbaseClient):
-  def get_component_name(self):
-    return "hbase-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
 
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("hbase-client", params.version)
 
       # phoenix may not always be deployed
       try:
-        stack_select.select("phoenix-client", params.version)
+        stack_select.select_packages(params.version)
       except Exception as e:
         print "Ignoring error due to missing phoenix-client"
         print str(e)
@@ -75,7 +71,6 @@ class HbaseClientDefault(HbaseClient):
       # of the final "CLIENTS" group and we need to ensure that hadoop-client
       # is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
index 8151572..5932c65 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_master.py
@@ -76,13 +76,10 @@ class HbaseMasterWindows(HbaseMaster):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseMasterDefault(HbaseMaster):
-  def get_component_name(self):
-    return "hbase-master"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-master")
+    upgrade.prestart(env)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
index 9194991..10dde75 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_regionserver.py
@@ -70,13 +70,10 @@ class HbaseRegionServerWindows(HbaseRegionServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HbaseRegionServerDefault(HbaseRegionServer):
-  def get_component_name(self):
-    return "hbase-regionserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
-    upgrade.prestart(env, "hbase-regionserver")
+    upgrade.prestart(env)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index b1bdb78..f2b44bd 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -34,10 +34,6 @@ class PhoenixQueryServer(Script):
     self.install_packages(env)
 
 
-  def get_component_name(self):
-    return "phoenix-server"
-
-
   def configure(self, env):
     import params
     env.set_params(params)
@@ -64,7 +60,7 @@ class PhoenixQueryServer(Script):
    if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       # phoenix uses hbase configs
       conf_select.select(params.stack_name, "hbase", params.version)
-      stack_select.select("phoenix-server", params.version)
+      stack_select.select_packages(params.version)
 
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
index 43c7ff4..8ea3616 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
@@ -33,12 +33,12 @@ from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import check_process_status
 
 
-def prestart(env, stack_component):
+def prestart(env):
   import params
 
  if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
     conf_select.select(params.stack_name, "hbase", params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)
 
 def post_regionserver(env):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index c7b813f..ba926cb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -38,14 +38,11 @@ from utils import get_dfsadmin_base_command
 
 class DataNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-datanode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
 
@@ -133,7 +130,7 @@ class DataNodeDefault(DataNode):
     env.set_params(params)
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-datanode", params.version)
+      stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing DataNode Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 87a6f52..f320eee 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -57,15 +57,12 @@ class HdfsClient(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HdfsClientDefault(HdfsClient):
 
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
 class HdfsClientWindows(HdfsClient):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index 0805ff4..402e7ad 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -42,9 +42,6 @@ class JournalNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class JournalNodeDefault(JournalNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-journalnode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -52,7 +49,7 @@ class JournalNodeDefault(JournalNode):
 
    if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-journalnode", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 7f7e30c..aac7977 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -67,14 +67,11 @@ except ImportError:
 
 class NameNode(Script):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-namenode"
-
   def get_hdfs_binary(self):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
+    component_name = stack_select.get_package_name()
     return get_hdfs_binary(component_name)
 
   def install(self, env):
@@ -204,7 +201,7 @@ class NameNodeDefault(NameNode):
    if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
       conf_select.select(params.stack_name, "hadoop", params.version)
 
-    stack_select.select("hadoop-hdfs-namenode", params.version)
+    stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade post-restart")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
index 03a497a..dea9d43 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -31,10 +31,6 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 
 
 class NFSGateway(Script):
-
-  def get_component_name(self):
-    return "hadoop-hdfs-nfs3"
-
   def install(self, env):
     import params
 
@@ -48,7 +44,7 @@ class NFSGateway(Script):
 
    if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-nfs3", params.version)
+      stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index ac45ffd..115a830 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -64,9 +64,6 @@ class SNameNode(Script):
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SNameNodeDefault(SNameNode):
 
-  def get_component_name(self):
-    return "hadoop-hdfs-secondarynamenode"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
@@ -74,7 +71,7 @@ class SNameNodeDefault(SNameNode):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-secondarynamenode", params.version)
+      stack_select.select_packages(params.version)
       
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index db68544..fabbabb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -35,13 +35,6 @@ from resource_management.libraries.script import Script
 from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class ZkfcSlave(Script):
-  def get_component_name(self):
-    import params
-    if params.version_for_stack_feature_checks and check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
-      # params.version is not defined when installing cluster from blueprint
-      return "hadoop-hdfs-zkfc"
-    pass
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -137,7 +130,7 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(params)
     if check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-hdfs-zkfc", params.version)
+      stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):
   try:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
index b37698e..677479f 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py
@@ -51,12 +51,6 @@ class HCatClientWindows(HCatClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HCatClientDefault(HCatClient):
-  def get_component_name(self):
-    # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
-    # update after daemons, this ensures that the hcat directories are correct on hosts
-    # which do not include the WebHCat daemon
-    return "hive-webhcat"
-
 
   def pre_upgrade_restart(self, env, upgrade_type=None):
     """
@@ -78,7 +72,7 @@ class HCatClientDefault(HCatClient):
     # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
     # update after daemons, this ensures that the hcat directories are correct on hosts
     # which do not include the WebHCat daemon
-    stack_select.select("hive-webhcat", params.version)
+    stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

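The comment retained in the hunk above captures the knowledge this commit is relocating: which <stack-selector-tool> entry a component rides on (HCat reusing hive-webhcat, for example) now belongs to the stack definition consulted by select_packages(), not to each script. A hypothetical illustration of such a mapping, using only package names that appear in this diff (the dict layout below is illustrative only and is not the actual metadata format added by the commit):

    # Hypothetical component -> stack-select package mapping; the real structure
    # lives with the stack and is read by stack_select.select_packages().
    HIVE_STACK_SELECT_PACKAGES = {
        "HCAT": ["hive-webhcat"],        # clients piggyback on the WebHCat entry
        "HIVE_CLIENT": ["hadoop-client"],
        "HIVE_METASTORE": ["hive-metastore"],
        "HIVE_SERVER": ["hive-server2"],
        "HIVE_SERVER_INTERACTIVE": ["hive-server2-hive2"],
        "WEBHCAT_SERVER": ["hive-webhcat"],
    }
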
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index 3d9bfd7..1694816 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -50,9 +50,6 @@ class HiveClientWindows(HiveClient):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveClientDefault(HiveClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Hive client Stack Upgrade pre-restart")
 
@@ -61,7 +58,7 @@ class HiveClientDefault(HiveClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
+      stack_select.select_packages(params.version)
 
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 633e82e..c84f47e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -87,10 +87,6 @@ class HiveMetastoreWindows(HiveMetastore):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveMetastoreDefault(HiveMetastore):
-  def get_component_name(self):
-    return "hive-metastore"
-
-
   def status(self, env):
     import status_params
     from resource_management.libraries.functions import check_process_status
@@ -111,7 +107,7 @@ class HiveMetastoreDefault(HiveMetastore):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-metastore", params.version)
+      stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \
             check_stack_feature(StackFeature.HIVE_METASTORE_UPGRADE_SCHEMA, params.stack_version_formatted_major):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 8f7d068..a1212e0 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -72,9 +72,6 @@ class HiveServerWindows(HiveServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerDefault(HiveServer):
-  def get_component_name(self):
-    return "hive-server2"
-
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -118,7 +115,7 @@ class HiveServerDefault(HiveServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "hive", params.version)
-      stack_select.select("hive-server2", params.version)
+      stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS
       resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index c9582a3..16d7907 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -67,10 +67,6 @@ class HiveServerInteractive(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class HiveServerInteractiveDefault(HiveServerInteractive):
-
-    def get_component_name(self):
-      return "hive-server2-hive2"
-
     def install(self, env):
       import params
       self.install_packages(env)
@@ -86,7 +82,7 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
       env.set_params(params)
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-        stack_select.select("hive-server2-hive2", params.version)
+        stack_select.select_packages(params.version)
         conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index da5e82b..db86d9e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -64,9 +64,6 @@ class WebHCatServerWindows(WebHCatServer):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class WebHCatServerDefault(WebHCatServer):
-  def get_component_name(self):
-    return "hive-webhcat"
-
   def status(self, env):
     import status_params
     env.set_params(status_params)
@@ -81,7 +78,7 @@ class WebHCatServerDefault(WebHCatServer):
       # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
       conf_select.select(params.stack_name, "hive-hcatalog", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hive-webhcat", params.version)
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
index 4512038..08c3dd1 100644
--- a/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
+++ b/ambari-server/src/main/resources/common-services/KAFKA/0.8.1/package/scripts/kafka_broker.py
@@ -36,9 +36,6 @@ from setup_ranger_kafka import setup_ranger_kafka
 
 class KafkaBroker(Script):
 
-  def get_component_name(self):
-    return "kafka-broker"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -52,7 +49,7 @@ class KafkaBroker(Script):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      stack_select.select("kafka-broker", params.version)
+      stack_select.select_packages(params.version)
 
     if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
       conf_select.select(params.stack_name, "kafka", params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
index c91b394..6df68cc 100644
--- a/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
+++ b/ambari-server/src/main/resources/common-services/KNOX/0.5.0.2.2/package/scripts/knox_gateway.py
@@ -49,9 +49,6 @@ from setup_ranger_knox import setup_ranger_knox
 
 
 class KnoxGateway(Script):
-  def get_component_name(self):
-    return "knox-server"
-
   def install(self, env):
     import params
     env.set_params(params)
@@ -122,7 +119,7 @@ class KnoxGatewayDefault(KnoxGateway):
 
     # <conf-selector-tool> will change the symlink to the conf folder.
     conf_select.select(params.stack_name, "knox", params.version)
-    stack_select.select("knox-server", params.version)
+    stack_select.select_packages(params.version)
 
     # seed the new Knox data directory with the keystores of yesteryear
     if params.upgrade_direction == Direction.UPGRADE:

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
index 3486add..b598d17 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/mahout_client.py
@@ -29,18 +29,13 @@ from resource_management.libraries.functions.default import default
 
 class MahoutClient(Script):
 
-  def get_component_name(self):
-    return "mahout-client"
-
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     Logger.info("Executing Stack Upgrade pre-restart")
     import params
     env.set_params(params)
 
     conf_select.select(params.stack_name, "mahout", params.version)
-    stack_select.select("mahout-client", params.version )
-
+    stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
index 9bbca9f..ac8b078 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_client.py
@@ -30,9 +30,6 @@ from resource_management.core.exceptions import ClientComponentHasNoStatus
 
 class OozieClient(Script):
 
-  def get_component_name(self):
-    return "oozie-client"
-
   def install(self, env):
     self.install_packages(env)
     self.configure(env)
@@ -59,7 +56,7 @@ class OozieClient(Script):
 
     Logger.info("Executing Oozie Client Stack Upgrade pre-restart")
     conf_select.select(params.stack_name, "oozie", params.version)
-    stack_select.select("oozie-client", params.version)
+    stack_select.select_packages(params.version)
 
   # We substitute some configs (oozie.authentication.kerberos.principal) before generation (see oozie.py and params.py).
   # This function returns changed configs (it's used for config generation before config download)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
index b3a8643..82a764d 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server.py
@@ -46,9 +46,6 @@ from resource_management.core.resources.zkmigrator import ZkMigrator
 
 class OozieServer(Script):
 
-  def get_component_name(self):
-    return "oozie-server"
-
   def install(self, env):
     self.install_packages(env)
 
@@ -67,9 +64,8 @@ class OozieServer(Script):
         # This is required as both need to be pointing to new installed oozie version.
 
         # Sets the symlink : eg: <stack-root>/current/oozie-client -> <stack-root>/a.b.c.d-<version>/oozie
-        stack_select.select("oozie-client", params.version)
         # Sets the symlink : eg: <stack-root>/current/oozie-server -> <stack-root>/a.b.c.d-<version>/oozie
-        stack_select.select("oozie-server", params.version)
+        stack_select.select_packages(params.version)
 
       if params.version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.version):
         conf_select.select(params.stack_name, "oozie", params.version)
@@ -127,7 +123,7 @@ class OozieServerDefault(OozieServer):
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "oozie", params.version)
-      stack_select.select("oozie-server", params.version)
+      stack_select.select_packages(params.version)
 
     OozieUpgrade.prepare_libext_directory(upgrade_type=upgrade_type)
 

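Oozie Server is the case that shows why the new call takes no package name: the old code had to select oozie-client and oozie-server separately, while a single select_packages() call is expected to move every symlink the component owns. A short sketch of the consolidated upgrade step (the function name is illustrative; params stands in for the scripts' params module):

    from resource_management.libraries.functions import conf_select, stack_select
    from resource_management.libraries.functions import StackFeature
    from resource_management.libraries.functions.stack_features import check_stack_feature

    def oozie_server_pre_upgrade_restart(params):
      if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
        conf_select.select(params.stack_name, "oozie", params.version)
        # A single call replaces the separate oozie-client / oozie-server selects.
        stack_select.select_packages(params.version)
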
http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
index 3233381..693af5c 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
@@ -43,9 +43,6 @@ class PigClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class PigClientLinux(PigClient):
-  def get_component_name(self):
-    return "hadoop-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
     env.set_params(params)
@@ -53,7 +50,7 @@ class PigClientLinux(PigClient):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "pig", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version) # includes 
pig-client
+      stack_select.select_packages(params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py b/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
index 3dbce5c..f973e69 100755
--- a/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
+++ b/ambari-server/src/main/resources/common-services/R4ML/0.8.0/package/scripts/r4ml_client.py
@@ -34,9 +34,6 @@ class R4MLClient(Script):
     import params
     env.set_params(params)
 
-  def get_component_name(self):
-    return "r4ml-client"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -44,7 +41,7 @@ class R4MLClient(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing R4ML Client Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "r4ml", params.version)
-      stack_select.select("r4ml-client", params.version)
+      stack_select.select_packages(params.version)
 
   def stack_upgrade_save_new_config(self, env):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
index ecbacbf..e27a03e 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_admin.py
@@ -38,9 +38,6 @@ import os, errno
 
 class RangerAdmin(Script):
 
-  def get_component_name(self):
-    return "ranger-admin"
-
   def install(self, env):
     self.install_packages(env)
     import params
@@ -210,7 +207,7 @@ class RangerAdmin(Script):
     stack_name = upgrade_stack[0]
     stack_version = upgrade_stack[1]
 
-    stack_select.select("ranger-admin", stack_version)
+    stack_select.select_packages(params.version)
     conf_select.select(stack_name, "ranger-admin", stack_version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
index a474e76..5ff498f 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_tagsync.py
@@ -86,10 +86,7 @@ class RangerTagsync(Script):
     if params.stack_supports_ranger_tagsync:
       Logger.info("Executing Ranger Tagsync Stack Upgrade pre-restart")
       conf_select.select(params.stack_name, "ranger-tagsync", params.version)
-      stack_select.select("ranger-tagsync", params.version)
-
-  def get_component_name(self):
-    return "ranger-tagsync"
+      stack_select.select_packages(params.version)
 
   def get_log_folder(self):
     import params
@@ -111,7 +108,7 @@ class RangerTagsync(Script):
     stack_name = upgrade_stack[0]
     stack_version = upgrade_stack[1]
 
-    stack_select.select("ranger-tagsync", stack_version)
+    stack_select.select_packages(params.version)
     conf_select.select(stack_name, "ranger-tagsync", stack_version)
     if params.stack_supports_ranger_tagsync_ssl_xml_support:
       Logger.info("Upgrading Tagsync, stack support Atlas user for Tagsync, 
creating keystore for same.")

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
index b9366f6..8654bc2 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/ranger_usersync.py
@@ -109,9 +109,6 @@ class RangerUsersync(Script):
     env.set_params(params)
     upgrade.prestart(env, "ranger-usersync")
 
-  def get_component_name(self):
-    return "ranger-usersync"
-
   def get_log_folder(self):
     import params
     return params.usersync_log_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
index a07a1fd..ca1b2bf 100644
--- a/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER/0.4.0/package/scripts/upgrade.py
@@ -28,4 +28,4 @@ def prestart(env, stack_component):
 
   if params.version and params.stack_supports_rolling_upgrade:
     conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
index 829a998..0bd11f3 100755
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/kms_server.py
@@ -33,9 +33,6 @@ import upgrade
 
 class KmsServer(Script):
 
-  def get_component_name(self):
-    return "ranger-kms"
-
   def install(self, env):
     self.install_packages(env)
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
index 8478bb8..73c32ff 100644
--- a/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/RANGER_KMS/0.5.0.2.3/package/scripts/upgrade.py
@@ -27,4 +27,4 @@ def prestart(env, stack_component):
 
   if params.version and params.stack_supports_config_versioning:
     conf_select.select(params.stack_name, stack_component, params.version)
-    stack_select.select(stack_component, params.version)
+    stack_select.select_packages(params.version)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
index 08c8569..adc7026 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/slider_client.py
@@ -35,22 +35,18 @@ class SliderClient(Script):
 
 @OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
 class SliderClientLinux(SliderClient):
-  def get_component_name(self):
-    return "slider-client"
-
   def pre_upgrade_restart(self, env,  upgrade_type=None):
     import params
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       conf_select.select(params.stack_name, "slider", params.version)
-      stack_select.select("slider-client", params.version)
+      stack_select.select_packages(params.version)
 
       # also set all of the hadoop clients since slider client is upgraded as
       # part of the final "CLIENTS" group and we need to ensure that
       # hadoop-client is also set
       conf_select.select(params.stack_name, "hadoop", params.version)
-      stack_select.select("hadoop-client", params.version)
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/56d2ade2/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
index 4fe2f54..c36def4 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.1/package/scripts/job_history_server.py
@@ -68,9 +68,6 @@ class JobHistoryServer(Script):
     check_process_status(status_params.spark_history_server_pid_file)
     
 
-  def get_component_name(self):
-    return "spark-historyserver"
-
   def pre_upgrade_restart(self, env, upgrade_type=None):
     import params
 
@@ -78,7 +75,7 @@ class JobHistoryServer(Script):
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
       Logger.info("Executing Spark Job History Server Stack Upgrade 
pre-restart")
       conf_select.select(params.stack_name, "spark", params.version)
-      stack_select.select("spark-historyserver", params.version)
+      stack_select.select_packages(params.version)
 
       # Spark 1.3.1.2.3, and higher, which was included in HDP 2.3, does not have a dependency on Tez, so it does not
       # need to copy the tarball, otherwise, copy it.
