AMBARI-21581 - Replace Hard Coded conf-select Structures (jonathanhurley)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/97ccf3bf
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/97ccf3bf
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/97ccf3bf

Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 97ccf3bff96f8717c185776c7c000496df90daa0
Parents: 0a9f6fa
Author: Jonathan Hurley <jhur...@hortonworks.com>
Authored: Tue Aug 1 10:02:37 2017 -0400
Committer: Jonathan Hurley <jhur...@hortonworks.com>
Committed: Wed Aug 2 08:45:31 2017 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          |  213 +---
 .../libraries/functions/stack_select.py         |    8 +-
 .../BlueprintConfigurationProcessor.java        |    2 +-
 .../ambari/server/state/ConfigHelper.java       |    2 +-
 .../server/upgrade/FinalUpgradeCatalog.java     |    4 +-
 .../package/scripts/accumulo_client.py          |    2 -
 .../package/scripts/accumulo_script.py          |    2 -
 .../0.1.0.2.3/package/scripts/atlas_client.py   |    3 +-
 .../package/scripts/metadata_server.py          |    3 +-
 .../0.7.0.3.0/package/scripts/atlas_client.py   |    3 +-
 .../package/scripts/metadata_server.py          |    3 +-
 .../DRUID/0.9.2/package/scripts/druid_node.py   |    3 -
 .../DRUID/0.9.2/package/scripts/superset.py     |    3 -
 .../0.5.0.2.1/package/scripts/falcon_client.py  |    3 +-
 .../0.5.0.2.1/package/scripts/falcon_server.py  |    2 -
 .../1.4.0.2.0/package/scripts/flume_handler.py  |    3 +-
 .../0.96.0.2.0/package/scripts/hbase_client.py  |   11 +-
 .../package/scripts/phoenix_queryserver.py      |    5 +-
 .../HBASE/0.96.0.2.0/package/scripts/upgrade.py |    3 +-
 .../2.0.0.3.0/package/scripts/hbase_client.py   |    9 +-
 .../package/scripts/phoenix_queryserver.py      |    2 -
 .../HBASE/2.0.0.3.0/package/scripts/upgrade.py  |    3 +-
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |    6 +-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |    3 +-
 .../2.1.0.2.0/package/scripts/journalnode.py    |    3 +-
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |    9 +-
 .../2.1.0.2.0/package/scripts/nfsgateway.py     |    2 -
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |    3 +-
 .../2.1.0.2.0/package/scripts/zkfc_slave.py     |    3 +-
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |    6 +-
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |    3 +-
 .../3.0.0.3.0/package/scripts/journalnode.py    |    3 +-
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |    6 +-
 .../3.0.0.3.0/package/scripts/nfsgateway.py     |    2 -
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |    3 +-
 .../3.0.0.3.0/package/scripts/zkfc_slave.py     |    3 +-
 .../0.12.0.2.0/package/scripts/hive_client.py   |    4 +-
 .../package/scripts/hive_metastore.py           |    2 -
 .../0.12.0.2.0/package/scripts/hive_server.py   |    2 -
 .../package/scripts/hive_server_interactive.py  |    2 -
 .../package/scripts/webhcat_server.py           |    5 +-
 .../2.1.0.3.0/package/scripts/hive_client.py    |    4 +-
 .../2.1.0.3.0/package/scripts/hive_metastore.py |    2 -
 .../2.1.0.3.0/package/scripts/hive_server.py    |    2 -
 .../package/scripts/hive_server_interactive.py  |    2 -
 .../2.1.0.3.0/package/scripts/webhcat_server.py |    4 +-
 .../0.10.0.3.0/package/scripts/kafka_broker.py  |    4 -
 .../KAFKA/0.8.1/package/scripts/kafka_broker.py |    6 +-
 .../0.5.0.2.2/package/scripts/knox_gateway.py   |    4 +-
 .../0.5.0.3.0/package/scripts/knox_gateway.py   |    4 +-
 .../1.0.0.2.3/package/scripts/mahout_client.py  |    2 -
 .../4.0.0.2.0/package/scripts/oozie_client.py   |    3 +-
 .../4.0.0.2.0/package/scripts/oozie_server.py   |    5 -
 .../4.2.0.3.0/package/scripts/oozie_client.py   |    3 +-
 .../4.2.0.3.0/package/scripts/oozie_server.py   |    5 -
 .../0.12.0.2.0/package/scripts/pig_client.py    |    4 +-
 .../0.16.1.3.0/package/scripts/pig_client.py    |    4 +-
 .../0.4.0/package/scripts/ranger_admin.py       |    8 +-
 .../0.4.0/package/scripts/ranger_tagsync.py     |    8 +-
 .../0.4.0/package/scripts/ranger_usersync.py    |    4 +-
 .../RANGER/0.4.0/package/scripts/upgrade.py     |   31 -
 .../1.0.0.3.0/package/scripts/ranger_admin.py   |    4 +-
 .../1.0.0.3.0/package/scripts/ranger_tagsync.py |    7 +-
 .../package/scripts/ranger_usersync.py          |    2 +-
 .../RANGER/1.0.0.3.0/package/scripts/upgrade.py |    4 +-
 .../0.5.0.2.3/package/scripts/kms_server.py     |    3 +-
 .../0.5.0.2.3/package/scripts/upgrade.py        |   30 -
 .../1.0.0.3.0/package/scripts/kms_server.py     |    2 +-
 .../1.0.0.3.0/package/scripts/upgrade.py        |    3 +-
 .../0.60.0.2.2/package/scripts/slider_client.py |    8 +-
 .../0.91.0.3.0/package/scripts/slider_client.py |    8 +-
 .../1.2.1/package/scripts/job_history_server.py |    3 +-
 .../SPARK/1.2.1/package/scripts/livy_server.py  |    3 +-
 .../SPARK/1.2.1/package/scripts/spark_client.py |    4 +-
 .../package/scripts/spark_thrift_server.py      |    3 +-
 .../2.2.0/package/scripts/job_history_server.py |    3 +-
 .../SPARK/2.2.0/package/scripts/livy_server.py  |    3 +-
 .../SPARK/2.2.0/package/scripts/spark_client.py |    3 +-
 .../package/scripts/spark_thrift_server.py      |    3 +-
 .../2.0.0/package/scripts/job_history_server.py |    3 +-
 .../2.0.0/package/scripts/livy2_server.py       |    3 +-
 .../2.0.0/package/scripts/spark_client.py       |    3 +-
 .../package/scripts/spark_thrift_server.py      |    3 +-
 .../1.4.4.2.0/package/scripts/sqoop_client.py   |    4 +-
 .../1.4.4.3.0/package/scripts/sqoop_client.py   |    4 +-
 .../STORM/0.9.1/package/scripts/drpc_server.py  |    2 -
 .../STORM/0.9.1/package/scripts/nimbus.py       |    2 -
 .../STORM/0.9.1/package/scripts/nimbus_prod.py  |    2 -
 .../STORM/0.9.1/package/scripts/pacemaker.py    |    2 -
 .../STORM/0.9.1/package/scripts/supervisor.py   |    2 -
 .../0.9.1/package/scripts/supervisor_prod.py    |    2 -
 .../STORM/0.9.1/package/scripts/ui_server.py    |    2 -
 .../1.0.1.3.0/package/scripts/drpc_server.py    |    2 -
 .../STORM/1.0.1.3.0/package/scripts/nimbus.py   |    2 -
 .../1.0.1.3.0/package/scripts/nimbus_prod.py    |    2 -
 .../1.0.1.3.0/package/scripts/pacemaker.py      |    2 -
 .../1.0.1.3.0/package/scripts/supervisor.py     |    2 -
 .../package/scripts/supervisor_prod.py          |    2 -
 .../1.0.1.3.0/package/scripts/ui_server.py      |    2 -
 .../TEZ/0.4.0.2.1/package/scripts/tez_client.py |    6 -
 .../TEZ/0.9.0.3.0/package/scripts/tez_client.py |    4 -
 .../scripts/application_timeline_server.py      |    3 +-
 .../2.1.0.2.0/package/scripts/historyserver.py  |    3 +-
 .../package/scripts/mapreduce2_client.py        |    6 +-
 .../2.1.0.2.0/package/scripts/nodemanager.py    |    3 +-
 .../package/scripts/resourcemanager.py          |    2 -
 .../2.1.0.2.0/package/scripts/yarn_client.py    |    3 +-
 .../scripts/application_timeline_server.py      |    3 +-
 .../3.0.0.3.0/package/scripts/historyserver.py  |    3 +-
 .../package/scripts/mapreduce2_client.py        |    4 +-
 .../3.0.0.3.0/package/scripts/nodemanager.py    |    3 +-
 .../package/scripts/resourcemanager.py          |    2 -
 .../3.0.0.3.0/package/scripts/yarn_client.py    |    3 +-
 .../0.6.0.2.5/package/scripts/master.py         |    2 -
 .../0.6.0.3.0/package/scripts/master.py         |    2 -
 .../3.4.5/package/scripts/zookeeper_client.py   |    3 +-
 .../3.4.5/package/scripts/zookeeper_server.py   |    2 -
 .../custom_actions/scripts/install_packages.py  |    4 -
 .../custom_actions/scripts/ru_set_all.py        |    1 -
 .../HDP/2.0.6/configuration/cluster-env.xml     |    6 +-
 .../scripts/shared_initialization.py            |   10 +-
 .../HDP/2.0.6/properties/stack_packages.json    | 1146 ++++++++++++++++++
 .../2.0.6/properties/stack_select_packages.json |  952 ---------------
 .../HDP/3.0/properties/stack_packages.json      | 1042 ++++++++++++++++
 .../3.0/properties/stack_select_packages.json   |  848 -------------
 .../python/custom_actions/test_ru_set_all.py    |    2 +
 .../stacks/2.0.6/HBASE/test_hbase_client.py     |   17 -
 .../stacks/2.0.6/HBASE/test_hbase_master.py     |   11 -
 .../2.0.6/HBASE/test_hbase_regionserver.py      |   10 -
 .../2.0.6/HBASE/test_phoenix_queryserver.py     |    5 -
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   11 -
 .../stacks/2.0.6/HDFS/test_hdfs_client.py       |   10 -
 .../stacks/2.0.6/HDFS/test_journalnode.py       |    9 -
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |    9 -
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |    4 +-
 .../stacks/2.0.6/HIVE/test_hive_client.py       |   17 -
 .../stacks/2.0.6/HIVE/test_hive_server.py       |   11 -
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    |   15 -
 .../stacks/2.0.6/OOZIE/test_oozie_client.py     |   11 -
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |   31 +-
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |   16 -
 .../python/stacks/2.0.6/SQOOP/test_sqoop.py     |   11 -
 .../stacks/2.0.6/YARN/test_historyserver.py     |    3 -
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   10 -
 .../stacks/2.0.6/YARN/test_nodemanager.py       |   10 -
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |   10 -
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   10 -
 .../2.0.6/ZOOKEEPER/test_zookeeper_client.py    |   10 -
 .../2.0.6/ZOOKEEPER/test_zookeeper_server.py    |   12 -
 .../hooks/after-INSTALL/test_after_install.py   |   13 +-
 .../stacks/2.1/FALCON/test_falcon_client.py     |   10 -
 .../stacks/2.1/FALCON/test_falcon_server.py     |   10 -
 .../stacks/2.1/HIVE/test_hive_metastore.py      |   16 -
 .../stacks/2.1/STORM/test_storm_drpc_server.py  |   10 -
 .../stacks/2.1/STORM/test_storm_nimbus.py       |   11 -
 .../stacks/2.1/STORM/test_storm_nimbus_prod.py  |   11 -
 .../stacks/2.1/STORM/test_storm_supervisor.py   |   10 -
 .../2.1/STORM/test_storm_supervisor_prod.py     |   11 -
 .../stacks/2.1/STORM/test_storm_ui_server.py    |   10 -
 .../python/stacks/2.1/TEZ/test_tez_client.py    |   15 -
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   10 -
 .../stacks/2.2/ACCUMULO/test_accumulo_client.py |   11 -
 .../stacks/2.2/KAFKA/test_kafka_broker.py       |   11 -
 .../python/stacks/2.2/KNOX/test_knox_gateway.py |   26 -
 .../stacks/2.2/RANGER/test_ranger_admin.py      |   10 -
 .../stacks/2.2/RANGER/test_ranger_usersync.py   |    9 -
 .../stacks/2.2/SLIDER/test_slider_client.py     |   17 -
 .../stacks/2.2/SPARK/test_job_history_server.py |   10 -
 .../stacks/2.2/SPARK/test_spark_client.py       |    9 -
 .../stacks/2.2/common/test_conf_select.py       |   30 +-
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |   14 +-
 .../2.3/SPARK/test_spark_thrift_server.py       |   10 -
 .../src/test/python/stacks/utils/RMFTestCase.py |   12 +-
 173 files changed, 2346 insertions(+), 2906 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 4f11633..ffcaad5 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -21,9 +21,9 @@ limitations under the License.
 __all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir", "get_package_dirs"]
 
 # Python Imports
-import copy
 import os
 import subprocess
+import ambari_simplejson as json
 
 # Local Imports
 import stack_select
@@ -41,191 +41,6 @@ from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
 
-STACK_ROOT_PATTERN = "{{ stack_root }}"
-
-_PACKAGE_DIRS = {
-  "atlas": [
-    {
-      "conf_dir": "/etc/atlas/conf",
-      "current_dir": "{0}/current/atlas-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "accumulo": [
-    {
-      "conf_dir": "/etc/accumulo/conf",
-      "current_dir": 
"{0}/current/accumulo-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "falcon": [
-    {
-      "conf_dir": "/etc/falcon/conf",
-      "current_dir": 
"{0}/current/falcon-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hadoop": [
-    {
-      "conf_dir": "/etc/hadoop/conf",
-      "current_dir": 
"{0}/current/hadoop-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hbase": [
-    {
-      "conf_dir": "/etc/hbase/conf",
-      "current_dir": "{0}/current/hbase-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive": [
-    {
-      "conf_dir": "/etc/hive/conf",
-      "current_dir": "{0}/current/hive-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive2": [
-    {
-      "conf_dir": "/etc/hive2/conf",
-      "current_dir": 
"{0}/current/hive-server2-hive2/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "kafka": [
-    {
-      "conf_dir": "/etc/kafka/conf",
-      "current_dir": "{0}/current/kafka-broker/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "knox": [
-    {
-      "conf_dir": "/etc/knox/conf",
-      "current_dir": "{0}/current/knox-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "mahout": [
-    {
-      "conf_dir": "/etc/mahout/conf",
-      "current_dir": 
"{0}/current/mahout-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "nifi": [
-    {
-      "conf_dir": "/etc/nifi/conf",
-      "current_dir": "{0}/current/nifi/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "oozie": [
-    {
-      "conf_dir": "/etc/oozie/conf",
-      "current_dir": "{0}/current/oozie-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "phoenix": [
-    {
-      "conf_dir": "/etc/phoenix/conf",
-      "current_dir": 
"{0}/current/phoenix-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-admin": [
-    {
-      "conf_dir": "/etc/ranger/admin/conf",
-      "current_dir": "{0}/current/ranger-admin/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-tagsync": [
-    {
-      "conf_dir": "/etc/ranger/tagsync/conf",
-      "current_dir": 
"{0}/current/ranger-tagsync/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-kms": [
-    {
-      "conf_dir": "/etc/ranger/kms/conf",
-      "current_dir": "{0}/current/ranger-kms/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "ranger-usersync": [
-    {
-      "conf_dir": "/etc/ranger/usersync/conf",
-      "current_dir": 
"{0}/current/ranger-usersync/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "slider": [
-    {
-      "conf_dir": "/etc/slider/conf",
-      "current_dir": 
"{0}/current/slider-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "spark": [
-    {
-      "conf_dir": "/etc/spark/conf",
-      "current_dir": "{0}/current/spark-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "zeppelin": [
-    {
-      "conf_dir": "/etc/zeppelin/conf",
-      "current_dir": 
"{0}/current/zeppelin-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "spark2": [
-    {
-      "conf_dir": "/etc/spark2/conf",
-      "current_dir": 
"{0}/current/spark2-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "sqoop": [
-    {
-      "conf_dir": "/etc/sqoop/conf",
-      "current_dir": "{0}/current/sqoop-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "storm": [
-    {
-      "conf_dir": "/etc/storm/conf",
-      "current_dir": "{0}/current/storm-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "tez": [
-    {
-      "conf_dir": "/etc/tez/conf",
-      "current_dir": "{0}/current/tez-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "zookeeper": [
-    {
-      "conf_dir": "/etc/zookeeper/conf",
-      "current_dir": 
"{0}/current/zookeeper-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "pig": [
-    {
-      "conf_dir": "/etc/pig/conf",
-      "current_dir": "{0}/current/pig-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "flume": [
-    {
-      "conf_dir": "/etc/flume/conf",
-      "current_dir": "{0}/current/flume-server/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "storm-slider-client": [
-    {
-      "conf_dir": "/etc/storm-slider-client/conf",
-      "current_dir": 
"{0}/current/storm-slider-client/conf".format(STACK_ROOT_PATTERN)
-    }
-  ],
-  "hive-hcatalog": [
-    {
-      "conf_dir": "/etc/hive-webhcat/conf",
-      "prefix": "/etc/hive-webhcat",
-      "current_dir": 
"{0}/current/hive-webhcat/etc/webhcat".format(STACK_ROOT_PATTERN)
-    },
-    {
-      "conf_dir": "/etc/hive-hcatalog/conf",
-      "prefix": "/etc/hive-hcatalog",
-      "current_dir": 
"{0}/current/hive-webhcat/etc/hcatalog".format(STACK_ROOT_PATTERN)
-    }
-  ]
-}
-
 DIRECTORY_TYPE_BACKUP = "backup"
 DIRECTORY_TYPE_CURRENT = "current"
 
@@ -241,13 +56,35 @@ def get_package_dirs():
   Get package dir mappings
   :return:
   """
+  stack_name = default("/hostLevelParams/stack_name", None)
+  if stack_name is None:
+    raise Fail("The stack name is not present in the command. Packages for 
conf-select tool cannot be loaded.")
+
+  stack_packages_config = 
default("/configurations/cluster-env/stack_packages", None)
+  if stack_packages_config is None:
+    raise Fail("The stack packages are not defined on the command. Unable to 
load packages for the conf-select tool")
+
+  data = json.loads(stack_packages_config)
+
+  if stack_name not in data:
+    raise Fail(
+      "Cannot find conf-select packages for the {0} stack".format(stack_name))
+
+  conf_select_key = "conf-select"
+  data = data[stack_name]
+  if conf_select_key not in data:
+    raise Fail(
+      "There are no conf-select packages defined for this command for the {0} 
stack".format(stack_name))
+
+  package_dirs = data[conf_select_key]
+
   stack_root = Script.get_stack_root()
-  package_dirs = copy.deepcopy(_PACKAGE_DIRS)
   for package_name, directories in package_dirs.iteritems():
     for dir in directories:
       current_dir = dir['current_dir']
-      current_dir = current_dir.replace(STACK_ROOT_PATTERN, stack_root)
+      current_dir =  current_dir.format(stack_root)
       dir['current_dir'] = current_dir
+
   return package_dirs
 
 def create(stack_name, package, version, dry_run = False):

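[Editor's note on the hunk above] get_package_dirs() no longer ships a hard-coded dictionary; it reads the conf-select mapping out of the cluster-env stack_packages property (populated from the stack_packages.json file added later in this commit). Below is a minimal, standalone sketch of the JSON shape it expects and of the "{0}" substitution it performs, assuming an HDP stack rooted at /usr/hdp; the single "hadoop" entry mirrors one of the removed _PACKAGE_DIRS defaults.

import json

# Illustrative stand-in for /configurations/cluster-env/stack_packages (a JSON string).
stack_packages_config = json.dumps({
  "HDP": {
    "conf-select": {
      "hadoop": [
        {
          "conf_dir": "/etc/hadoop/conf",
          "current_dir": "{0}/current/hadoop-client/conf"
        }
      ]
    }
  }
})

stack_name = "HDP"        # from /hostLevelParams/stack_name
stack_root = "/usr/hdp"   # assumed value of Script.get_stack_root() for this example

data = json.loads(stack_packages_config)
package_dirs = data[stack_name]["conf-select"]

# Same substitution the new code performs: "{0}" becomes the stack root.
for package_name, directories in package_dirs.items():
    for directory in directories:
        directory["current_dir"] = directory["current_dir"].format(stack_root)

print(package_dirs["hadoop"][0]["current_dir"])  # -> /usr/hdp/current/hadoop-client/conf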
http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
index 723871b..53c8e9f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py
@@ -121,8 +121,6 @@ def get_packages(scope, service_name = None, component_name = None):
   """
   from resource_management.libraries.functions.default import default
 
-  import time
-
   if scope not in _PACKAGE_SCOPES:
     raise Fail("The specified scope of {0} is not valid".format(scope))
 
@@ -140,11 +138,11 @@ def get_packages(scope, service_name = None, component_name = None):
   if stack_name is None:
     raise Fail("The stack name is not present in the command. Packages for stack-select tool cannot be loaded.")
 
-  stack_select_packages_config = default("/configurations/cluster-env/stack_select_packages", None)
-  if stack_select_packages_config is None:
+  stack_packages_config = default("/configurations/cluster-env/stack_packages", None)
+  if stack_packages_config is None:
     raise Fail("The stack packages are not defined on the command. Unable to load packages for the stack-select tool")
 
-  data = json.loads(stack_select_packages_config)
+  data = json.loads(stack_packages_config)
 
   if stack_name not in data:
     raise Fail(

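[Editor's note on the hunk above] The stack_select change is the same migration: get_packages() now pulls its mapping from the shared cluster-env stack_packages property instead of the removed stack_select_packages. A runnable sketch of the lookup-and-validate pattern in that hunk; the command dict and the lookup() helper are stand-ins for Ambari's execution command and its default() function.

import json

# Stand-in for the agent's execution command JSON.
command = {
  "hostLevelParams": {"stack_name": "HDP"},
  "configurations": {"cluster-env": {"stack_packages": json.dumps({"HDP": {}})}}
}

def lookup(path, default_value=None):
  """Walk a /-separated path through the command dict, like Ambari's default()."""
  node = command
  for part in path.strip("/").split("/"):
    if not isinstance(node, dict) or part not in node:
      return default_value
    node = node[part]
  return node

stack_name = lookup("/hostLevelParams/stack_name")
stack_packages_config = lookup("/configurations/cluster-env/stack_packages")
if stack_packages_config is None:
  raise Exception("The stack packages are not defined on the command.")

data = json.loads(stack_packages_config)
if stack_name not in data:
  raise Exception("Cannot find packages for the {0} stack".format(stack_name))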
http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
index 34102b6..91a84ea 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/BlueprintConfigurationProcessor.java
@@ -2948,7 +2948,7 @@ public class BlueprintConfigurationProcessor {
     Set<String> properties = Sets.newHashSet(ConfigHelper.CLUSTER_ENV_STACK_NAME_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_ROOT_PROPERTY, ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY,
         ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY,
-        ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY);
+        ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY);
 
     try {
       Map<String, Map<String, String>> defaultStackProperties = configHelper.getDefaultStackProperties(stackId);

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 5393f81..5ac4c8f 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -92,7 +92,7 @@ public class ConfigHelper {
   public static final String CLUSTER_ENV_STACK_FEATURES_PROPERTY = "stack_features";
   public static final String CLUSTER_ENV_STACK_TOOLS_PROPERTY = "stack_tools";
   public static final String CLUSTER_ENV_STACK_ROOT_PROPERTY = "stack_root";
-  public static final String CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY = "stack_select_packages";
+  public static final String CLUSTER_ENV_STACK_PACKAGES_PROPERTY = "stack_packages";
 
   public static final String HTTP_ONLY = "HTTP_ONLY";
   public static final String HTTPS_ONLY = "HTTPS_ONLY";

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
index 216d39d..052edba 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/FinalUpgradeCatalog.java
@@ -77,7 +77,7 @@ public class FinalUpgradeCatalog extends AbstractUpgradeCatalog {
    * <ul>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_FEATURES_PROPERTY} from stack</li>
    * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_TOOLS_PROPERTY} from stack</li>
-   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY} from stack</li>
+   * <li>Adds/Updates {@link ConfigHelper#CLUSTER_ENV_STACK_PACKAGES_PROPERTY} from stack</li>
    * </ul>
    *
    * Note: Config properties stack_features and stack_tools should always be updated to latest values as defined
@@ -108,7 +108,7 @@ public class FinalUpgradeCatalog extends AbstractUpgradeCatalog {
         for(PropertyInfo property : properties) {
           if(property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_FEATURES_PROPERTY) ||
               property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_TOOLS_PROPERTY) ||
-              property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_SELECT_PACKAGES_PROPERTY)) {
+              property.getName().equals(ConfigHelper.CLUSTER_ENV_STACK_PACKAGES_PROPERTY)) {
             propertyMap.put(property.getName(), property.getValue());
           }
         }

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
index 856446c..ae5181f 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management.core.logger import Logger
 from resource_management.core.exceptions import ClientComponentHasNoStatus
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -56,7 +55,6 @@ class AccumuloClient(Script):
       return
 
     Logger.info("Executing Accumulo Client Upgrade pre-restart")
-    conf_select.select(params.stack_name, "accumulo", params.version)
     stack_select.select_packages(params.version)
 
 if __name__ == "__main__":

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
index d884bcd..6aafb05 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py
@@ -21,7 +21,6 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import check_process_status
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.security_commons import build_expectations
 from resource_management.libraries.functions.security_commons import cached_kinit_executor
@@ -87,7 +86,6 @@ class AccumuloScript(Script):
     stack_component = stack_select.get_package_name()
 
     Logger.info("Executing Accumulo Upgrade pre-restart for 
{0}".format(stack_component))
-    conf_select.select(params.stack_name, "accumulo", params.version)
     stack_select.select_packages(params.version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
index 4a8210d..e234164 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -35,7 +35,6 @@ class AtlasClient(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version_for_stack_feature_checks):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
index 948fe8c..038b723 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py
@@ -22,7 +22,7 @@ import os
 # Local Imports
 from metadata import metadata
 from resource_management import Fail
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_stack_version
@@ -60,7 +60,6 @@ class MetadataServer(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
index 3f9a5bc..ea1d547 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/atlas_client.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.core.exceptions import ClientComponentHasNoStatus
@@ -35,7 +35,6 @@ class AtlasClient(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def install(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
index daaa871..cc19858 100644
--- a/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
+++ b/ambari-server/src/main/resources/common-services/ATLAS/0.7.0.3.0/package/scripts/metadata_server.py
@@ -22,7 +22,7 @@ import os
 # Local Imports
 from metadata import metadata
 from resource_management import Fail
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.core.resources.system import Execute, File
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.version import format_stack_version
@@ -61,7 +61,6 @@ class MetadataServer(Script):
     env.set_params(params)
 
     if check_stack_feature(StackFeature.ATLAS_UPGRADE_SUPPORT, params.version):
-      conf_select.select(params.stack_name, "atlas", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
index 20623f7..8053dcb 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/druid_node.py
@@ -22,7 +22,6 @@ from resource_management import Script
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.format import format
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -52,8 +51,6 @@ class DruidBase(Script):
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
       stack_select.select_packages(params.stack_version)
-    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
-      conf_select.select(params.stack_name, "druid", params.stack_version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
index 36dab51..a5dd4fb 100644
--- a/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
+++ b/ambari-server/src/main/resources/common-services/DRUID/0.9.2/package/scripts/superset.py
@@ -26,7 +26,6 @@ from resource_management.core.resources.system import Execute
 from resource_management.core.source import InlineTemplate
 from resource_management.core.source import Template
 from resource_management.libraries.functions import StackFeature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.format import format
@@ -96,8 +95,6 @@ class Superset(Script):
 
     if params.stack_version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.stack_version):
       stack_select.select_packages(params.version)
-    if params.stack_version and check_stack_feature(StackFeature.CONFIG_VERSIONING, params.stack_version):
-      conf_select.select(params.stack_name, "superset", params.stack_version)
 
   def start(self, env, upgrade_type=None):
     import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
index f75f34f..540027d 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from falcon import falcon
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -53,7 +53,6 @@ class FalconClientLinux(FalconClient):
       return
 
     Logger.info("Executing Falcon Client Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "falcon", params.version)
     stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
index d547a1a..055d6cb 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py
@@ -21,7 +21,6 @@ import falcon_server_upgrade
 
 from resource_management.core.logger import Logger
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import check_process_status
 from resource_management.libraries.functions.security_commons import build_expectations
@@ -81,7 +80,6 @@ class FalconServerLinux(FalconServer):
       return
 
     Logger.info("Executing Falcon Server Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "falcon", params.version)
     stack_select.select_packages(params.version)
 
     falcon_server_upgrade.pre_start_restore()

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
index a21ecf5..4340618 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
@@ -21,7 +21,7 @@ from flume import flume
 from flume import get_desired_state
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names, get_flume_status, get_flume_pid_files
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.logger import Logger
@@ -86,7 +86,6 @@ class FlumeHandlerLinux(FlumeHandler):
       return
 
     Logger.info("Executing Flume Stack Upgrade pre-restart")
-    conf_select.select(params.stack_name, "flume", params.version)
     stack_select.select_packages(params.version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
index 3027bff..650931f 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from hbase import hbase
@@ -56,8 +56,6 @@ class HbaseClientDefault(HbaseClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hbase", params.version)
-
       # phoenix may not always be deployed
       try:
         stack_select.select_packages(params.version)
@@ -65,12 +63,5 @@ class HbaseClientDefault(HbaseClient):
         print "Ignoring error due to missing phoenix-client"
         print str(e)
 
-
-      # set all of the hadoop clients since hbase client is upgraded as part
-      # of the final "CLIENTS" group and we need to ensure that hadoop-client
-      # is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
-
 if __name__ == "__main__":
   HbaseClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
index e6dff39..dfc35fb 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py
@@ -17,7 +17,6 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -57,9 +56,7 @@ class PhoenixQueryServer(Script):
     import params
     env.set_params(params)
 
-    if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
-      # phoenix uses hbase configs
-      conf_select.select(params.stack_name, "hbase", params.version)
+    if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
index a502c1d..b5e2262 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py
@@ -25,7 +25,7 @@ from resource_management.core import shell
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.decorator import retry
@@ -37,7 +37,6 @@ def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-    conf_select.select(params.stack_name, "hbase", params.version)
     stack_select.select_packages(params.version)
 
 def post_regionserver(env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
index 3027bff..b301c75 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/hbase_client.py
@@ -20,7 +20,7 @@ limitations under the License.
 
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from hbase import hbase
@@ -56,8 +56,6 @@ class HbaseClientDefault(HbaseClient):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hbase", params.version)
-
       # phoenix may not always be deployed
       try:
         stack_select.select_packages(params.version)
@@ -66,11 +64,6 @@ class HbaseClientDefault(HbaseClient):
         print str(e)
 
 
-      # set all of the hadoop clients since hbase client is upgraded as part
-      # of the final "CLIENTS" group and we need to ensure that hadoop-client
-      # is also set
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
 
 if __name__ == "__main__":
   HbaseClient().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
index 872a5c1..cdd1495 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/phoenix_queryserver.py
@@ -17,7 +17,6 @@ limitations under the License.
 
 """
 
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -59,7 +58,6 @@ class PhoenixQueryServer(Script):
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.PHOENIX, params.stack_version_formatted):
       # phoenix uses hbase configs
-      conf_select.select(params.stack_name, "hbase", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
index a502c1d..b5e2262 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/2.0.0.3.0/package/scripts/upgrade.py
@@ -25,7 +25,7 @@ from resource_management.core import shell
 from resource_management.core.exceptions import ComponentIsNotRunning
 from resource_management.core.exceptions import Fail
 from resource_management.core.logger import Logger
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.decorator import retry
@@ -37,7 +37,6 @@ def prestart(env):
   import params
 
   if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-    conf_select.select(params.stack_name, "hbase", params.version)
     stack_select.select_packages(params.version)
 
 def post_regionserver(env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index 257ccf9..0aa0bc0 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -23,7 +23,7 @@ from ambari_commons.constants import UPGRADE_TYPE_ROLLING
 from hdfs_datanode import datanode
 from resource_management import Script, Fail, shell, Logger
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions import format
@@ -43,8 +43,7 @@ class DataNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = stack_select.get_package_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-datanode")
 
 
   def install(self, env):
@@ -130,7 +129,6 @@ class DataNodeDefault(DataNode):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 5633cba..0896f30 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -60,7 +60,6 @@ class HdfsClientDefault(HdfsClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
index bb2895e..75b2eeb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py
@@ -19,7 +19,7 @@ limitations under the License.
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -49,7 +49,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 47b8021..50bf1e0 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -29,7 +29,6 @@ from ambari_commons import constants
 from resource_management.libraries.script.script import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -71,8 +70,7 @@ class NameNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = stack_select.get_package_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-namenode")
 
   def install(self, env):
     import params
@@ -196,11 +194,6 @@ class NameNodeDefault(NameNode):
     import params
     env.set_params(params)
 
-    # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
-    # Therefore, we cannot call this code in that scenario.
-    if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
     stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

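This NameNode script changes in two ways: the hdfs binary is now resolved from a fixed component name instead of stack_select.get_package_name(), and the Express Upgrade downgrade guard around the selection call is removed, so select_packages runs unconditionally. A sketch of the resulting methods; the binary accessor's real name is not visible in the hunk, so the name used here is hypothetical:

    class NameNode(Script):
      def hdfs_binary(self):  # hypothetical name; the hunk does not show it
        """
        Get the name or path to the hdfs binary depending on the component name.
        """
        # A fixed component name replaces the stack_select.get_package_name() lookup.
        return get_hdfs_binary("hadoop-hdfs-namenode")

    class NameNodeDefault(NameNode):
      def pre_upgrade_restart(self, env, upgrade_type=None):
        import params
        env.set_params(params)
        # The downgrade guard and the conf_select call are gone; package
        # selection is now performed unconditionally.
        stack_select.select_packages(params.version)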
http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
index 66968b7..f16e260 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py
@@ -24,7 +24,6 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -43,7 +42,6 @@ class NFSGateway(Script):
     env.set_params(params)
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index 0494df0..4977e1c 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -69,7 +69,6 @@ class SNameNodeDefault(SNameNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
       
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
index 628b01a..955ff60 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/zkfc_slave.py
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.service import Service
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -133,7 +133,6 @@ class ZkfcSlaveDefault(ZkfcSlave):
     import params
     env.set_params(params)
     if check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version_for_stack_feature_checks):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
index 9f72aa0..d8fb361 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
@@ -19,7 +19,7 @@ limitations under the License.
 import datanode_upgrade
 from hdfs_datanode import datanode
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -36,8 +36,7 @@ class DataNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-datanode")
 
 
   def install(self, env):
@@ -84,7 +83,6 @@ class DataNodeDefault(DataNode):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
index 5633cba..0896f30 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -60,7 +60,6 @@ class HdfsClientDefault(HdfsClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 @OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
index bb2895e..75b2eeb 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/journalnode.py
@@ -19,7 +19,7 @@ limitations under the License.
 from ambari_commons.constants import UPGRADE_TYPE_NON_ROLLING
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -49,7 +49,6 @@ class JournalNodeDefault(JournalNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
index a904de8..7a0e784 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
@@ -29,7 +29,6 @@ from ambari_commons import constants
 from resource_management.libraries.script.script import Script
 from resource_management.core.resources.system import Execute, File
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -71,8 +70,7 @@ class NameNode(Script):
     """
     Get the name or path to the hdfs binary depending on the component name.
     """
-    component_name = self.get_component_name()
-    return get_hdfs_binary(component_name)
+    return get_hdfs_binary("hadoop-hdfs-namenode")
 
   def install(self, env):
     import params
@@ -195,8 +193,6 @@ class NameNodeDefault(NameNode):
     # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
     # Therefore, we cannot call this code in that scenario.
     if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
-      conf_select.select(params.stack_name, "hadoop", params.version)
-
       stack_select.select_packages(params.version)
 
   def post_upgrade_restart(self, env, upgrade_type=None):

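Unlike the 2.1.0.2.0 NameNode above, this 3.0.0.3.0 script keeps the Express Upgrade downgrade guard and only drops the conf_select call inside it, so package selection stays conditional. A sketch of the resulting block inside pre_upgrade_restart, under the same assumptions as the hunk:

    # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks.
    # Therefore, we cannot call this code in that scenario.
    if upgrade_type != constants.UPGRADE_TYPE_NON_ROLLING or params.upgrade_direction != Direction.DOWNGRADE:
      stack_select.select_packages(params.version)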
http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
index ba38526..a3f9d35 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/nfsgateway.py
@@ -24,7 +24,6 @@ from resource_management.libraries.functions.security_commons import build_expec
   FILE_TYPE_XML
 from hdfs_nfsgateway import nfsgateway
 from hdfs import hdfs
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -44,7 +43,6 @@ class NFSGateway(Script):
     env.set_params(params)
 
     if params.stack_version_formatted and check_stack_feature(StackFeature.NFS, params.stack_version_formatted):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def start(self, env, upgrade_type=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
index 5a4cc5a..f5ff3e1 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
@@ -18,7 +18,7 @@ limitations under the License.
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -69,7 +69,6 @@ class SNameNodeDefault(SNameNode):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
index 6ea9b52..3ff6a3d 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/zkfc_slave.py
@@ -27,7 +27,7 @@ from resource_management.core.exceptions import Fail
 from resource_management.core.resources.system import Directory
 from resource_management.core.resources.service import Service
 from resource_management.core import shell
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.security_commons import build_expectations
@@ -140,7 +140,6 @@ class ZkfcSlaveDefault(ZkfcSlave):
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ZKFC_VERSION_ADVERTISED, params.version) \
         and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 def initialize_ha_zookeeper(params):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
index e6c9aab..2cdfc31 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py
@@ -19,7 +19,7 @@ limitations under the License.
 """
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -56,8 +56,6 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
index 43f0c86..9b5cf43 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py
@@ -22,7 +22,6 @@ import os
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -105,7 +104,6 @@ class HiveMetastoreDefault(HiveMetastore):
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 6c76af8..080d62b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -113,7 +112,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
index df2a295..e8e9666 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py
@@ -36,7 +36,6 @@ from resource_management.core.resources.system import Execute, Directory
 # Imports needed for Rolling/Express Upgrade
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 
@@ -83,7 +82,6 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         stack_select.select_packages(params.version)
-        conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
         resource_created = copy_to_hdfs(

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index 9bd5c6e..efec613 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -19,7 +19,7 @@ Ambari Agent
 
 """
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.libraries.functions.stack_features import check_stack_feature
@@ -76,9 +76,6 @@ class WebHCatServerDefault(WebHCatServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      # webhcat has no conf, but uses hadoop home, so verify that regular hadoop conf is set
-      conf_select.select(params.stack_name, "hive-hcatalog", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
   def get_log_folder(self):

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py
index e6c9aab..2cdfc31 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_client.py
@@ -19,7 +19,7 @@ limitations under the License.
 """
 import sys
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select, stack_select
+from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.core.logger import Logger
@@ -56,8 +56,6 @@ class HiveClientDefault(HiveClient):
     import params
     env.set_params(params)
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
-      conf_select.select(params.stack_name, "hadoop", params.version)
       stack_select.select_packages(params.version)
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
index 43f0c86..9b5cf43 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_metastore.py
@@ -22,7 +22,6 @@ import os
 from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Execute, Directory
 from resource_management.libraries.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.constants import Direction
 from resource_management.libraries.functions.format import format
@@ -105,7 +104,6 @@ class HiveMetastoreDefault(HiveMetastore):
     is_upgrade = params.upgrade_direction == Direction.UPGRADE
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
     if is_upgrade and params.stack_version_formatted_major and \

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py
index 6c76af8..080d62b 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 from resource_management.libraries.functions.check_process_status import check_process_status
@@ -113,7 +112,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
 
     if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
-      conf_select.select(params.stack_name, "hive", params.version)
       stack_select.select_packages(params.version)
 
       # Copy mapreduce.tar.gz and tez.tar.gz to HDFS

http://git-wip-us.apache.org/repos/asf/ambari/blob/97ccf3bf/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
index 3b6fd36..1d2899a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/2.1.0.3.0/package/scripts/hive_server_interactive.py
@@ -36,7 +36,6 @@ from resource_management.core.resources.system import Execute, Directory
 # Imports needed for Rolling/Express Upgrade
 from resource_management.libraries.functions import StackFeature
 from resource_management.libraries.functions.stack_features import check_stack_feature
-from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions.copy_tarball import copy_to_hdfs
 
@@ -84,7 +83,6 @@ class HiveServerInteractiveDefault(HiveServerInteractive):
 
       if params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version):
         stack_select.select_packages(params.version)
-        conf_select.select(params.stack_name, "hive2", params.version)
 
         # Copy hive.tar.gz and tez.tar.gz used by Hive Interactive to HDFS
         resource_created = copy_to_hdfs(
