http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py new file mode 100644 index 0000000..c94d956 --- /dev/null +++ b/ambari-common/src/main/python/resource_management/libraries/functions/stack_select.py @@ -0,0 +1,307 @@ +#!/usr/bin/env python +""" +Licensed to the Apache Software Foundation (ASF) under one +or more contributor license agreements. See the NOTICE file +distributed with this work for additional information +regarding copyright ownership. The ASF licenses this file +to you under the Apache License, Version 2.0 (the +"License"); you may not use this file except in compliance +with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +""" + +import os +import sys +import re +from resource_management.core.logger import Logger +from resource_management.core.exceptions import Fail +from resource_management.core.resources.system import Execute +from resource_management.libraries.functions.default import default +from resource_management.libraries.functions.get_stack_version import get_stack_version +from resource_management.libraries.functions.format import format +from resource_management.libraries.script.script import Script +from resource_management.core.shell import call +from resource_management.libraries.functions.version import format_stack_version +from resource_management.libraries.functions.version_select_util import get_versions_from_stack_root + +STACK_SELECT = '/usr/bin/hdp-select' +STACK_SELECT_PREFIX = ('ambari-python-wrap', STACK_SELECT) + +# hdp-select set oozie-server 2.2.0.0-1234 +TEMPLATE = STACK_SELECT_PREFIX + ('set',) + +# a mapping of Ambari server role to hdp-select component name for all +# non-clients +SERVER_ROLE_DIRECTORY_MAP = { + 'ACCUMULO_MASTER' : 'accumulo-master', + 'ACCUMULO_MONITOR' : 'accumulo-monitor', + 'ACCUMULO_GC' : 'accumulo-gc', + 'ACCUMULO_TRACER' : 'accumulo-tracer', + 'ACCUMULO_TSERVER' : 'accumulo-tablet', + 'ATLAS_SERVER' : 'atlas-server', + 'FLUME_HANDLER' : 'flume-server', + 'FALCON_SERVER' : 'falcon-server', + 'NAMENODE' : 'hadoop-hdfs-namenode', + 'DATANODE' : 'hadoop-hdfs-datanode', + 'SECONDARY_NAMENODE' : 'hadoop-hdfs-secondarynamenode', + 'NFS_GATEWAY' : 'hadoop-hdfs-nfs3', + 'JOURNALNODE' : 'hadoop-hdfs-journalnode', + 'HBASE_MASTER' : 'hbase-master', + 'HBASE_REGIONSERVER' : 'hbase-regionserver', + 'HIVE_METASTORE' : 'hive-metastore', + 'HIVE_SERVER' : 'hive-server2', + 'WEBHCAT_SERVER' : 'hive-webhcat', + 'KAFKA_BROKER' : 'kafka-broker', + 'KNOX_GATEWAY' : 'knox-server', + 'OOZIE_SERVER' : 'oozie-server', + 'RANGER_ADMIN' : 'ranger-admin', + 'RANGER_USERSYNC' : 'ranger-usersync', + 'SPARK_JOBHISTORYSERVER' : 'spark-historyserver', + 
'SPARK_THRIFTSERVER' : 'spark-thriftserver', + 'NIMBUS' : 'storm-nimbus', + 'SUPERVISOR' : 'storm-supervisor', + 'HISTORYSERVER' : 'hadoop-mapreduce-historyserver', + 'APP_TIMELINE_SERVER' : 'hadoop-yarn-timelineserver', + 'NODEMANAGER' : 'hadoop-yarn-nodemanager', + 'RESOURCEMANAGER' : 'hadoop-yarn-resourcemanager', + 'ZOOKEEPER_SERVER' : 'zookeeper-server', + + # ZKFC is tied to NN since it doesn't have its own component in hdp-select and there is + # a requirement that the ZKFC is installed on each NN + 'ZKFC' : 'hadoop-hdfs-namenode' +} + +# mapping of service check to hdp-select component +SERVICE_CHECK_DIRECTORY_MAP = { + "HDFS_SERVICE_CHECK" : "hadoop-client", + "TEZ_SERVICE_CHECK" : "hadoop-client", + "PIG_SERVICE_CHECK" : "hadoop-client", + "HIVE_SERVICE_CHECK" : "hadoop-client", + "OOZIE_SERVICE_CHECK" : "hadoop-client", + "MAHOUT_SERVICE_CHECK" : "mahout-client" +} + +# /usr/hdp/current/hadoop-client/[bin|sbin|libexec|lib] +# /usr/hdp/2.3.0.0-1234/hadoop/[bin|sbin|libexec|lib] +HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}" + +# /usr/hdp/current/hadoop-client +# /usr/hdp/2.3.0.0-1234/hadoop +HADOOP_HOME_DIR_TEMPLATE = "/usr/hdp/{0}/{1}" + +HADOOP_DIR_DEFAULTS = { + "home": "/usr/lib/hadoop", + "libexec": "/usr/lib/hadoop/libexec", + "sbin": "/usr/lib/hadoop/sbin", + "bin": "/usr/bin", + "lib": "/usr/lib/hadoop/lib" +} + +def select_all(version_to_select): + """ + Executes hdp-select on every component for the specified version. If the value passed in is a + stack version such as "2.3", then this will find the latest installed version which + could be "2.3.0.0-9999". If a version is specified instead, such as 2.3.0.0-1234, it will use + that exact version. 
+ :param version_to_select: the version to hdp-select on, such as "2.3" or "2.3.0.0-1234" + """ + # it's an error, but it shouldn't really stop anything from working + if version_to_select is None: + Logger.error("Unable to execute hdp-select after installing because there was no version specified") + return + + Logger.info("Executing hdp-select set all on {0}".format(version_to_select)) + + command = format('{sudo} /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^{version_to_select} | tail -1`') + only_if_command = format('ls -d /usr/hdp/{version_to_select}*') + Execute(command, only_if = only_if_command) + + +def select(component, version): + """ + Executes hdp-select on the specific component and version. Some global + variables that are imported via params/status_params/params_linux will need + to be recalculated after the hdp-select. However, python does not re-import + existing modules. The only way to ensure that the configuration variables are + recalculated is to call reload(...) on each module that has global parameters. + After invoking hdp-select, this function will also reload params, status_params, + and params_linux. + :param component: the hdp-select component, such as oozie-server. If "all", then all components + will be updated. + :param version: the version to set the component to, such as 2.2.0.0-1234 + """ + command = TEMPLATE + (component, version) + Execute(command, sudo=True) + + # don't trust the ordering of modules: + # 1) status_params + # 2) params_linux + # 3) params + modules = sys.modules + param_modules = "status_params", "params_linux", "params" + for moduleName in param_modules: + if moduleName in modules: + module = modules.get(moduleName) + reload(module) + Logger.info("After {0}, reloaded module {1}".format(command, moduleName)) + + +def get_role_component_current_stack_version(): + """ + Gets the current HDP version of the component that this role command is for. 
+ :return: the current HDP version of the specified component or None + """ + stack_select_component = None + role = default("/role", "") + role_command = default("/roleCommand", "") + + if role in SERVER_ROLE_DIRECTORY_MAP: + stack_select_component = SERVER_ROLE_DIRECTORY_MAP[role] + elif role_command == "SERVICE_CHECK" and role in SERVICE_CHECK_DIRECTORY_MAP: + stack_select_component = SERVICE_CHECK_DIRECTORY_MAP[role] + + if stack_select_component is None: + return None + + current_stack_version = get_stack_version(stack_select_component) + + if current_stack_version is None: + Logger.warning("Unable to determine hdp-select version for {0}".format( + stack_select_component)) + else: + Logger.info("{0} is currently at version {1}".format( + stack_select_component, current_stack_version)) + + return current_stack_version + + +def get_hadoop_dir(target, force_latest_on_upgrade=False): + """ + Return the hadoop shared directory in the following override order + 1. Use default for 2.1 and lower + 2. If 2.2 and higher, use /usr/hdp/current/hadoop-client/{target} + 3. If 2.2 and higher AND for an upgrade, use /usr/hdp/<version>/hadoop/{target}. + However, if the upgrade has not yet invoked hdp-select, return the current + version of the component. 
+ :target: the target directory + :force_latest_on_upgrade: if True, then this will return the "current" directory + without the HDP version built into the path, such as /usr/hdp/current/hadoop-client + """ + + if not target in HADOOP_DIR_DEFAULTS: + raise Fail("Target {0} not defined".format(target)) + + hadoop_dir = HADOOP_DIR_DEFAULTS[target] + + if Script.is_stack_greater_or_equal("2.2"): + # home uses a different template + if target == "home": + hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format("current", "hadoop-client") + else: + hadoop_dir = HADOOP_DIR_TEMPLATE.format("current", "hadoop-client", target) + + # if we are not forcing "current" for HDP 2.2, then attempt to determine + # if the exact version needs to be returned in the directory + if not force_latest_on_upgrade: + stack_info = _get_upgrade_stack() + + if stack_info is not None: + stack_version = stack_info[1] + + # determine if hdp-select has been run and if not, then use the current + # hdp version until this component is upgraded + current_stack_version = get_role_component_current_stack_version() + if current_stack_version is not None and stack_version != current_stack_version: + stack_version = current_stack_version + + if target == "home": + # home uses a different template + hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_version, "hadoop") + else: + hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_version, "hadoop", target) + + return hadoop_dir + +def get_hadoop_dir_for_stack_version(target, stack_version): + """ + Return the hadoop shared directory for the provided stack version. This is necessary + when folder paths of downgrade-source stack-version are needed after hdp-select. 
+ :target: the target directory + :stack_version: stack version to get hadoop dir for + """ + + if not target in HADOOP_DIR_DEFAULTS: + raise Fail("Target {0} not defined".format(target)) + + hadoop_dir = HADOOP_DIR_DEFAULTS[target] + + formatted_stack_version = format_stack_version(stack_version) + if Script.is_stack_greater_or_equal_to(formatted_stack_version, "2.2"): + # home uses a different template + if target == "home": + hadoop_dir = HADOOP_HOME_DIR_TEMPLATE.format(stack_version, "hadoop") + else: + hadoop_dir = HADOOP_DIR_TEMPLATE.format(stack_version, "hadoop", target) + + return hadoop_dir + + +def _get_upgrade_stack(): + """ + Gets the stack name and stack version if an upgrade is currently in progress. + :return: the stack name and stack version as a tuple, or None if an + upgrade is not in progress. + """ + from resource_management.libraries.functions.default import default + direction = default("/commandParams/upgrade_direction", None) + stack_name = default("/hostLevelParams/stack_name", None) + stack_version = default("/commandParams/version", None) + + if direction and stack_name and stack_version: + return (stack_name, stack_version) + + return None + + +def get_stack_versions(stack_root): + """ + Gets list of stack versions installed on the host. + By default a call to hdp-select versions is made to get the list of installed stack versions. + As a fallback list of installed versions is collected from stack version directories in stack install root. + :param stack_root: Stack install root + :return: Returns list of installed stack versions. 
+ """ + code, out = call(STACK_SELECT_PREFIX + ('versions',)) + versions = [] + if 0 == code: + for line in out.splitlines(): + versions.append(line.rstrip('\n')) + if not versions: + versions = get_versions_from_stack_root(stack_root) + return versions + +def get_stack_version_before_install(component_name): + """ + Works in the similar way to 'hdp-select status component', + but also works for not yet installed packages. + + Note: won't work if doing initial install. + """ + component_dir = HADOOP_HOME_DIR_TEMPLATE.format("current", component_name) + if os.path.islink(component_dir): + stack_version = os.path.basename(os.path.dirname(os.readlink(component_dir))) + match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', stack_version) + if match is None: + Logger.info('Failed to get extracted version with hdp-select in method get_stack_version_before_install') + return None # lazy fail + return stack_version + else: + return None
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-common/src/main/python/resource_management/libraries/functions/version.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version.py b/ambari-common/src/main/python/resource_management/libraries/functions/version.py index 6269989..2500430 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/version.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/version.py @@ -34,7 +34,7 @@ def _normalize(v, desired_segments=0): return [int(x) for x in v_list] -def format_hdp_stack_version(input): +def format_stack_version(input): """ :param input: Input string, e.g. "2.2" or "GlusterFS", or "2.0.6.GlusterFS", or "2.2.0.1-885" :return: Returns a well-formatted HDP stack version of the form #.#.#.# as a string. @@ -67,11 +67,11 @@ def compare_versions(version1, version2, format=False): Stack Version 2.0.6.0 vs 2.2.0.0 :param version1: First parameter for version :param version2: Second parameter for version - :param format: optionally format the versions via format_hdp_stack_version before comparing them + :param format: optionally format the versions via format_stack_version before comparing them :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2 """ - v1 = version1 if not format else format_hdp_stack_version(version1) - v2 = version2 if not format else format_hdp_stack_version(version2) + v1 = version1 if not format else format_stack_version(version1) + v2 = version2 if not format else format_stack_version(version2) max_segments = max(len(v1.split(".")), len(v2.split("."))) return cmp(_normalize(v1, desired_segments=max_segments), _normalize(v2, desired_segments=max_segments)) \ No newline at end of file 
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py index f1a484b..95c5cba 100644 --- a/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py +++ b/ambari-common/src/main/python/resource_management/libraries/functions/version_select_util.py @@ -47,23 +47,23 @@ def get_component_version(stack_name, component_name): if stack_name == "HDP": tmpfile = tempfile.NamedTemporaryFile() - get_hdp_comp_version_cmd = "" + get_stack_comp_version_cmd = "" try: # This is necessary because Ubuntu returns "stdin: is not a tty", see AMBARI-8088 with open(tmpfile.name, 'r') as file: - get_hdp_comp_version_cmd = '/usr/bin/hdp-select status %s > %s' % (component_name, tmpfile.name) - code, stdoutdata = shell.call(get_hdp_comp_version_cmd) + get_stack_comp_version_cmd = '/usr/bin/hdp-select status %s > %s' % (component_name, tmpfile.name) + code, stdoutdata = shell.call(get_stack_comp_version_cmd) out = file.read() if code != 0 or out is None: raise Exception("Code is nonzero or output is empty") - Logger.debug("Command: %s\nOutput: %s" % (get_hdp_comp_version_cmd, str(out))) + Logger.debug("Command: %s\nOutput: %s" % (get_stack_comp_version_cmd, str(out))) matches = re.findall(r"([\d\.]+\-\d+)", out) version = matches[0] if matches and len(matches) > 0 else None except Exception, e: Logger.error("Could not determine HDP version for component %s by calling '%s'. Return Code: %s, Output: %s." 
% - (component_name, get_hdp_comp_version_cmd, str(code), str(out))) + (component_name, get_stack_comp_version_cmd, str(code), str(out))) elif stack_name == "HDPWIN": pass elif stack_name == "GlusterFS": http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-common/src/main/python/resource_management/libraries/script/script.py ---------------------------------------------------------------------- diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py index 5e76562..a8098a0 100644 --- a/ambari-common/src/main/python/resource_management/libraries/script/script.py +++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py @@ -42,7 +42,7 @@ from resource_management.core.exceptions import Fail, ClientComponentHasNoStatus from resource_management.core.resources.packaging import Package from resource_management.libraries.functions.version_select_util import get_component_version from resource_management.libraries.functions.version import compare_versions -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.constants import Direction from resource_management.libraries.functions import packages_analyzer from resource_management.libraries.script.config_dictionary import ConfigDictionary, UnknownConfiguration @@ -52,7 +52,7 @@ from contextlib import closing import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set. 
if OSCheck.is_windows_family(): - from resource_management.libraries.functions.install_hdp_msi import install_windows_msi + from resource_management.libraries.functions.install_windows_msi import install_windows_msi from resource_management.libraries.functions.reload_windows_env import reload_windows_env from resource_management.libraries.functions.zip_archive import archive_dir from resource_management.libraries.resources import Msi @@ -177,8 +177,8 @@ class Script(object): """ from resource_management.libraries.functions.default import default stack_version_unformatted = str(default("/hostLevelParams/stack_version", "")) - hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) - if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0: + stack_version_formatted = format_stack_version(stack_version_unformatted) + if stack_version_formatted != "" and compare_versions(stack_version_formatted, '2.2') >= 0: if command_name.lower() == "status": request_version = default("/commandParams/request_version", None) if request_version is not None: @@ -259,13 +259,13 @@ class Script(object): before the call. However takes a bit of time, so better to avoid. - :return: hdp version including the build number. e.g.: 2.3.4.0-1234. + :return: stack version including the build number. e.g.: 2.3.4.0-1234. """ # preferred way is to get the actual selected version of current component component_name = self.get_component_name() if not Script.stack_version_from_distro_select and component_name: - from resource_management.libraries.functions import hdp_select - Script.stack_version_from_distro_select = hdp_select.get_hdp_version_before_install(component_name) + from resource_management.libraries.functions import stack_select + Script.stack_version_from_distro_select = stack_select.get_stack_version_before_install(component_name) # if hdp-select has not yet been done (situations like first install), we can use hdp-select version itself. 
if not Script.stack_version_from_distro_select: @@ -329,7 +329,7 @@ class Script(object): return default("/hostLevelParams/stack_name", None) @staticmethod - def get_hdp_stack_version(): + def get_stack_version(): """ Gets the normalized version of the HDP stack in the form #.#.#.# if it is present on the configurations sent. @@ -348,7 +348,7 @@ class Script(object): if stack_version_unformatted is None or stack_version_unformatted == '': return None - return format_hdp_stack_version(stack_version_unformatted) + return format_stack_version(stack_version_unformatted) @staticmethod @@ -360,57 +360,57 @@ class Script(object): @staticmethod - def is_hdp_stack_greater(formatted_hdp_stack_version, compare_to_version): + def is_stack_greater(stack_version_formatted, compare_to_version): """ - Gets whether the provided formatted_hdp_stack_version (normalized) + Gets whether the provided stack_version_formatted (normalized) is greater than the specified stack version - :param formatted_hdp_stack_version: the version of stack to compare + :param stack_version_formatted: the version of stack to compare :param compare_to_version: the version of stack to compare to :return: True if the command's stack is greater than the specified version """ - if formatted_hdp_stack_version is None or formatted_hdp_stack_version == "": + if stack_version_formatted is None or stack_version_formatted == "": return False - return compare_versions(formatted_hdp_stack_version, compare_to_version) > 0 + return compare_versions(stack_version_formatted, compare_to_version) > 0 @staticmethod - def is_hdp_stack_greater_or_equal(compare_to_version): + def is_stack_greater_or_equal(compare_to_version): """ Gets whether the hostLevelParams/stack_version, after being normalized, is greater than or equal to the specified stack version :param compare_to_version: the version to compare to :return: True if the command's stack is greater than or equal the specified version """ - return 
Script.is_hdp_stack_greater_or_equal_to(Script.get_hdp_stack_version(), compare_to_version) + return Script.is_stack_greater_or_equal_to(Script.get_stack_version(), compare_to_version) @staticmethod - def is_hdp_stack_greater_or_equal_to(formatted_hdp_stack_version, compare_to_version): + def is_stack_greater_or_equal_to(stack_version_formatted, compare_to_version): """ - Gets whether the provided formatted_hdp_stack_version (normalized) + Gets whether the provided stack_version_formatted (normalized) is greater than or equal to the specified stack version - :param formatted_hdp_stack_version: the version of stack to compare + :param stack_version_formatted: the version of stack to compare :param compare_to_version: the version of stack to compare to :return: True if the command's stack is greater than or equal to the specified version """ - if formatted_hdp_stack_version is None or formatted_hdp_stack_version == "": + if stack_version_formatted is None or stack_version_formatted == "": return False - return compare_versions(formatted_hdp_stack_version, compare_to_version) >= 0 + return compare_versions(stack_version_formatted, compare_to_version) >= 0 @staticmethod - def is_hdp_stack_less_than(compare_to_version): + def is_stack_less_than(compare_to_version): """ Gets whether the hostLevelParams/stack_version, after being normalized, is less than the specified stack version :param compare_to_version: the version to compare to :return: True if the command's stack is less than the specified version """ - hdp_stack_version = Script.get_hdp_stack_version() + stack_version_formatted = Script.get_stack_version() - if hdp_stack_version is None: + if stack_version_formatted is None: return False - return compare_versions(hdp_stack_version, compare_to_version) < 0 + return compare_versions(stack_version_formatted, compare_to_version) < 0 def install(self, env): """ 
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py index febee1c..fd975c5 100644 --- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_client.py @@ -21,7 +21,7 @@ limitations under the License. from resource_management.core.logger import Logger from resource_management.core.exceptions import ClientComponentHasNoStatus from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.script.script import Script from accumulo_configuration import setup_conf_dir @@ -54,12 +54,12 @@ class AccumuloClient(Script): # this function should not execute if the version can't be determined or # is not at least HDP 2.2.0.0 - if Script.is_hdp_stack_less_than("2.2"): + if Script.is_stack_less_than("2.2"): return Logger.info("Executing Accumulo Client Upgrade pre-restart") conf_select.select(params.stack_name, "accumulo", params.version) - hdp_select.select("accumulo-client", params.version) + stack_select.select("accumulo-client", params.version) if __name__ == "__main__": AccumuloClient().execute() http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py index eda333d..12ca388 100644 --- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_script.py @@ -22,7 +22,7 @@ from resource_management.core.logger import Logger from resource_management.libraries.functions import format from resource_management.libraries.functions import check_process_status from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.security_commons import build_expectations from resource_management.libraries.functions.security_commons import cached_kinit_executor from resource_management.libraries.functions.security_commons import get_params_from_filesystem @@ -37,7 +37,7 @@ class AccumuloScript(Script): # a mapping between the component named used by these scripts and the name # which is used by hdp-select - COMPONENT_TO_HDP_SELECT_MAPPING = { + COMPONENT_TO_STACK_SELECT_MAPPING = { "gc" : "accumulo-gc", "master" : "accumulo-master", "monitor" : "accumulo-monitor", @@ -55,11 +55,11 @@ class AccumuloScript(Script): :return: the name of the component on the HDP stack which is used by hdp-select """ - if self.component not in self.COMPONENT_TO_HDP_SELECT_MAPPING: + if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING: return None - hdp_component = self.COMPONENT_TO_HDP_SELECT_MAPPING[self.component] - return {"HDP": hdp_component} + stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component] + return {"HDP": stack_component} def install(self, env): @@ -102,21 +102,21 @@ class 
AccumuloScript(Script): # this function should not execute if the version can't be determined or # is not at least HDP 2.2.0.0 - if Script.is_hdp_stack_less_than("2.2"): + if Script.is_stack_less_than("2.2"): return - if self.component not in self.COMPONENT_TO_HDP_SELECT_MAPPING: + if self.component not in self.COMPONENT_TO_STACK_SELECT_MAPPING: Logger.info("Unable to execute an upgrade for unknown component {0}".format(self.component)) raise Fail("Unable to execute an upgrade for unknown component {0}".format(self.component)) - hdp_component = self.COMPONENT_TO_HDP_SELECT_MAPPING[self.component] + stack_component = self.COMPONENT_TO_STACK_SELECT_MAPPING[self.component] - Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(hdp_component)) + Logger.info("Executing Accumulo Upgrade pre-restart for {0}".format(stack_component)) conf_select.select(params.stack_name, "accumulo", params.version) - hdp_select.select(hdp_component, params.version) + stack_select.select(stack_component, params.version) # some accumulo components depend on the client, so update that too - hdp_select.select("accumulo-client", params.version) + stack_select.select("accumulo-client", params.version) def security_status(self, env): http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py index 993d4cf..a8aebbf 100644 --- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py @@ -18,10 +18,10 @@ limitations under the License. 
""" from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.default import default from resource_management.libraries.functions.get_bare_principal import get_bare_principal from resource_management.libraries.script.script import Script @@ -39,10 +39,10 @@ security_enabled = status_params.security_enabled stack_name = default("/hostLevelParams/stack_name", None) version = default("/commandParams/version", None) stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) has_secure_user_auth = False -if Script.is_hdp_stack_greater_or_equal("2.3"): +if Script.is_stack_greater_or_equal("2.3"): has_secure_user_auth = True # configuration directories @@ -50,8 +50,8 @@ conf_dir = status_params.conf_dir server_conf_dir = status_params.server_conf_dir # service locations -hadoop_prefix = hdp_select.get_hadoop_dir("home") -hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") +hadoop_prefix = stack_select.get_hadoop_dir("home") +hadoop_bin_dir = stack_select.get_hadoop_dir("bin") zookeeper_home = "/usr/hdp/current/zookeeper-client" # the configuration direction for HDFS/YARN/MapR is the hadoop config http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py index 59e0562..d000846 100644 --- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py +++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/atlas_client.py @@ -21,7 +21,7 @@ limitations under the License. import sys from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from metadata import metadata @@ -37,9 +37,9 @@ class AtlasClient(Script): # import params # env.set_params(params) # - # if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0: + # if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0: # conf_select.select(params.stack_name, "atlas", params.version) - # hdp_select.select("atlas-client", params.version) + # stack_select.select("atlas-client", params.version) def install(self, env): self.install_packages(env) http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py index 9ba519f..5a39278 100644 --- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py +++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/metadata_server.py @@ -18,10 +18,10 @@ limitations under the 
License. """ from metadata import metadata from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management import Execute, check_process_status, Script from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.security_commons import build_expectations, \ get_params_from_filesystem, validate_security_config_properties, \ FILE_TYPE_PROPERTIES @@ -43,9 +43,9 @@ class MetadataServer(Script): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.3.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.3.0.0') >= 0: # conf_select.select(params.stack_name, "atlas", params.version) - hdp_select.select("atlas-server", params.version) + stack_select.select("atlas-server", params.version) def start(self, env, upgrade_type=None): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py index eb2d816..38c2c9b 100644 --- a/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/ATLAS/0.1.0.2.3/package/scripts/params.py @@ -19,7 +19,7 @@ limitations under the License. 
""" import os import sys -from resource_management import format_hdp_stack_version, Script +from resource_management import format_stack_version, Script from resource_management.libraries.functions import format from resource_management.libraries.functions.default import default @@ -46,7 +46,7 @@ version = default("/commandParams/version", None) # hdp version stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) metadata_home = os.environ['METADATA_HOME_DIR'] if 'METADATA_HOME_DIR' in os.environ else '/usr/hdp/current/atlas-server' metadata_bin = format("{metadata_home}/bin") http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py index ef65ecb..2894844 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_client.py @@ -19,7 +19,7 @@ limitations under the License. 
from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from falcon import falcon from ambari_commons import OSConst from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl @@ -49,12 +49,12 @@ class FalconClientLinux(FalconClient): # this function should not execute if the version can't be determined or # is not at least HDP 2.2.0.0 - if not params.version or compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') < 0: + if not params.version or compare_versions(format_stack_version(params.version), '2.2.0.0') < 0: return Logger.info("Executing Falcon Client Stack Upgrade pre-restart") conf_select.select(params.stack_name, "falcon", params.version) - hdp_select.select("falcon-client", params.version) + stack_select.select("falcon-client", params.version) def security_status(self, env): import status_params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py index 13401dc..ccc1c9d 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon_server.py @@ -22,7 +22,7 @@ import falcon_server_upgrade from resource_management.core.logger import Logger from resource_management.libraries.script import Script from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from 
resource_management.libraries.functions import stack_select from resource_management.libraries.functions import check_process_status from resource_management.libraries.functions.security_commons import build_expectations from resource_management.libraries.functions.security_commons import cached_kinit_executor @@ -77,12 +77,12 @@ class FalconServerLinux(FalconServer): # this function should not execute if the version can't be determined or # is not at least HDP 2.2.0.0 - if Script.is_hdp_stack_less_than("2.2"): + if Script.is_stack_less_than("2.2"): return Logger.info("Executing Falcon Server Stack Upgrade pre-restart") conf_select.select(params.stack_name, "falcon", params.version) - hdp_select.select("falcon-server", params.version) + stack_select.select("falcon-server", params.version) falcon_server_upgrade.pre_start_restore() def security_status(self, env): http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py index 74ac010..707c4ed 100644 --- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py @@ -19,9 +19,9 @@ limitations under the License. 
import status_params from resource_management.libraries.resources.hdfs_resource import HdfsResource -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.default import default from resource_management.libraries.functions import get_kinit_path from resource_management.libraries.script.script import Script @@ -35,14 +35,14 @@ stack_name = default("/hostLevelParams/stack_name", None) version = default("/commandParams/version", None) stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) etc_prefix_dir = "/etc/falcon" # hadoop params -hadoop_home_dir = hdp_select.get_hadoop_dir("home") -hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") +hadoop_home_dir = stack_select.get_hadoop_dir("home") +hadoop_bin_dir = stack_select.get_hadoop_dir("bin") -if Script.is_hdp_stack_greater_or_equal("2.2"): +if Script.is_stack_greater_or_equal("2.2"): # if this is a server action, then use the server binaries; smoke tests # use the client binaries http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py index 399ff22..2c06c40 100644 --- 
a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py +++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py @@ -46,7 +46,7 @@ else: hadoop_conf_dir = conf_select.get_hadoop_conf_dir() falcon_conf_dir = "/etc/falcon/conf" - if Script.is_hdp_stack_greater_or_equal("2.2"): + if Script.is_stack_greater_or_equal("2.2"): falcon_conf_dir = format("/usr/hdp/current/{component_directory}/conf") # Security related/required params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py index 937547c..60138bb 100644 --- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py +++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py @@ -24,7 +24,7 @@ from flume import get_desired_state from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names from resource_management.libraries.functions.flume_agent_helper import get_flume_status @@ -89,12 +89,12 @@ class FlumeHandlerLinux(FlumeHandler): # this function should not execute if the version can't be determined or # is not at least HDP 2.2.0.0 - if not params.version or Script.is_hdp_stack_less_than("2.2"): + if not params.version or Script.is_stack_less_than("2.2"): return Logger.info("Executing Flume Stack 
Upgrade pre-restart") conf_select.select(params.stack_name, "flume", params.version) - hdp_select.select("flume-server", params.version) + stack_select.select("flume-server", params.version) # only restore on upgrade, not downgrade if params.upgrade_direction == Direction.UPGRADE: http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py index 5ec879c..29f71a7 100644 --- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py +++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params.py @@ -19,7 +19,7 @@ limitations under the License. from ambari_commons import OSCheck from resource_management.libraries.functions.default import default from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.default import default from resource_management.libraries.script.script import Script @@ -42,7 +42,7 @@ proxyuser_group = config['configurations']['hadoop-env']['proxyuser_group'] security_enabled = False stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) # hadoop default parameters flume_bin = '/usr/bin/flume-ng' @@ -50,7 +50,7 @@ flume_hive_home = '/usr/lib/hive' flume_hcat_home = '/usr/lib/hive-hcatalog' # hadoop parameters for 2.2+ -if 
Script.is_hdp_stack_greater_or_equal("2.2"): +if Script.is_stack_greater_or_equal("2.2"): flume_bin = '/usr/hdp/current/flume-server/bin/flume-ng' flume_hive_home = '/usr/hdp/current/hive-metastore' flume_hcat_home = '/usr/hdp/current/hive-webhcat' http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py index 22f3324..1067ba7 100644 --- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/params_linux.py @@ -29,7 +29,7 @@ upgrade_direction = default("/commandParams/upgrade_direction", Direction.UPGRAD stack_version_unformatted = str(config['hostLevelParams']['stack_version']) flume_conf_dir = '/etc/flume/conf' -if Script.is_hdp_stack_greater_or_equal("2.2"): +if Script.is_stack_greater_or_equal("2.2"): flume_conf_dir = '/usr/hdp/current/flume-server/conf' flume_user = 'flume' http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py index 8c42848..2c3493a 100644 --- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py +++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/hawqmaster.py @@ -21,7 +21,7 @@ from 
resource_management.core.resources.system import Execute from resource_management.core.logger import Logger from resource_management.libraries.functions.check_process_status import check_process_status try: - from resource_management.libraries.functions import hdp_select as hadoop_select + from resource_management.libraries.functions import stack_select as hadoop_select except ImportError: from resource_management.libraries.functions import phd_select as hadoop_select http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py index ac34d40..c31bbf6 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase_client.py @@ -21,7 +21,7 @@ limitations under the License. 
import sys from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from hbase import hbase from ambari_commons import OSCheck, OSConst from ambari_commons.os_family_impl import OsFamilyImpl @@ -57,13 +57,13 @@ class HbaseClientDefault(HbaseClient): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hbase", params.version) - hdp_select.select("hbase-client", params.version) + stack_select.select("hbase-client", params.version) # phoenix may not always be deployed try: - hdp_select.select("phoenix-client", params.version) + stack_select.select("phoenix-client", params.version) except Exception as e: print "Ignoring error due to missing phoenix-client" print str(e) @@ -73,7 +73,7 @@ class HbaseClientDefault(HbaseClient): # of the final "CLIENTS" group and we need to ensure that hadoop-client # is also set conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-client", params.version) + stack_select.select("hadoop-client", params.version) if __name__ == "__main__": http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py index f9694c6..01503fe 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py +++ 
b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py @@ -28,9 +28,9 @@ from ambari_commons.str_utils import cbool, cint from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.default import default from resource_management.libraries.functions import get_kinit_path from resource_management.libraries.functions import is_empty @@ -51,10 +51,10 @@ component_directory = status_params.component_directory etc_prefix_dir = "/etc/hbase" stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) # hadoop default parameters -hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") +hadoop_bin_dir = stack_select.get_hadoop_dir("bin") hadoop_conf_dir = conf_select.get_hadoop_conf_dir() daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh" region_mover = "/usr/lib/hbase/bin/region_mover.rb" @@ -63,7 +63,7 @@ hbase_cmd = "/usr/lib/hbase/bin/hbase" hbase_max_direct_memory_size = None # hadoop parameters for 2.2+ -if Script.is_hdp_stack_greater_or_equal("2.2"): +if Script.is_stack_greater_or_equal("2.2"): daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh') region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb') region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb') 
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py index 693bb08..87e4899 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/phoenix_queryserver.py @@ -18,7 +18,7 @@ limitations under the License. """ from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.script import Script from phoenix_service import phoenix_service from hbase import hbase @@ -59,10 +59,10 @@ class PhoenixQueryServer(Script): import params env.set_params(params) - if Script.is_hdp_stack_greater_or_equal("2.3"): + if Script.is_stack_greater_or_equal("2.3"): # phoenix uses hbase configs conf_select.select(params.stack_name, "hbase", params.version) - hdp_select.select("phoenix-server", params.version) + stack_select.select("phoenix-server", params.version) def status(self, env): http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py index 014e8d7..535c821 100644 --- 
a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/status_params.py @@ -51,5 +51,5 @@ else: hbase_conf_dir = "/etc/hbase/conf" limits_conf_dir = "/etc/security/limits.d" - if Script.is_hdp_stack_greater_or_equal("2.2"): + if Script.is_stack_greater_or_equal("2.2"): hbase_conf_dir = format("/usr/hdp/current/{component_directory}/conf") \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py index 00040fa..c5ba682 100644 --- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py +++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/upgrade.py @@ -22,16 +22,16 @@ from resource_management import * from resource_management.core.resources.system import Execute from resource_management.core import shell from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.decorator import retry def prestart(env, hdp_component): import params - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and 
compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hbase", params.version) - hdp_select.select(hdp_component, params.version) + stack_select.select(hdp_component, params.version) def post_regionserver(env): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py index 5adeab4..3cdfda9 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py @@ -20,8 +20,8 @@ import datanode_upgrade from hdfs_datanode import datanode from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, FILE_TYPE_XML from hdfs import hdfs @@ -87,9 +87,9 @@ class DataNodeDefault(DataNode): Logger.info("Executing DataNode Stack Upgrade pre-restart") import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: 
conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-hdfs-datanode", params.version) + stack_select.select("hadoop-hdfs-datanode", params.version) def post_upgrade_restart(self, env, upgrade_type=None): Logger.info("Executing DataNode Stack Upgrade post-restart") http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py index 21c0eda..c5ae35e 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py @@ -19,7 +19,7 @@ limitations under the License. 
from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ FILE_TYPE_XML @@ -60,9 +60,9 @@ class HdfsClientDefault(HdfsClient): def pre_upgrade_restart(self, env, upgrade_type=None): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-client", params.version) + stack_select.select("hadoop-client", params.version) def security_status(self, env): import status_params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py index 7715f6c..6f26b40 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/journalnode.py @@ -19,9 +19,9 @@ limitations under the License. 
from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.version import compare_versions, \ - format_hdp_stack_version + format_stack_version from resource_management.libraries.functions.format import format from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ @@ -50,9 +50,9 @@ class JournalNodeDefault(JournalNode): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-hdfs-journalnode", params.version) + stack_select.select("hadoop-hdfs-journalnode", params.version) def start(self, env, upgrade_type=None): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py index 910bc0a..02905ec 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py @@ -29,9 +29,9 @@ from resource_management import Script from resource_management.core.resources.system import Execute, File from resource_management.core import shell from 
resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import Direction -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.format import format from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ @@ -190,14 +190,14 @@ class NameNodeDefault(NameNode): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: # When downgrading an Express Upgrade, the first thing we do is to revert the symlinks. # Therefore, we cannot call this code in that scenario. 
call_if = [("rolling", "upgrade"), ("rolling", "downgrade"), ("nonrolling", "upgrade")] for e in call_if: if (upgrade_type, params.upgrade_direction) == e: conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-hdfs-namenode", params.version) + stack_select.select("hadoop-hdfs-namenode", params.version) def post_upgrade_restart(self, env, upgrade_type=None): Logger.info("Executing Stack Upgrade post-restart") http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py index 4b9ad06..c705fca 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/nfsgateway.py @@ -25,8 +25,8 @@ from resource_management.libraries.functions.security_commons import build_expec from hdfs_nfsgateway import nfsgateway from hdfs import hdfs from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions.version import compare_versions, format_stack_version class NFSGateway(Script): @@ -45,9 +45,9 @@ class NFSGateway(Script): import params env.set_params(params) - if Script.is_hdp_stack_greater_or_equal('2.3.0.0'): + if Script.is_stack_greater_or_equal('2.3.0.0'): conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-hdfs-nfs3", params.version) + 
stack_select.select("hadoop-hdfs-nfs3", params.version) def start(self, env, upgrade_type=None): import params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py index 5242694..f0bf4d2 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py @@ -27,9 +27,9 @@ from ambari_commons.os_check import OSCheck from ambari_commons.str_utils import cbool, cint from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.default import default from resource_management.libraries.functions import get_klist_path from resource_management.libraries.functions import get_kinit_path @@ -47,7 +47,7 @@ tmp_dir = Script.get_tmp_dir() stack_name = default("/hostLevelParams/stack_name", None) upgrade_direction = default("/commandParams/upgrade_direction", None) stack_version_unformatted = str(config['hostLevelParams']['stack_version']) -hdp_stack_version = format_hdp_stack_version(stack_version_unformatted) +stack_version_formatted = format_stack_version(stack_version_unformatted) agent_stack_retry_on_unavailability = 
cbool(config["hostLevelParams"]["agent_stack_retry_on_unavailability"]) agent_stack_retry_count = cint(config["hostLevelParams"]["agent_stack_retry_count"]) @@ -77,17 +77,17 @@ secure_dn_ports_are_in_use = False # hadoop default parameters mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*" -hadoop_libexec_dir = hdp_select.get_hadoop_dir("libexec") -hadoop_bin = hdp_select.get_hadoop_dir("sbin") -hadoop_bin_dir = hdp_select.get_hadoop_dir("bin") -hadoop_home = hdp_select.get_hadoop_dir("home") +hadoop_libexec_dir = stack_select.get_hadoop_dir("libexec") +hadoop_bin = stack_select.get_hadoop_dir("sbin") +hadoop_bin_dir = stack_select.get_hadoop_dir("bin") +hadoop_home = stack_select.get_hadoop_dir("home") hadoop_secure_dn_user = hdfs_user hadoop_conf_dir = conf_select.get_hadoop_conf_dir() hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure") -hadoop_lib_home = hdp_select.get_hadoop_dir("lib") +hadoop_lib_home = stack_select.get_hadoop_dir("lib") # hadoop parameters for 2.2+ -if Script.is_hdp_stack_greater_or_equal("2.2"): +if Script.is_stack_greater_or_equal("2.2"): mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*" if not security_enabled: @@ -114,7 +114,7 @@ limits_conf_dir = "/etc/security/limits.d" hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000") hdfs_user_nproc_limit = default("/configurations/hadoop-env/hdfs_user_nproc_limit", "65536") -create_lib_snappy_symlinks = not Script.is_hdp_stack_greater_or_equal("2.2") +create_lib_snappy_symlinks = not Script.is_stack_greater_or_equal("2.2") jsvc_path = "/usr/lib/bigtop-utils" execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py index b8a1726..f96ac01 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py @@ -19,8 +19,8 @@ limitations under the License. from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions import stack_select +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ FILE_TYPE_XML @@ -71,9 +71,9 @@ class SNameNodeDefault(SNameNode): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-hdfs-secondarynamenode", params.version) + stack_select.select("hadoop-hdfs-secondarynamenode", params.version) def security_status(self, env): import status_params http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py index e59dd78..c626028 100644 --- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py +++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/utils.py @@ -224,12 +224,12 @@ def service(action=None, name=None, user=None, options="", create_pid_dir=False, hadoop_secure_dn_pid_file = format("{hadoop_secure_dn_pid_dir}/hadoop_secure_dn.pid") # At Champlain stack and further, we may start datanode as a non-root even in secure cluster - if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) or params.secure_dn_ports_are_in_use: + if not (params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0) or params.secure_dn_ports_are_in_use: user = "root" pid_file = format( "{hadoop_pid_dir_prefix}/{hdfs_user}/hadoop-{hdfs_user}-{name}.pid") - if action == 'stop' and (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0) and \ + if action == 'stop' and (params.stack_version_formatted != "" and compare_versions(params.stack_version_formatted, '2.2') >= 0) and \ os.path.isfile(hadoop_secure_dn_pid_file): # We need special handling for this case to handle the situation # when we configure non-root secure DN and then restart it @@ -354,7 +354,7 @@ def get_hdfs_binary(distro_component_name): if params.stack_name == "HDP": # This was used in HDP 2.1 and earlier hdfs_binary = "hdfs" - if Script.is_hdp_stack_greater_or_equal("2.2"): + if Script.is_stack_greater_or_equal("2.2"): hdfs_binary = "/usr/hdp/current/{0}/bin/hdfs".format(distro_component_name) return hdfs_binary http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py index 70bebb4..85e7012 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_client.py @@ -23,7 +23,7 @@ from ambari_commons import OSConst from ambari_commons.os_family_impl import OsFamilyImpl from resource_management.core.logger import Logger from resource_management.core.exceptions import ClientComponentHasNoStatus -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.version import compare_versions from resource_management.libraries.script.script import Script @@ -78,7 +78,7 @@ class HCatClientDefault(HCatClient): # HCat client doesn't have a first-class entry in hdp-select. 
Since clients always # update after daemons, this ensures that the hcat directories are correct on hosts # which do not include the WebHCat daemon - hdp_select.select("hive-webhcat", params.version) + stack_select.select("hive-webhcat", params.version) if __name__ == "__main__": http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py index 92e4ad7..c5d45ee 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py @@ -107,7 +107,7 @@ def hive(name=None): if name == 'hiveserver2': # HDP 2.1.* or lower - if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0: + if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, "2.2.0.0") < 0: params.HdfsResource(params.webhcat_apps_dir, type="directory", action="create_on_execute", @@ -134,7 +134,7 @@ def hive(name=None): # ****** Begin Copy Tarballs ****** # ********************************* # HDP 2.2 or higher, copy mapreduce.tar.gz to HDFS - if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0: + if params.stack_version_formatted_major != "" and compare_versions(params.stack_version_formatted_major, '2.2') >= 0: copy_to_hdfs("mapreduce", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped) copy_to_hdfs("tez", params.user_group, params.hdfs_user, host_sys_prepped=params.host_sys_prepped) 
http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py index e4aace3..ba2a129 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_client.py @@ -20,7 +20,7 @@ limitations under the License. import sys from resource_management import * from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from hive import hive from ambari_commons.os_family_impl import OsFamilyImpl from ambari_commons import OSConst @@ -55,10 +55,10 @@ class HiveClientDefault(HiveClient): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hive", params.version) conf_select.select(params.stack_name, "hadoop", params.version) - hdp_select.select("hadoop-client", params.version) + stack_select.select("hadoop-client", params.version) if __name__ == "__main__": http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py index 59ecbbb..7fee2b3 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_metastore.py @@ -23,10 +23,10 @@ from resource_management.core.logger import Logger from resource_management.core.resources.system import Execute, Directory from resource_management.libraries.script import Script from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions.constants import Direction from resource_management.libraries.functions.format import format -from resource_management.libraries.functions.version import format_hdp_stack_version +from resource_management.libraries.functions.version import format_stack_version from resource_management.libraries.functions.version import compare_versions from resource_management.libraries.functions.security_commons import build_expectations from resource_management.libraries.functions.security_commons import cached_kinit_executor @@ -102,15 +102,15 @@ class HiveMetastoreDefault(HiveMetastore): env.set_params(params) - is_stack_hdp_23 = Script.is_hdp_stack_greater_or_equal("2.3") + is_stack_hdp_23 = Script.is_stack_greater_or_equal("2.3") is_upgrade = params.upgrade_direction == Direction.UPGRADE if is_stack_hdp_23 and is_upgrade: self.upgrade_schema(env) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hive", params.version) - hdp_select.select("hive-metastore", params.version) + stack_select.select("hive-metastore", params.version) def 
security_status(self, env): @@ -229,7 +229,7 @@ class HiveMetastoreDefault(HiveMetastore): # we need to choose the original legacy location schematool_hive_server_conf_dir = params.hive_server_conf_dir if params.current_version is not None: - current_version = format_hdp_stack_version(params.current_version) + current_version = format_stack_version(params.current_version) if compare_versions(current_version, "2.3") < 0: schematool_hive_server_conf_dir = LEGACY_HIVE_SERVER_CONF http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py index a81e4f6..f7f1377 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py @@ -22,12 +22,12 @@ limitations under the License. 
from resource_management.libraries.script.script import Script from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import format from resource_management.libraries.functions.copy_tarball import copy_to_hdfs -from resource_management.libraries.functions.get_hdp_version import get_hdp_version +from resource_management.libraries.functions.get_stack_version import get_stack_version from resource_management.libraries.functions.check_process_status import check_process_status -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ FILE_TYPE_XML @@ -117,9 +117,9 @@ class HiveServerDefault(HiveServer): import params env.set_params(params) - if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0: + if params.version and compare_versions(format_stack_version(params.version), '2.2.0.0') >= 0: conf_select.select(params.stack_name, "hive", params.version) - hdp_select.select("hive-server2", params.version) + stack_select.select("hive-server2", params.version) # Copy mapreduce.tar.gz and tez.tar.gz to HDFS resource_created = copy_to_hdfs( http://git-wip-us.apache.org/repos/asf/ambari/blob/f7221e5a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py index 6fa3081..3a90164 100644 --- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py +++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server_interactive.py @@ -22,12 +22,12 @@ limitations under the License. from resource_management.libraries.script.script import Script from resource_management.libraries.resources.hdfs_resource import HdfsResource from resource_management.libraries.functions import conf_select -from resource_management.libraries.functions import hdp_select +from resource_management.libraries.functions import stack_select from resource_management.libraries.functions import format from resource_management.libraries.functions.copy_tarball import copy_to_hdfs -from resource_management.libraries.functions.get_hdp_version import get_hdp_version +from resource_management.libraries.functions.get_stack_version import get_stack_version from resource_management.libraries.functions.check_process_status import check_process_status -from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version +from resource_management.libraries.functions.version import compare_versions, format_stack_version from resource_management.libraries.functions.security_commons import build_expectations, \ cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \ FILE_TYPE_XML