Repository: ambari
Updated Branches:
  refs/heads/trunk 66a4bfb26 -> 3da48c232


AMBARI-11049. Some Hadoop Directory Parameters Are Wrong On Running Processes After Upgrade (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/3da48c23
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/3da48c23
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/3da48c23

Branch: refs/heads/trunk
Commit: 3da48c232f08dea8f536356e0b0b8f6e25a0bf2b
Parents: 66a4bfb
Author: Nate Cole <nc...@hortonworks.com>
Authored: Mon May 11 13:31:13 2015 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Mon May 11 17:43:46 2015 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 57 ++++++++++++---
 .../1.6.1.2.2.0/package/scripts/params.py       |  3 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  5 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  9 +--
 .../0.12.0.2.0/package/scripts/params_linux.py  |  2 +-
 .../0.12.0.2.0/package/scripts/status_params.py |  1 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  2 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  8 +--
 .../0.12.0.2.0/package/scripts/params_linux.py  |  3 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  2 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  3 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   | 10 +--
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  7 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |  3 +-
 .../2.0.6/hooks/before-START/scripts/params.py  | 10 ++-
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 75 ++++++++++++++++++++
 17 files changed, 149 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 45888d7..63b474f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -18,13 +18,21 @@ limitations under the License.
 
 """
 
-__all__ = ["select", "create"]
+__all__ = ["select", "create", "get_hadoop_conf_dir", "get_hadoop_dir"]
 
 import version
 from resource_management.core import shell
+from resource_management.core.exceptions import Fail
 from resource_management.libraries.script.script import Script
 
 TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
+HADOOP_DIR_TEMPLATE = "/usr/hdp/{0}/{1}/{2}"
+HADOOP_DIR_DEFAULTS = {
+  "libexec": "/usr/lib/hadoop/libexec",
+  "sbin": "/usr/lib/hadoop/sbin",
+  "bin": "/usr/bin",
+  "lib": "/usr/lib/hadoop/lib"
+}
 
 def _valid(stack_name, package, ver):
   if stack_name != "HDP":
@@ -35,6 +43,17 @@ def _valid(stack_name, package, ver):
 
   return True
 
+def _is_upgrade():
+  from resource_management.libraries.functions.default import default
+  direction = default("/commandParams/upgrade_direction", None)
+  stack_name = default("/hostLevelParams/stack_name", None)
+  ver = default("/commandParams/version", None)
+
+  if direction and stack_name and ver:
+    return (stack_name, ver)
+
+  return None
+
 def create(stack_name, package, version):
   """
   Creates a config version for the specified package
@@ -76,22 +95,40 @@ def get_hadoop_conf_dir():
       the configs are written in the correct place
   """
 
-  config = Script.get_config()
   hadoop_conf_dir = "/etc/hadoop/conf"
 
   if Script.is_hdp_stack_greater_or_equal("2.2"):
-    from resource_management.libraries.functions.default import default
-
     hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
-    direction = default("/commandParams/upgrade_direction", None)
-    ver = default("/commandParams/version", None)
-    stack_name = default("/hostLevelParams/stack_name", None)
+    res = _is_upgrade()
 
-    if direction and ver and stack_name and Script.is_hdp_stack_greater_or_equal("2.3"):
-      select(stack_name, "hadoop", ver)
-      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(ver)
+    if res is not None and Script.is_hdp_stack_greater_or_equal("2.3"):
+      select(res[0], "hadoop", res[1])
+      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(res[1])
 
   return hadoop_conf_dir
 
+def get_hadoop_dir(target):
+  """
+  Return the hadoop shared directory in the following override order
+  1. Use default for 2.1 and lower
+  2. If 2.2 and higher, use /usr/hdp/current/hadoop-client/{target}
+  3. If 2.2 and higher AND for an upgrade, use /usr/hdp/<version>/hadoop/{target}
+  :target: the target directory
+  """
+
+  if not target in HADOOP_DIR_DEFAULTS:
+    raise Fail("Target {0} not defined".format(target))
+
+  hadoop_dir = HADOOP_DIR_DEFAULTS[target]
+
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    hadoop_dir = HADOOP_DIR_TEMPLATE.format("current", "hadoop-client", target)
+
+    res = _is_upgrade()
+
+    if res is not None:
+      hadoop_dir = HADOOP_DIR_TEMPLATE.format(res[1], "hadoop", target)
+
+  return hadoop_dir
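
A minimal usage sketch (not part of the commit) of how a params.py script is
expected to consume the new helpers; it assumes an Ambari agent environment
where resource_management is importable, and the paths in the comments follow
the resolution order documented in get_hadoop_dir() above:

    from resource_management.libraries.functions import conf_select

    # /etc/hadoop/conf on HDP 2.1 and lower,
    # /usr/hdp/current/hadoop-client/conf on HDP 2.2+,
    # /usr/hdp/<version>/hadoop/conf during an HDP 2.3+ upgrade
    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()

    # Same three-tier resolution for the shared hadoop directories; only the
    # "bin", "sbin", "lib" and "libexec" targets are defined in
    # HADOOP_DIR_DEFAULTS, anything else raises Fail.
    hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
    hadoop_lib_home = conf_select.get_hadoop_dir("lib")
    hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")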
     

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 3d2d36d..ff5a874 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -46,8 +46,7 @@ conf_dir = status_params.conf_dir
 server_conf_dir = status_params.server_conf_dir
 
 # service locations
-hadoop_prefix = "/usr/hdp/current/hadoop-client"
-hadoop_bin_dir = format("{hadoop_prefix}/bin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 zookeeper_home = "/usr/hdp/current/zookeeper-client"
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 27ced1d..9a5fdb5 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 import status_params
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -37,8 +38,9 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 etc_prefix_dir = "/etc/falcon"
 
 # hadoop params
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
 
   # if this is a server action, then use the server binaries; smoke tests
   # use the client binaries
@@ -53,7 +55,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   falcon_webapp_dir = format('/usr/hdp/current/{falcon_root}/webapp')
   falcon_home = format('/usr/hdp/current/{falcon_root}')
 else:
-  hadoop_bin_dir = "/usr/bin"
   falcon_webapp_dir = '/var/lib/falcon/webapp'
   falcon_home = '/usr/lib/falcon'
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index aec8e23..8f23bb2 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -48,7 +48,7 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # hadoop default parameters
-hadoop_bin_dir = "/usr/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
 region_mover = "/usr/lib/hbase/bin/region_mover.rb"
@@ -57,7 +57,6 @@ hbase_cmd = "/usr/lib/hbase/bin/hbase"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')
  region_drainer = format('/usr/hdp/current/hbase-client/bin/draining_servers.rb')

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 82a6351..18e0fa0 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -62,9 +62,9 @@ secure_dn_ports_are_in_use = False
 
 # hadoop default parameters
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_bin = "/usr/lib/hadoop/sbin"
-hadoop_bin_dir = "/usr/bin"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
@@ -72,9 +72,6 @@ hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = "/usr/hdp/current/hadoop-client"
 
   if not security_enabled:

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index bd6f70e..ed12a65 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -71,6 +71,7 @@ webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
 # use the directories from status_params as they are already calculated for
 # the correct version of HDP
 hadoop_conf_dir = status_params.hadoop_conf_dir
+hadoop_bin_dir = status_params.hadoop_bin_dir
 webhcat_conf_dir = status_params.webhcat_conf_dir
 hive_conf_dir = status_params.hive_conf_dir
 hive_config_dir = status_params.hive_config_dir
@@ -87,7 +88,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hive_specific_configs_supported = True
 
   component_directory = status_params.component_directory
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   hive_bin = format('/usr/hdp/current/{component_directory}/bin')
   hive_lib = format('/usr/hdp/current/{component_directory}/lib')

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index d0acb59..1cf1793 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -71,6 +71,7 @@ else:
 
   # default configuration directories
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+  hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
   webhcat_conf_dir = '/etc/hive-webhcat/conf'
   hive_etc_dir_prefix = "/etc/hive"
   hive_conf_dir = "/etc/hive/conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 47ce98a..ab0dd04 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -45,7 +45,7 @@ mahout_conf_dir = "/usr/hdp/current/mahout-client/conf"
 mahout_user = config['configurations']['mahout-env']['mahout_user']
 
 #hadoop params
-hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_home = '/usr/hdp/current/hadoop-client'
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 6909cd7..9732aab 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -47,13 +47,11 @@ stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
+hadoop_lib_home = conf_select.get_hadoop_dir("lib")
 
 #hadoop params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  # start out assuming client libraries
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
-
   # oozie-server or oozie-client, depending on role
   oozie_root = status_params.component_directory
 
@@ -74,8 +72,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hive_conf_dir = format("{conf_dir}/action-conf/hive")
 
 else:
-  hadoop_bin_dir = "/usr/bin"
-  hadoop_lib_home = "/usr/lib/hadoop/lib"
   oozie_lib_dir = "/var/lib/oozie"
   oozie_setup_sh = "/usr/lib/oozie/bin/oozie-setup.sh"
   oozie_webapps_dir = "/var/lib/oozie/oozie-server/webapps/"

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index 93c8c71..53762b6 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -40,15 +40,14 @@ version = default("/commandParams/version", None)
 
 # hadoop default parameters
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 pig_conf_dir = "/etc/pig/conf"
-hadoop_bin_dir = "/usr/bin"
 hadoop_home = '/usr'
 pig_bin_dir = ""
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   pig_conf_dir = "/usr/hdp/current/pig-client/conf"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 4130472..41bac8a 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -61,10 +61,10 @@ version = default("/commandParams/version", None)
 
 spark_conf = '/etc/spark/conf'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_home = "/usr/hdp/current/hadoop-client"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   spark_conf = format("/usr/hdp/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 7f17a26..e2aa660 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -42,7 +42,7 @@ version = default("/commandParams/version", None)
 
 # default hadoop parameters
 hadoop_home = '/usr'
-hadoop_bin_dir = "/usr/bin"
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 tez_etc_dir = "/etc/tez"
 config_dir = "/etc/tez/conf"
@@ -50,7 +50,6 @@ path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
 
 # tez only started linking /usr/hdp/x.x.x.x/tez-client/conf in HDP 2.3+

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index ee454b6..6e43192 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -60,9 +60,9 @@ version = default("/commandParams/version", None)
 hostname = config['hostname']
 
 # hadoop default parameters
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_bin = "/usr/lib/hadoop/sbin"
-hadoop_bin_dir = "/usr/bin"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
+hadoop_bin_dir = conf_select.get_hadoop_dir("bin")
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
@@ -84,10 +84,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   if command_role in YARN_SERVER_ROLE_DIRECTORY_MAP:
     yarn_role_root = YARN_SERVER_ROLE_DIRECTORY_MAP[command_role]
 
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
-  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-
   hadoop_mapred2_jar_location = format("/usr/hdp/current/{mapred_role_root}")
   mapred_bin = format("/usr/hdp/current/{mapred_role_root}/sbin")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 5c6c5bd..3031a9f 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management import *
 from resource_management.core.system import System
@@ -31,15 +32,13 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # default hadoop params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
   # not supported in HDP 2.2+
   hadoop_conf_empty_dir = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index 9054133..9d57c18 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -72,18 +72,17 @@ def is_secure_port(port):
 
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"
 
   # not supported in HDP 2.2+

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index 059835d..6f5f666 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -31,9 +31,10 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # hadoop default params
 mapreduce_libs_path = "/usr/lib/hadoop-mapreduce/*"
-hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
-hadoop_lib_home = "/usr/lib/hadoop/lib"
-hadoop_bin = "/usr/lib/hadoop/sbin"
+
+hadoop_libexec_dir = conf_select.get_hadoop_dir("libexec")
+hadoop_lib_home = conf_select.get_hadoop_dir("lib")
+hadoop_bin = conf_select.get_hadoop_dir("sbin")
 hadoop_home = '/usr'
 create_lib_snappy_symlinks = True
 hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
@@ -42,9 +43,6 @@ default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
 # HDP 2.2+ params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
-  hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
-  hadoop_lib_home = "/usr/hdp/current/hadoop-client/lib"
-  hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   create_lib_snappy_symlinks = False
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/3da48c23/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 2e7af32..417a163 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1193,6 +1193,81 @@ class TestNamenode(RMFTestCase):
                               )
     self.assertNoMoreResources()
 
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_21_and_lower_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.0'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/etc/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/lib/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/lib/hadoop/sbin", sys.modules["params"].hadoop_bin)
+
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_22_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    version = '2.2.0.0-1234'
+    del json_content['commandParams']['version']
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.2'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/usr/hdp/current/hadoop-client/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/hdp/current/hadoop-client/sbin", sys.modules["params"].hadoop_bin)
+
+  @patch("resource_management.core.shell.call")
+  def test_pre_rolling_restart_23_params(self, call_mock):
+    config_file = self.get_src_folder()+"/test/python/stacks/2.0.6/configs/nn_ru_lzo.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+    version = '2.3.0.0-1234'
+    json_content['commandParams']['version'] = version
+    json_content['commandParams']['upgrade_direction'] = 'upgrade'
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.3'
+
+    mocks_dict = {}
+    self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                       classname = "NameNode",
+                       command = "pre_rolling_restart",
+                       config_dict = json_content,
+                       hdp_stack_version = self.STACK_VERSION,
+                       target = RMFTestCase.TARGET_COMMON_SERVICES,
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None), (0, None), (0, None)],
+                       mocks_dict = mocks_dict)
+    import sys
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", sys.modules["params"].hadoop_conf_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/libexec", sys.modules["params"].hadoop_libexec_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/bin", sys.modules["params"].hadoop_bin_dir)
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/sbin", sys.modules["params"].hadoop_bin)
+
+
 
 class Popen_Mock:
   return_value = 1
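
For context, a hedged sketch of the minimal command JSON shape that drives the
new upgrade detection; the keys mirror the default() lookups in _is_upgrade()
and the fields that test_pre_rolling_restart_23_params sets above. Real Ambari
command JSON carries many more fields:

    command_json = {
        "commandParams": {
            "upgrade_direction": "upgrade",   # /commandParams/upgrade_direction
            "version": "2.3.0.0-1234",        # /commandParams/version
        },
        "hostLevelParams": {
            "stack_name": "HDP",              # /hostLevelParams/stack_name
        },
    }
    # With all three fields present, _is_upgrade() returns ("HDP", "2.3.0.0-1234")
    # and get_hadoop_dir("bin") resolves to /usr/hdp/2.3.0.0-1234/hadoop/bin.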
