This is an automated email from the ASF dual-hosted git repository.

dsen pushed a commit to branch branch-feature-AMBARI-14714
in repository https://gitbox.apache.org/repos/asf/ambari.git


The following commit(s) were added to refs/heads/branch-feature-AMBARI-14714 by this push:
     new 1f81303  [AMBARI-23248] Integrate Instance manager into HDPCORE mpack (dsen) (#674)
1f81303 is described below

commit 1f8130380dd3d02b4807f08fab75a9e52926eff3
Author: Dmitry Sen <d...@apache.org>
AuthorDate: Wed Mar 21 15:37:43 2018 +0200

    [AMBARI-23248] Integrate Instance manager into HDPCORE mpack (dsen) (#674)
    
    * [AMBARI-23248] Integrate Instance manager into HDPCORE mpack (dsen)
    
    * [AMBARI-23248] Integrate Instance manager into HDPCORE mpack (dsen) - Addendum
---
 .../src/main/python/ambari_commons/constants.py      |  3 ++-
 .../stack-hooks/before-ANY/scripts/params.py         | 20 +++++++++++++-------
 2 files changed, 15 insertions(+), 8 deletions(-)

diff --git a/ambari-common/src/main/python/ambari_commons/constants.py b/ambari-common/src/main/python/ambari_commons/constants.py
index ff77a02..0cf77e3 100644
--- a/ambari-common/src/main/python/ambari_commons/constants.py
+++ b/ambari-common/src/main/python/ambari_commons/constants.py
@@ -19,7 +19,8 @@ limitations under the License.
 """
 
 AMBARI_SUDO_BINARY = "ambari-sudo.sh"
-
+HADOOP_CLIENTS_MODULE_NAME = "HADOOP_CLIENTS"
+HADOOP_CLIENT_COMPONENT_TYPE = "HADOOP_CLIENT"
 UPGRADE_TYPE_ROLLING = "rolling"
 UPGRADE_TYPE_EXPRESS = "express"
 UPGRADE_TYPE_HOST_ORDERED = "host_ordered"
diff --git a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
index a2c3596..276bef3 100644
--- a/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stack-hooks/before-ANY/scripts/params.py
@@ -27,8 +27,6 @@ import ambari_simplejson as json # simplejson is much faster comparing to Python
 from resource_management.libraries.script import Script
 from resource_management.libraries.functions import default
 from resource_management.libraries.functions import format
-from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions import stack_select
 from resource_management.libraries.functions import format_jvm_option
 from resource_management.libraries.functions.is_empty import is_empty
 from resource_management.libraries.functions.version import format_stack_version
@@ -38,7 +36,9 @@ from resource_management.libraries.functions.stack_features import check_stack_f
 from resource_management.libraries.functions.stack_features import get_stack_feature_version
 from resource_management.libraries.functions.get_architecture import get_architecture
 from resource_management.libraries.functions.cluster_settings import get_cluster_setting_value
-from ambari_commons.constants import AMBARI_SUDO_BINARY
+from ambari_commons.constants import AMBARI_SUDO_BINARY, HADOOP_CLIENTS_MODULE_NAME, HADOOP_CLIENT_COMPONENT_TYPE
+import resource_management.libraries.functions.config_helper as config_helper
+from resource_management.libraries.functions.mpack_manager_helper import get_component_conf_path, get_component_home_path
 
 
 config = Script.get_config()
@@ -109,12 +109,16 @@ def is_secure_port(port):
 # which would cause a lot of problems when writing out hadoop-env.sh; instead
 # force the use of "current" in the hook
 hdfs_user_nofile_limit = default("/configurations/hadoop-env/hdfs_user_nofile_limit", "128000")
-hadoop_home = stack_select.get_hadoop_dir("home")
+
+mpack_name = config_helper.get_mpack_name(config)
+mpack_instance_name = config_helper.get_mpack_instance_name(config)
+module_name = config_helper.get_module_name(config)
+component_type = config_helper.get_component_type(config)
+component_instance_name = config_helper.get_component_instance_name(config)
+
 stack_name = default("/hostLevelParams/stack_name", None)
 stack_name = stack_name.lower()
 component_directory = "namenode"
-hadoop_libexec_dir = format("/usr/hwx/mpacks/{stack_name}/{stack_version_formatted}/{component_directory}/libexec")
-hadoop_lib_home = stack_select.get_hadoop_dir("lib")
 
 hadoop_dir = "/etc/hadoop"
 hadoop_java_io_tmpdir = os.path.join(tmp_dir, "hadoop_java_io_tmpdir")
@@ -215,7 +219,9 @@ if dfs_ha_namenode_ids:
     dfs_ha_enabled = True
 
 if has_namenode or dfs_type == 'HCFS':
-    hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+    hadoop_conf_dir = get_component_conf_path(mpack_name=mpack_name, instance_name=mpack_instance_name,
+                                              module_name=HADOOP_CLIENTS_MODULE_NAME,
+                                              components_instance_type=HADOOP_CLIENT_COMPONENT_TYPE)
     hadoop_conf_secure_dir = os.path.join(hadoop_conf_dir, "secure")
 
 hbase_tmp_dir = "/tmp/hbase-hbase"
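
For context, the params.py hunks above drop the old conf_select/stack_select
lookups in favor of mpack-aware path resolution. Below is a minimal sketch of
the new call pattern, assuming an Ambari agent script context where the
command JSON carries the mpack/module/component identity; every name is taken
from the diff above, and the exact on-disk layout the helper resolves to is an
assumption (the removed hadoop_libexec_dir line hints at /usr/hwx/mpacks/...),
not something this commit confirms.

    # Sketch only -- mirrors the pattern introduced in params.py above.
    # Assumes an Ambari agent script context; all names come from the diff.
    from resource_management.libraries.script import Script
    import resource_management.libraries.functions.config_helper as config_helper
    from resource_management.libraries.functions.mpack_manager_helper import get_component_conf_path
    from ambari_commons.constants import HADOOP_CLIENTS_MODULE_NAME, HADOOP_CLIENT_COMPONENT_TYPE

    config = Script.get_config()

    # The command JSON identifies which mpack instance this command targets.
    mpack_name = config_helper.get_mpack_name(config)
    mpack_instance_name = config_helper.get_mpack_instance_name(config)

    # Resolve the Hadoop client conf dir per mpack instance instead of the
    # former global conf_select.get_hadoop_conf_dir() lookup.
    hadoop_conf_dir = get_component_conf_path(mpack_name=mpack_name,
                                              instance_name=mpack_instance_name,
                                              module_name=HADOOP_CLIENTS_MODULE_NAME,
                                              components_instance_type=HADOOP_CLIENT_COMPONENT_TYPE)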

-- 
To stop receiving notification emails like this one, please contact
d...@apache.org.
