Repository: ambari
Updated Branches:
  refs/heads/trunk 8de5bf38f -> 258837693


AMBARI-11013. HDFS Upgrade Pack For HDP-2.2 to HDP-2.3 (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/25883769
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/25883769
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/25883769

Branch: refs/heads/trunk
Commit: 258837693cd70b57bb26adf6f8db92decbd03986
Parents: 8de5bf3
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri May 8 07:44:24 2015 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri May 8 10:42:35 2015 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          |  35 ++++++-
 .../ambari/server/checks/CheckDescription.java  |   7 ++
 .../checks/ServicesNamenodeTruncateCheck.java   |  80 +++++++++++++++
 .../1.6.1.2.2.0/package/scripts/params.py       |   3 +-
 .../0.1.0/package/scripts/params_linux.py       |   3 +-
 .../0.5.0.2.1/package/scripts/status_params.py  |   5 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |   4 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   9 +-
 .../2.1.0.2.0/package/scripts/status_params.py  |   8 +-
 .../0.12.0.2.0/package/scripts/status_params.py |   4 +-
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |   3 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |   7 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |   4 +-
 .../SLIDER/0.60.0.2.2/package/scripts/params.py |   3 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |   4 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |   4 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |   7 +-
 .../2.1.0.2.0/package/scripts/status_params.py  |  11 +-
 .../2.0.6/hooks/before-ANY/scripts/params.py    |   8 +-
 .../2.0.6/hooks/before-START/scripts/params.py  |   9 +-
 .../stacks/HDP/2.2/upgrades/upgrade-2.3.xml     |   9 ++
 .../ServicesNamenodeTruncateCheckTest.java      | 102 +++++++++++++++++++
 .../stacks/2.3/MAHOUT/test_mahout_client.py     |  21 +++-
 23 files changed, 289 insertions(+), 61 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index f137a19..45888d7 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -22,6 +22,7 @@ __all__ = ["select", "create"]
 
 import version
 from resource_management.core import shell
+from resource_management.libraries.script.script import Script
 
 TEMPLATE = "conf-select {0} --package {1} --stack-version {2} --conf-version 0"
 
@@ -45,8 +46,7 @@ def create(stack_name, package, version):
   if not _valid(stack_name, package, version):
     return
 
-  shell.call(TEMPLATE.format("create-conf-dir", package, version))
-
+  shell.call(TEMPLATE.format("create-conf-dir", package, version), 
logoutput=False, quiet=True)
 
 def select(stack_name, package, version, try_create=True):
   """
@@ -64,5 +64,34 @@ def select(stack_name, package, version, try_create=True):
   if try_create:
     create(stack_name, package, version)
 
-  shell.call(TEMPLATE.format("set-conf-dir", package, version), logoutput=True)
+  shell.call(TEMPLATE.format("set-conf-dir", package, version), 
logoutput=False, quiet=False)
+
+def get_hadoop_conf_dir():
+  """
+  Gets the shared hadoop conf directory using:
+  1.  Start with /etc/hadoop/conf
+  2.  When the stack is HDP-2.2 or higher, use /usr/hdp/current/hadoop-client/conf
+  3.  Only when doing a RU and the stack is HDP-2.3 or higher, use the value
+      as computed by conf-select.  This is in the form /usr/hdp/VERSION/hadoop/conf
+      to make sure the configs are written in the correct place.
+  """
+
+  config = Script.get_config()
+  hadoop_conf_dir = "/etc/hadoop/conf"
+
+  if Script.is_hdp_stack_greater_or_equal("2.2"):
+    from resource_management.libraries.functions.default import default
+
+    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+
+    direction = default("/commandParams/upgrade_direction", None)
+    ver = default("/commandParams/version", None)
+    stack_name = default("/hostLevelParams/stack_name", None)
+
+    if direction and ver and stack_name and Script.is_hdp_stack_greater_or_equal("2.3"):
+      select(stack_name, "hadoop", ver)
+      hadoop_conf_dir = "/usr/hdp/{0}/hadoop/conf".format(ver)
+
+  return hadoop_conf_dir
 
+    
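
As a reading aid for the hunk above, the precedence implemented by get_hadoop_conf_dir() can be summarized as a small pure function. This is a hypothetical, self-contained sketch: resolve_hadoop_conf_dir and its tuple-based stack-version inputs are illustrative stand-ins for what Script.get_config() and default() supply, and the real function's side effect (invoking select() to run conf-select) is omitted:

    def resolve_hadoop_conf_dir(stack_version, upgrade_direction, target_version):
        conf_dir = "/etc/hadoop/conf"                         # 1. pre-HDP-2.2 default
        if stack_version >= (2, 2):
            conf_dir = "/usr/hdp/current/hadoop-client/conf"  # 2. HDP-2.2+ symlink
            if upgrade_direction and target_version and stack_version >= (2, 3):
                # 3. RU to HDP-2.3+: the versioned directory that conf-select manages
                conf_dir = "/usr/hdp/{0}/hadoop/conf".format(target_version)
        return conf_dir

    assert resolve_hadoop_conf_dir((2, 0), None, None) == "/etc/hadoop/conf"
    assert resolve_hadoop_conf_dir((2, 2), None, None) == "/usr/hdp/current/hadoop-client/conf"
    assert resolve_hadoop_conf_dir((2, 3), "upgrade", "2.3.0.0-1234") == "/usr/hdp/2.3.0.0-1234/hadoop/conf"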

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
index 7103566..083d3f9 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/CheckDescription.java
@@ -104,6 +104,12 @@ public enum CheckDescription {
           "NameNode High Availability is not enabled. Verify that 
dfs.nameservices property is present in hdfs-site.xml.");
       }}),
 
+  SERVICES_NAMENODE_TRUNCATE(PrereqCheckType.SERVICE,
+      "NameNode Truncate must not be allowed",
+      new HashMap<String, String>() {{
+        put(AbstractCheckDescriptor.DEFAULT,
+          "NameNode Truncate is allowed. Verify that dfs.allow.truncate is set 
to 'false' in hdfs-site.xml.");
+      }}),
 
   SERVICES_TEZ_DISTRIBUTED_CACHE(PrereqCheckType.SERVICE,
       "Tez should reference Hadoop libraries from the distributed cache in 
HDFS",
@@ -166,6 +172,7 @@ public enum CheckDescription {
           "The following config types will have values overwritten: %s");
       }});
 
+
   private PrereqCheckType m_type;
   private String m_description;
   private Map<String, String> m_fails;

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheck.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheck.java b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheck.java
new file mode 100644
index 0000000..4159c65
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheck.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.ServiceNotFoundException;
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+
+import com.google.inject.Singleton;
+
+/**
+ * Checks that NameNode truncate is not allowed (dfs.allow.truncate is false).
+ */
+@Singleton
+@UpgradeCheck(group = UpgradeCheckGroup.NAMENODE_HA, order = 1.1f)
+public class ServicesNamenodeTruncateCheck extends AbstractCheckDescriptor {
+
+  /**
+   * Constructor.
+   */
+  public ServicesNamenodeTruncateCheck() {
+    super(CheckDescription.SERVICES_NAMENODE_TRUNCATE);
+  }
+
+  @Override
+  public boolean isApplicable(PrereqCheckRequest request) throws AmbariException {
+    if (!super.isApplicable(request)) {
+      return false;
+    }
+
+    final Cluster cluster = clustersProvider.get().getCluster(request.getClusterName());
+    try {
+      cluster.getService("HDFS");
+    } catch (ServiceNotFoundException ex) {
+      return false;
+    }
+
+    PrereqCheckStatus ha = request.getResult(CheckDescription.SERVICES_NAMENODE_HA);
+    if (null != ha && ha == PrereqCheckStatus.FAIL) {
+      return false;
+    }
+
+    return true;
+  }
+
+  @Override
+  public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) throws AmbariException {
+    final String clusterName = request.getClusterName();
+    final Cluster cluster = clustersProvider.get().getCluster(clusterName);
+    Config config = cluster.getDesiredConfigByType("hdfs-site");
+
+    String truncateEnabled = config.getProperties().get("dfs.allow.truncate");
+
+    if (Boolean.valueOf(truncateEnabled)) {
+      prerequisiteCheck.getFailedOn().add("HDFS");
+      prerequisiteCheck.setStatus(PrereqCheckStatus.FAIL);
+      prerequisiteCheck.setFailReason(getFailReason(prerequisiteCheck, request));
+
+    }
+  }
+}
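
The new check boils down to one predicate over the desired hdfs-site properties: fail when dfs.allow.truncate evaluates to true, pass otherwise; a missing key behaves like Boolean.valueOf(null) in the Java above, i.e. false. A hypothetical Python sketch of that predicate, with illustrative names only (the authoritative logic is the Java class above):

    def namenode_truncate_status(hdfs_site):
        # Mirrors ServicesNamenodeTruncateCheck.perform(): only the string
        # "true" (case-insensitive, per Boolean.valueOf) trips the check.
        truncate = (hdfs_site.get("dfs.allow.truncate") or "").lower() == "true"
        return "FAIL" if truncate else "PASS"

    assert namenode_truncate_status({}) == "PASS"
    assert namenode_truncate_status({"dfs.allow.truncate": "true"}) == "FAIL"
    assert namenode_truncate_status({"dfs.allow.truncate": "false"}) == "PASS"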

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index 5bb80de..f1b8ae5 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -17,6 +17,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -51,7 +52,7 @@ zookeeper_home = "/usr/hdp/current/zookeeper-client"
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config
 # directory, which is symlinked by hadoop-client only
-hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 # accumulo local directory structure
 log_dir = config['configurations']['accumulo-env']['accumulo_log_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
index f35b8ef..e0085ba 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params_linux.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from resource_management.libraries.functions import conf_select
 from ambari_commons import OSCheck
 
 config = Script.get_config()
@@ -41,5 +42,5 @@ region_mover = "/usr/lib/ams-hbase/bin/region_mover.rb"
 region_drainer = "/usr/lib/ams-hbase/bin/draining_servers.rb"
 hbase_cmd = "/usr/lib/ams-hbase/bin/hbase"
 
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hbase_conf_dir = "/etc/ams-hbase/conf"

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
index 23db4f7..399ff22 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/status_params.py
@@ -18,6 +18,7 @@ limitations under the License.
 """
 from ambari_commons import OSCheck
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -42,10 +43,10 @@ else:
   falcon_pid_dir = config['configurations']['falcon-env']['falcon_pid_dir']
   server_pid_file = format('{falcon_pid_dir}/falcon.pid')
 
-  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+
   falcon_conf_dir = "/etc/falcon/conf"
   if Script.is_hdp_stack_greater_or_equal("2.2"):
-    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
     falcon_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
 
   # Security related/required params

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 59ebb6c1..aec8e23 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -24,6 +24,7 @@ from functions import calc_xmn_from_xms
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -48,7 +49,7 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # hadoop default parameters
 hadoop_bin_dir = "/usr/bin"
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 daemon_script = "/usr/lib/hbase/bin/hbase-daemon.sh"
 region_mover = "/usr/lib/hbase/bin/region_mover.rb"
 region_drainer = "/usr/lib/hbase/bin/draining_servers.rb"
@@ -56,7 +57,6 @@ hbase_cmd = "/usr/lib/hbase/bin/hbase"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = format("/usr/hdp/current/hadoop-client/bin")
   daemon_script = format('/usr/hdp/current/hbase-client/bin/hbase-daemon.sh')
   region_mover = format('/usr/hdp/current/hbase-client/bin/region_mover.rb')

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index d8d99d5..82a6351 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -25,6 +25,7 @@ import re
 
 from ambari_commons.os_check import OSCheck
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -66,7 +67,7 @@ hadoop_bin = "/usr/lib/hadoop/sbin"
 hadoop_bin_dir = "/usr/bin"
 hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
@@ -76,10 +77,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = "/usr/hdp/current/hadoop-client"
 
-  # the configuration direction for HDFS/YARN/MapR is the hadoop config
-  # directory, which is symlinked by hadoop-client only
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-
   if not security_enabled:
     hadoop_secure_dn_user = '""'
   else:
@@ -412,4 +409,4 @@ if has_ranger_admin:
     'name': repo_name,
     'repositoryType': 'hdfs',
     'assetType': '1'
-  }
\ No newline at end of file
+  }

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
index 7918eb5..388fa59 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/status_params.py
@@ -19,6 +19,7 @@ limitations under the License.
 
 from ambari_commons import OSCheck
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -49,12 +50,7 @@ else:
  hdfs_user_principal = config['configurations']['hadoop-env']['hdfs_principal_name']
   hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
-    # the configuration direction for HDFS/YARN/MapR is the hadoop config
-    # directory, which is symlinked by hadoop-client only
-    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
  kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
   tmp_dir = Script.get_tmp_dir()

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
index ae960b7..d0acb59 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/status_params.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from ambari_commons import OSCheck
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -69,7 +70,7 @@ else:
   webhcat_user = config['configurations']['hive-env']['webhcat_user']
 
   # default configuration directories
-  hadoop_conf_dir = "/etc/hadoop/conf"
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
   webhcat_conf_dir = '/etc/hive-webhcat/conf'
   hive_etc_dir_prefix = "/etc/hive"
   hive_conf_dir = "/etc/hive/conf"
@@ -78,7 +79,6 @@ else:
 
   # HDP 2.2+
   if Script.is_hdp_stack_greater_or_equal("2.2"):
-    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
     webhcat_conf_dir = '/usr/hdp/current/hive-webhcat/conf'
     hive_conf_dir = format("/usr/hdp/current/{component_directory}/conf")
    hive_client_conf_dir = format("/usr/hdp/current/{component_directory}/conf")

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index 5e0096f..47ce98a 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -18,6 +18,7 @@ limitations under the License.
 Ambari Agent
 
 """
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -49,7 +50,7 @@ hadoop_home = '/usr/hdp/current/hadoop-client'
 
 # the configuration direction for HDFS/YARN/MapR is the hadoop config
 # directory, which is symlinked by hadoop-client only
-hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 5875284..6909cd7 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions import format
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -45,6 +46,8 @@ upgrade_direction = default("/commandParams/upgrade_direction", None)
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
 hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
+
 #hadoop params
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   # start out assuming client libraries
@@ -70,9 +73,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
   hive_conf_dir = format("{conf_dir}/action-conf/hive")
 
-  # the configuration direction for HDFS/YARN/MapR is the hadoop config
-  # directory, which is symlinked by hadoop-client only
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_lib_home = "/usr/lib/hadoop/lib"
@@ -86,7 +86,6 @@ else:
   oozie_home = "/usr/lib/oozie"
   oozie_bin_dir = "/usr/bin"
   falcon_home = '/usr/lib/falcon'
-  hadoop_conf_dir = "/etc/hadoop/conf"
   conf_dir = "/etc/oozie/conf"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index c4fb033..93c8c71 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -19,6 +19,7 @@ Ambari Agent
 
 """
 from resource_management.libraries.functions import format
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -38,8 +39,8 @@ hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 version = default("/commandParams/version", None)
 
 # hadoop default parameters
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 pig_conf_dir = "/etc/pig/conf"
-hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_bin_dir = "/usr/bin"
 hadoop_home = '/usr'
 pig_bin_dir = ""
@@ -47,7 +48,6 @@ pig_bin_dir = ""
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   pig_conf_dir = "/usr/hdp/current/pig-client/conf"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
index 48c82ea..c127115 100644
--- a/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SLIDER/0.60.0.2.2/package/scripts/params.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 from ambari_commons.os_check import OSCheck
 from resource_management.libraries.functions import format
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
@@ -47,7 +48,7 @@ slider_bin_dir = "/usr/lib/slider/bin"
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   slider_bin_dir = '/usr/hdp/current/slider-client/bin'
 
-hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 slider_conf_dir = "/usr/hdp/current/slider-client/conf"
 
 smokeuser = config['configurations']['cluster-env']['smokeuser']

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index f430743..4130472 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -24,6 +24,7 @@ import status_params
 from setup_spark import *
 
 import resource_management.libraries.functions
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
@@ -59,11 +60,10 @@ version = default("/commandParams/version", None)
 #stack_is_hdp22_or_further = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2.1.0') >= 0
 
 spark_conf = '/etc/spark/conf'
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_home = "/usr/hdp/current/hadoop-client"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   spark_conf = format("/usr/hdp/current/{component_directory}/conf")
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 5db3422..7f17a26 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 import os
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
@@ -42,7 +43,7 @@ version = default("/commandParams/version", None)
 # default hadoop parameters
 hadoop_home = '/usr'
 hadoop_bin_dir = "/usr/bin"
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 tez_etc_dir = "/etc/tez"
 config_dir = "/etc/tez/conf"
 path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
@@ -50,7 +51,6 @@ path_to_tez_examples_jar = "/usr/lib/tez/tez-mapreduce-examples*.jar"
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
   path_to_tez_examples_jar = "/usr/hdp/{hdp_version}/tez/tez-examples*.jar"
 
 # tez only started linking /usr/hdp/x.x.x.x/tez-client/conf in HDP 2.3+

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index ae736de..ee454b6 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 import os
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
@@ -62,7 +63,7 @@ hostname = config['hostname']
 hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
 hadoop_bin = "/usr/lib/hadoop/sbin"
 hadoop_bin_dir = "/usr/bin"
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_yarn_home = '/usr/lib/hadoop-yarn'
 hadoop_mapred2_jar_location = "/usr/lib/hadoop-mapreduce"
 mapred_bin = "/usr/lib/hadoop-mapreduce/sbin"
@@ -94,10 +95,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
 
-  # the configuration direction for HDFS/YARN/MapR is the hadoop config
-  # directory, which is symlinked by hadoop-client only
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
index 6832da5..ebef301 100644
--- a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
+++ b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/status_params.py
@@ -19,6 +19,7 @@ limitations under the License.
 """
 
 from resource_management import *
+from resource_management.libraries.functions import conf_select
 from ambari_commons import OSCheck
 
 config = Script.get_config()
@@ -50,14 +51,8 @@ else:
  yarn_historyserver_pid_file = format("{yarn_pid_dir}/yarn-{yarn_user}-timelineserver.pid")  # *-historyserver.pid is deprecated
  mapred_historyserver_pid_file = format("{mapred_pid_dir}/mapred-{mapred_user}-historyserver.pid")
 
-  # Security related/required params
-  hadoop_conf_dir = "/etc/hadoop/conf"
-  if Script.is_hdp_stack_greater_or_equal("2.2"):
-    # the configuration direction for HDFS/YARN/MapR is the hadoop config
-    # directory, which is symlinked by hadoop-client only
-    hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-
+  hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 
   hostname = config['hostname']
  kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
-  security_enabled = config['configurations']['cluster-env']['security_enabled']
\ No newline at end of file
+  security_enabled = config['configurations']['cluster-env']['security_enabled']

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index f457438..9054133 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -17,11 +17,12 @@ limitations under the License.
 
 """
 
+import collections
+import json
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from ambari_commons.os_check import OSCheck
 from resource_management import *
-import collections
-import json
 
 config = Script.get_config()
 tmp_dir = Script.get_tmp_dir()
@@ -76,7 +77,7 @@ hadoop_home = "/usr/lib/hadoop"
 hadoop_secure_dn_user = hdfs_user
 hadoop_dir = "/etc/hadoop"
 versioned_hdp_root = '/usr/hdp/current'
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 
 # HDP 2.2+ params
@@ -84,7 +85,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   mapreduce_libs_path = "/usr/hdp/current/hadoop-mapreduce-client/*"
   hadoop_libexec_dir = "/usr/hdp/current/hadoop-client/libexec"
   hadoop_home = "/usr/hdp/current/hadoop-client"
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
 
   # not supported in HDP 2.2+
   hadoop_conf_empty_dir = None

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
index d37ec82..059835d 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-START/scripts/params.py
@@ -17,6 +17,7 @@ limitations under the License.
 
 """
 
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from ambari_commons.os_check import OSCheck
 from resource_management import *
@@ -35,7 +36,7 @@ hadoop_lib_home = "/usr/lib/hadoop/lib"
 hadoop_bin = "/usr/lib/hadoop/sbin"
 hadoop_home = '/usr'
 create_lib_snappy_symlinks = True
-hadoop_conf_dir = "/etc/hadoop/conf"
+hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
 default_topology_script_file_path = "/etc/hadoop/conf/topology_script.py"
 
 # HDP 2.2+ params
@@ -46,9 +47,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin = "/usr/hdp/current/hadoop-client/sbin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   create_lib_snappy_symlinks = False
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-  default_topology_script_file_path = "/usr/hdp/current/hadoop-client/conf/topology_script.py"
-
+  
 current_service = config['serviceName']
 
 #security params
@@ -191,4 +190,4 @@ slave_hosts = default("/clusterHostInfo/slave_hosts", [])
 net_topology_script_file_path = default("/configurations/core-site/net.topology.script.file.name",default_topology_script_file_path)
 net_topology_script_dir = os.path.dirname(net_topology_script_file_path)
 net_topology_mapping_data_file_name = 'topology_mappings.data'
-net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, net_topology_mapping_data_file_name)
\ No newline at end of file
+net_topology_mapping_data_file_path = os.path.join(net_topology_script_dir, net_topology_mapping_data_file_name)

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
index abd95aa..e233afa 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.2/upgrades/upgrade-2.3.xml
@@ -325,6 +325,15 @@
 
     <service name="HDFS">
       <component name="NAMENODE">
+        <pre-upgrade>
+          <task xsi:type="configure">
+            <condition type="ranger-hdfs-plugin-properties" key="ranger-hdfs-plugin-enabled" value="true">
+              <type>hdfs-site</type>
+              <key>dfs.namenode.inode.attributes.provider.class</key>
+              <value>org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer</value>
+            </condition>
+          </task>
+        </pre-upgrade>
         <upgrade>
           <task xsi:type="restart" />
         </upgrade>
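
The pre-upgrade task added above is conditional: the hdfs-site key is written only when the Ranger HDFS plugin is enabled in ranger-hdfs-plugin-properties. A hypothetical sketch of the condition's effect (the dict-based config types are illustrative; the real evaluation happens inside Ambari's upgrade orchestration):

    def apply_namenode_pre_upgrade(ranger_props, hdfs_site):
        # Condition from upgrade-2.3.xml: act only when the Ranger HDFS
        # plugin is enabled; otherwise hdfs-site is left untouched.
        if ranger_props.get("ranger-hdfs-plugin-enabled") == "true":
            hdfs_site["dfs.namenode.inode.attributes.provider.class"] = (
                "org.apache.ranger.authorization.hadoop.RangerHdfsAuthorizer")
        return hdfs_site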

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheckTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheckTest.java b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheckTest.java
new file mode 100644
index 0000000..c484b90
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/checks/ServicesNamenodeTruncateCheckTest.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.checks;
+
+import static org.easymock.EasyMock.anyObject;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.junit.Assert.assertEquals;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.ambari.server.controller.PrereqCheckRequest;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.stack.PrereqCheckStatus;
+import org.apache.ambari.server.state.stack.PrerequisiteCheck;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Provider;
+
+/**
+ * Unit tests for ServicesNamenodeTruncateCheck.
+ *
+ */
+public class ServicesNamenodeTruncateCheckTest {
+
+  private Clusters m_clusters = EasyMock.createMock(Clusters.class);
+  private ServicesNamenodeTruncateCheck m_check = new ServicesNamenodeTruncateCheck();
+  private final Map<String, String> m_configMap = new HashMap<String, String>();
+
+  @Before
+  public void setup() throws Exception {
+    Cluster cluster = EasyMock.createMock(Cluster.class);
+
+    Config config = EasyMock.createMock(Config.class);
+
+    expect(config.getProperties()).andReturn(m_configMap).anyTimes();
+    expect(cluster.getService("HDFS")).andReturn(EasyMock.createMock(Service.class));
+    expect(cluster.getDesiredConfigByType("hdfs-site")).andReturn(config).anyTimes();
+    expect(m_clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
+
+    replay(m_clusters, cluster, config);
+
+    m_check.clustersProvider = new Provider<Clusters>() {
+      @Override
+      public Clusters get() {
+        return m_clusters;
+      }
+    };
+  }
+
+
+  @Test
+  public void testIsApplicable() throws Exception {
+
+    PrereqCheckRequest checkRequest = new PrereqCheckRequest("c1");
+    checkRequest.setRepositoryVersion("HDP-2.2.0.0");
+    checkRequest.setSourceStackId(new StackId("HDP", "2.2"));
+    checkRequest.setTargetStackId(new StackId("HDP", "2.2"));
+
+    Assert.assertTrue(m_check.isApplicable(checkRequest));
+  }
+
+  @Test
+  public void testPerform() throws Exception {
+    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
+    m_check.perform(check, new PrereqCheckRequest("c1"));
+    assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+
+    m_configMap.put("dfs.allow.truncate", "true");
+    check = new PrerequisiteCheck(null, null);
+    m_check.perform(check, new PrereqCheckRequest("c1"));
+    assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
+
+    m_configMap.put("dfs.allow.truncate", "false");
+    check = new PrerequisiteCheck(null, null);
+    m_check.perform(check, new PrereqCheckRequest("c1"));
+    assertEquals(PrereqCheckStatus.PASS, check.getStatus());
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/25883769/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
index 95c0b03..8a96bb9 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_client.py
@@ -71,6 +71,10 @@ class TestMahoutClient(RMFTestCase):
       json_content = json.load(f)
     version = '2.3.0.0-1234'
     json_content['commandParams']['version'] = version
+    # test to make sure conf_select is working correctly
+    json_content['commandParams']['upgrade_direction'] = 'upgrade'
+    json_content['hostLevelParams']['stack_name'] = 'HDP'
+    json_content['hostLevelParams']['stack_version'] = '2.3'
 
     mocks_dict = {}
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/mahout_client.py",
@@ -79,17 +83,26 @@ class TestMahoutClient(RMFTestCase):
                        config_dict = json_content,
                        hdp_stack_version = self.STACK_VERSION,
                        target = RMFTestCase.TARGET_COMMON_SERVICES,
-                       call_mocks = [(0, None), (0, None)],
+                       call_mocks = [(0, None), (0, None), (0, None), (0, None)],
                        mocks_dict = mocks_dict)
 
     self.assertResourceCalled('Execute',
                              ('hdp-select', 'set', 'mahout-client', version), sudo = True)
     self.assertNoMoreResources()
 
-    self.assertEquals(2, mocks_dict['call'].call_count)
+    import sys
+    self.assertEquals("/usr/hdp/2.3.0.0-1234/hadoop/conf", 
sys.modules["params"].hadoop_conf_dir)
+
+    self.assertEquals(4, mocks_dict['call'].call_count)
     self.assertEquals(
-      "conf-select create-conf-dir --package mahout --stack-version 
2.3.0.0-1234 --conf-version 0",
+      "conf-select create-conf-dir --package hadoop --stack-version 
2.3.0.0-1234 --conf-version 0",
        mocks_dict['call'].call_args_list[0][0][0])
     self.assertEquals(
-      "conf-select set-conf-dir --package mahout --stack-version 2.3.0.0-1234 
--conf-version 0",
+      "conf-select set-conf-dir --package hadoop --stack-version 2.3.0.0-1234 
--conf-version 0",
        mocks_dict['call'].call_args_list[1][0][0])
+    self.assertEquals(
+      "conf-select create-conf-dir --package mahout --stack-version 
2.3.0.0-1234 --conf-version 0",
+       mocks_dict['call'].call_args_list[2][0][0])
+    self.assertEquals(
+      "conf-select set-conf-dir --package mahout --stack-version 2.3.0.0-1234 
--conf-version 0",
+       mocks_dict['call'].call_args_list[3][0][0])
