http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 846bab7..c02bf74 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,15 +20,37 @@ Ambari Agent
 """
 import sys
 import os.path
+import glob
 
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 
-def webhcat(env):
+def webhcat():
   import params
 
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+    params.HdfsDirectory(params.webhcat_apps_dir,
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=0755
+    )
+  
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -57,6 +79,55 @@ def webhcat(env):
             path='/bin'
     )
 
+  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+    copy_tarballs_to_hdfs('hive', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', params.webhcat_user, params.hdfs_user, params.user_group)
+  else:
+    CopyFromLocal(params.hadoop_streeming_jars,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (os.path.isfile(params.pig_tar_file)):
+      CopyFromLocal(params.pig_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+    CopyFromLocal(params.hive_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (len(glob.glob(params.sqoop_tar_file)) > 0):
+      CopyFromLocal(params.sqoop_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
   XmlConfig("webhcat-site.xml",
             conf_dir=params.config_dir,
             configurations=params.config['configurations']['webhcat-site'],

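Note: the new webhcat() body above leans on a batching idiom: each HdfsDirectory(..., action="create_delayed") call only queues a directory, and the closing HdfsDirectory(None, action="create") flushes the whole queue in one pass, so kinit and the hadoop client are paid for once. A minimal sketch of that idiom with a toy class (the real HdfsDirectory resource in resource_management is more involved):

class ToyHdfsDirectory(object):
  """Toy stand-in for the HdfsDirectory resource; illustration only."""
  pending = []

  def __init__(self, path, action, **kwargs):
    if action == "create_delayed":
      ToyHdfsDirectory.pending.append((path, kwargs))   # just queue it
    elif action == "create":
      # flush everything queued so far in a single pass
      for queued_path, queued_kwargs in ToyHdfsDirectory.pending:
        print("hadoop fs -mkdir -p %s  (%s)" % (queued_path, queued_kwargs))
      ToyHdfsDirectory.pending = []

ToyHdfsDirectory("/user/hcat", action="create_delayed", owner="hcat")
ToyHdfsDirectory("/user/webhcat", action="create_delayed", owner="hcat")
ToyHdfsDirectory(None, action="create")   # one flush for all queued dirs
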
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
index d24a1da..f1f9f37 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_server.py
@@ -31,7 +31,7 @@ class WebHCatServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    webhcat(env)
+    webhcat()
 
 
   def start(self, env, rolling_restart=False):

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index b39dc8c..30d878c 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -70,8 +70,6 @@ export OOZIE_EXIT_CODE=0
 export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
 export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
-export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
-export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
 
 if [ "$os_family" == "ubuntu" ] ; then
   LIST_PACKAGE_FILES_CMD='dpkg-query -L'
@@ -102,33 +100,10 @@ else
   kinitcmd=""
 fi
 
-cat >$JSON_PATH<<EOF
-[{
-       "target":"examples",
-       "type":"directory",
-       "action":"delete"
-},
-{
-       "target":"input-data",
-       "type":"directory",
-       "action":"delete"
-},
-{
-       "target":"examples",
-       "type":"directory",
-       "action":"create",
-       "source":"$OOZIE_EXAMPLES_DIR/examples"
-},
-{
-       "target":"input-data",
-       "type":"directory",
-       "action":"create",
-       "source":"$OOZIE_EXAMPLES_DIR/examples/input-data"
-}]
-EOF
-
-echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} 
${JSON_PATH} ${NAMENODE}"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config 
${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -rm -r examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -rm -r input-data"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+sudo su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config 
${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data 
input-data"
 
 cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie 
-Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config 
$OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd

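Note: the smoke test now drives HDFS directly with four hdfs dfs invocations instead of batching them through fast-hdfs-resource.jar and a JSON manifest. For illustration, a rough Python equivalent of those calls (the user name, conf dir, and example paths below are placeholders, not values from this patch):

import subprocess

def hdfs_dfs(user, conf_dir, args):
  # run "hdfs dfs ..." as the smoke user, mirroring the sudo su calls above
  shell = "hdfs --config %s dfs %s" % (conf_dir, args)
  return subprocess.call(["sudo", "su", user, "-s", "/bin/bash", "-", "-c", shell])

hdfs_dfs("ambari-qa", "/etc/hadoop/conf", "-rm -r examples")
hdfs_dfs("ambari-qa", "/etc/hadoop/conf", "-rm -r input-data")
hdfs_dfs("ambari-qa", "/etc/hadoop/conf", "-copyFromLocal /path/to/examples examples")
hdfs_dfs("ambari-qa", "/etc/hadoop/conf", "-copyFromLocal /path/to/examples/input-data input-data")
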
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index f5164f1..9bb4f9d 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -26,13 +26,11 @@ def oozie(is_server=False # TODO: see if see can remove this
   import params
 
   if is_server:
-    params.HdfsResource(params.oozie_hdfs_user_dir,
-                         type="directory",
-                         action="create_delayed",
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
-    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
index 1a5e6e7..3755666 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params.py
@@ -147,17 +147,16 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 #LZO support

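Note: the params.py rewrite keeps the established idiom of binding cluster-wide keyword arguments once with functools.partial, so call sites such as oozie.py only pass what varies per directory. The idiom in isolation, with a toy function standing in for the HdfsDirectory resource:

import functools

def make_dir(path, action, hdfs_user, conf_dir, bin_dir, owner=None, mode=None):
  # toy stand-in for HdfsDirectory; just shows which arguments arrive
  print(path, action, hdfs_user, conf_dir, bin_dir, owner, mode)

HdfsDirectory = functools.partial(make_dir,
                                  hdfs_user="hdfs",
                                  conf_dir="/etc/hadoop/conf",
                                  bin_dir="/usr/bin")

# a call site now reads like the scripts above:
HdfsDirectory("/user/oozie", action="create", owner="oozie", mode=0o775)
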
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
index 8bb143a..d7bf5eb 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 from resource_management.libraries.functions.version import format_hdp_stack_version, compare_versions
 from resource_management import *
-import os
 
 # server configurations
 config = Script.get_config()
@@ -38,9 +37,6 @@ if hdp_stack_version != "" and 
compare_versions(hdp_stack_version, '2.2') >= 0:
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
@@ -57,7 +53,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 smoke_user_keytab = config['configurations']['cluster-env']['smokeuser_keytab']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 pig_env_sh_template = config['configurations']['pig-env']['content']
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 # not supporting 32 bit jdk.
 java64_home = config['hostLevelParams']['java_home']
@@ -67,15 +62,14 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index b209aba..a0e04ab 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -20,34 +20,32 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 class PigServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
 
-    input_file = format('/user/{smokeuser}/passwd')
-    output_dir = format('/user/{smokeuser}/pigsmoke.out')
+    input_file = 'passwd'
+    output_file = "pigsmoke.out"
 
+    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
     #cleanup put below to handle retries; if retrying there wil be a stale file that needs cleanup; exit code is fn of second command
-    params.HdfsResource(output_dir,
-                        type="directory",
-                        action="delete_delayed",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        action="delete_delayed",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        source="/etc/passwd",
-                        action="create_delayed",
-                        user=params.smokeuser,
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
+    test_cmd = format("fs -test -e {output_file}")
+
+    ExecuteHadoop( create_file_cmd,
+      tries     = 3,
+      try_sleep = 5,
+      user      = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      # for kinit run
+      keytab = params.smoke_user_keytab,
+      security_enabled = params.security_enabled,
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -62,39 +60,28 @@ class PigServiceCheck(Script):
       user      = params.smokeuser
     )
 
-    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
-      user = params.smokeuser,
+      user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
       bin_dir = params.hadoop_bin_dir
     )
 
     if params.hdp_stack_version != "" and 
compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      params.HdfsResource(output_dir,
-                          type="directory",
-                          action="delete_delayed",
-                          user=params.smokeuser,
-                          )
-      params.HdfsResource(input_file,
-                          type="file",
-                          action="delete_delayed",
-                          user=params.smokeuser,
-                          )
-      params.HdfsResource(input_file,
-                          type="file",
-                          source="/etc/passwd",
-                          action="create_delayed",
-                          user=params.smokeuser,
-      )
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
+      ExecuteHadoop( create_file_cmd,
+        tries     = 3,
+        try_sleep = 5,
+        user      = params.smokeuser,
+        conf_dir = params.hadoop_conf_dir,
+        # for kinit run
+        keytab = params.smoke_user_keytab,
+        security_enabled = params.security_enabled,
+        kinit_path_local = params.kinit_path_local,
+        bin_dir = params.hadoop_bin_dir
       )
-      params.HdfsResource(None, action="execute")
+
+      # Check for Pig-on-Tez
+      copy_tarballs_to_hdfs('tez', params.smokeuser, params.hdfs_user, params.user_group)
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser};")

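Note: the rewritten check depends on two details of create_file_cmd: chaining with ";" means the exit status comes from the final dfs -put alone (so failing to remove a not-yet-existing stale file cannot fail the check), and tries/try_sleep on ExecuteHadoop rerun the whole chain. A toy loop showing the retry semantics being assumed here, not the real ExecuteHadoop internals:

import subprocess
import time

def run_with_retries(cmd, tries=3, try_sleep=5):
  # rerun the full command line, like ExecuteHadoop(tries=3, try_sleep=5)
  for attempt in range(1, tries + 1):
    if subprocess.call(cmd, shell=True) == 0:
      return
    if attempt < tries:
      time.sleep(try_sleep)
  raise RuntimeError("failed after %d tries: %s" % (tries, cmd))

# only the second command decides the exit code, because of the ";"
run_with_retries("hadoop fs -rmr pigsmoke.out passwd; hadoop fs -put /etc/passwd passwd")
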
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
index d839acdb..13ab77f 100644
--- a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params.py
@@ -48,22 +48,20 @@ config_dir = format("{config_dir_prefix}/conf")
 
 hadoop_home = '/usr'
 java64_home = config['hostLevelParams']['java_home']
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 
 tez_user = config['configurations']['tez-env']['tez_user']
 user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 import functools
-# Create partial functions with common arguments for every HdfsResource call
-# to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+# Create partial functions with common arguments for every HdfsDirectory call
+# to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled=security_enabled,
   keytab=hdfs_user_keytab,
   kinit_path_local=kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir=hadoop_bin_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
index 6e173d0..d2b6ee3 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/historyserver.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 
@@ -42,32 +43,13 @@ class HistoryServer(Script):
 
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
+    copy_tarballs_to_hdfs('mapreduce', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
   def stop(self, env, rolling_restart=False):

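Note: copy_tarballs_to_hdfs from dynamic_variable_interpretation replaces the hand-written HdfsResource upload of the mapreduce tarball in both configure and start. Only a conceptual sketch of the idea is given here; the real helper also handles kinit, stack-versioned destination paths, and skipping uploads that already exist, so treat every path below as a placeholder:

def copy_tarball_sketch(name, file_owner, hdfs_user, user_group):
  # every path here is a placeholder; the real helper derives versioned paths
  source = "/usr/hdp/current/%s-client/%s.tar.gz" % (name, name)
  dest = "/hdp/apps/{version}/%s/%s.tar.gz" % (name, name)
  print("sudo -u %s hadoop fs -put %s %s" % (hdfs_user, source, dest))
  print("sudo -u %s hadoop fs -chown %s:%s %s" % (hdfs_user, file_owner, user_group, dest))

copy_tarball_sketch('mapreduce', 'mapred', 'hdfs', 'hadoop')
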
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index f6c3ce8..d9c73bf 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -30,10 +30,7 @@ tmp_dir = Script.get_tmp_dir()
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -41,7 +38,7 @@ version = default("/commandParams/version", None)
 hostname = config['hostname']
 
 #hadoop params
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, 
'2.2') >= 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:
   yarn_role_root = "hadoop-yarn-client"
   mapred_role_root = "hadoop-mapreduce-client"
 
@@ -69,9 +66,6 @@ if hdp_stack_version_major != "" and 
compare_versions(hdp_stack_version_major, '
   hadoop_yarn_home            = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin                    = 
format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin          = format("/usr/hdp/current/{yarn_role_root}/bin")
-
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
 else:
   hadoop_libexec_dir = "/usr/lib/hadoop/libexec"
   hadoop_bin = "/usr/lib/hadoop/sbin"
@@ -189,21 +183,17 @@ jobhistory_heapsize = default("/configurations/mapred-env/jobhistory_heapsize",
 #for create_hdfs_directory
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
-
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
index b784d8d..cf0d211 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/yarn.py
@@ -28,42 +28,37 @@ def yarn(name = None):
   import params
 
 
-  if name == "historyserver":
+  if name in ["nodemanager","historyserver"]:
     if params.yarn_log_aggregation_enabled:
-      params.HdfsResource(params.yarn_nm_app_log_dir,
-                           type="directory",
+      params.HdfsDirectory(params.yarn_nm_app_log_dir,
                            action="create_delayed",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0777,
                            recursive_chmod=True
       )
-    params.HdfsResource("/mapred",
-                         type="directory",
+    params.HdfsDirectory("/mapred",
                          action="create_delayed",
                          owner=params.mapred_user
     )
-    params.HdfsResource("/mapred/system",
-                         type="directory",
+    params.HdfsDirectory("/mapred/system",
                          action="create_delayed",
                          owner=params.hdfs_user
     )
-    params.HdfsResource(params.mapreduce_jobhistory_intermediate_done_dir,
-                         type="directory",
+    params.HdfsDirectory(params.mapreduce_jobhistory_intermediate_done_dir,
                          action="create_delayed",
                          owner=params.mapred_user,
                          group=params.user_group,
                          mode=0777
     )
 
-    params.HdfsResource(params.mapreduce_jobhistory_done_dir,
-                         type="directory",
+    params.HdfsDirectory(params.mapreduce_jobhistory_done_dir,
                          action="create_delayed",
                          owner=params.mapred_user,
                          group=params.user_group,
-                         mode=0777
+                         mode=01777
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
   if name == "nodemanager":
     Directory(params.nm_local_dirs.split(',') + params.nm_log_dirs.split(','),
@@ -134,14 +129,12 @@ def yarn(name = None):
        group=params.user_group
     )
     if params.node_labels_dir:
-      params.HdfsResource(params.node_labels_dir,
-                           type="directory",
-                           action="create_delayed",
+      params.HdfsDirectory(params.node_labels_dir,
+                           action="create",
                            owner=params.yarn_user,
                            group=params.user_group,
                            mode=0700
       )
-      params.HdfsResource(None, action="execute")
   elif name == 'apptimelineserver':
     Directory(params.ats_leveldb_dir,
        owner=params.yarn_user,

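Note: one behavioural change in yarn.py above is easy to miss: the jobhistory done dir moves from mode 0777 to 01777, i.e. world-writable plus the sticky bit, so users can delete only their own finished-job files. The leading 01 is the sticky bit in octal:

import stat

mode = 0o1777                      # same permissions as the patch's 01777
assert mode & stat.S_ISVTX         # sticky bit is set
assert mode & 0o777 == 0o777       # still world-writable underneath
# on a world-writable directory with the sticky bit, a file can be removed
# only by its owner, the directory owner, or root
print(oct(mode))
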
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
index f0d6331..5420bc7 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_master.py
@@ -271,42 +271,36 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -391,41 +385,36 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
-    self.assertResourceCalled('HdfsResource', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_master.py",
@@ -516,41 +505,35 @@ class TestHBaseMaster(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2')
 
-    self.assertResourceCalled('HdfsResource', 'hdfs://nn1/apps/hbase/data',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        action = ['create_delayed'],
-    )
-    self.assertResourceCalled('HdfsResource', '/apps/hbase/staging',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hbase',
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0711,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://nn1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
 
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-master/bin/hbase-daemon.sh --config /etc/hbase/conf start master',
       not_if = 'ls /var/run/hbase/hbase-hbase-master.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-master.pid` >/dev/null 2>&1',

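Note: the test churn above is mechanical: RMFTestCase records every resource a script declares while executeScript runs, and assertResourceCalled pops them back in order with exact keyword arguments. A toy version of that record-and-assert pattern (the real harness in Ambari's test code is richer):

class ToyHarness(object):
  def __init__(self):
    self.calls = []   # filled while the script under test runs

  def record(self, resource, name, **kwargs):
    self.calls.append((resource, name, kwargs))

  def assertResourceCalled(self, resource, name, **kwargs):
    actual = self.calls.pop(0)    # order matters, exactly like the tests above
    assert actual == (resource, name, kwargs), actual

h = ToyHarness()
h.record('HdfsDirectory', '/apps/hbase/staging', owner='hbase', mode=0o711)
h.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging', owner='hbase', mode=0o711)
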
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
index c27a763..10bd583 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HBASE/test_hbase_regionserver.py
@@ -198,6 +198,36 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/etc/hbase',
@@ -282,6 +312,36 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2'
     )
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://c6401.ambari.apache.org:8020/apps/hbase/data',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              bin_dir = '/usr/bin',
+                              action = ['create'],
+                              )
 
   def test_start_default_22(self):
     self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/hbase_regionserver.py",
@@ -372,6 +432,36 @@ class TestHbaseRegionServer(RMFTestCase):
                               owner='hbase',
                               content='log4jproperties\nline2')
 
+    self.assertResourceCalled('HdfsDirectory', 'hdfs://nn1/apps/hbase/data',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', '/apps/hbase/staging',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              mode = 0711,
+                              owner = 'hbase',
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create_delayed'])
+
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = "/usr/bin/kinit",
+                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
+                              action = ['create'])
+
     self.assertResourceCalled('Execute', '/usr/hdp/current/hbase-regionserver/bin/hbase-daemon.sh --config /etc/hbase/conf start regionserver',
       not_if = 'ls /var/run/hbase/hbase-hbase-regionserver.pid >/dev/null 2>&1 && ps -p `cat /var/run/hbase/hbase-hbase-regionserver.pid` >/dev/null 2>&1',
       user = 'hbase')

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index 8ceeddf..2fc8549 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -379,9 +379,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -428,9 +425,6 @@ class TestDatanode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/lib/hadoop-hdfs',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
index 7c774b4..a99748f 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_journalnode.py
@@ -206,9 +206,6 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
 
   def assert_configure_secured(self):
     self.assertResourceCalled('Directory', '/grid/0/hdfs/journal',
@@ -247,6 +244,3 @@ class TestJournalnode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index c83c0c9..3981e33 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -88,6 +88,7 @@ class TestNamenode(RMFTestCase):
         environment = {'HADOOP_LIBEXEC_DIR': '/usr/lib/hadoop/libexec'},
        not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
     )
+    self.printResources()
     self.assertResourceCalled('Execute', 'hdfs --config /etc/hadoop/conf dfsadmin -safemode leave',
         path = ['/usr/bin'],
         user = 'hdfs',
@@ -99,43 +100,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        only_if= None,
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_default(self):
@@ -230,43 +226,38 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
-        only_if = None,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = None,
+                              )
     self.assertNoMoreResources()
 
   def test_stop_secured(self):
@@ -332,42 +323,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  
PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState 
nn1 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -416,43 +402,37 @@ class TestNamenode(RMFTestCase):
         user = 'hdfs',
         try_sleep = 10,
     )
-
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = True,
+                              keytab = '/etc/security/keytabs/hdfs.headless.keytab',
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
         security_enabled = True,
-        hadoop_conf_dir = '/etc/hadoop/conf',
         keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_fs = 'hdfs://ns1',
+        conf_dir = '/etc/hadoop/conf',
+        hdfs_user = 'hdfs',
         kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = True,
+        action = ['create'],
+        bin_dir = '/usr/bin',
         only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
     )
     self.assertNoMoreResources()
 
@@ -514,43 +494,38 @@ class TestNamenode(RMFTestCase):
                               user = 'hdfs',
                               try_sleep = 10,
                               )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    only_if= "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
-    )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn1 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   # tests namenode start command when NameNode HA is enabled, and
@@ -599,49 +574,44 @@ class TestNamenode(RMFTestCase):
                              not_if = 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1',
                               )
     self.assertResourceCalled('Execute', "hadoop dfsadmin -safemode get | grep 'Safe mode is OFF'",
-        path = ['/usr/bin'],
-        tries = 40,
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-        user = 'hdfs',
-        try_sleep = 10,
-    )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'hdfs',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0777,
-    )
-    self.assertResourceCalled('HdfsResource', '/user/ambari-qa',
-        security_enabled = False,
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        owner = 'ambari-qa',
-        hadoop_bin_dir = '/usr/bin',
-        type = 'directory',
-        action = ['create_delayed'],
-        mode = 0770,
-    )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
-        keytab = UnknownConfigurationMock(),
-        hadoop_bin_dir = '/usr/bin',
-        hadoop_fs = 'hdfs://ns1',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-    )
+                              path = ['/usr/bin'],
+                              tries = 40,
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+                              user = 'hdfs',
+                              try_sleep = 10,
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/tmp',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0777,
+                              owner = 'hdfs',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', '/user/ambari-qa',
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              mode = 0770,
+                              owner = 'ambari-qa',
+                              bin_dir = '/usr/bin',
+                              action = ['create_delayed'],
+                              )
+    self.assertResourceCalled('HdfsDirectory', None,
+                              security_enabled = False,
+                              keytab = UnknownConfigurationMock(),
+                              conf_dir = '/etc/hadoop/conf',
+                              hdfs_user = 'hdfs',
+                              kinit_path_local = '/usr/bin/kinit',
+                              action = ['create'],
+                              bin_dir = '/usr/bin',
+                              only_if = "/usr/bin/sudo su hdfs -l -s /bin/bash -c 'export  PATH=/bin:/usr/bin ; hdfs --config /etc/hadoop/conf haadmin -getServiceState nn2 | grep active'",
+                              )
     self.assertNoMoreResources()
 
   def test_decommission_default(self):
@@ -758,9 +728,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -800,9 +767,6 @@ class TestNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namenode',
                               owner = 'hdfs',
                               group = 'hadoop',

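Aside for readers skimming the hunks above: every reverted HdfsDirectory expectation follows the same queue-then-flush shape. The following is a self-contained toy sketch of that control flow, not Ambari code; the class name matches the resource, but the behavior is inferred purely from the assertions.

class HdfsDirectory(object):
    # Toy stand-in for the resource_management resource of the same name.
    _pending = []

    def __init__(self, path, action, owner=None, mode=None):
        if action == "create_delayed":
            # Queue the directory instead of touching HDFS immediately.
            HdfsDirectory._pending.append((path, owner, mode))
        elif action == "create":
            # Flush every queued directory in one batch; in the HA tests this
            # final call is the one carrying the only_if active-NameNode guard.
            for qpath, qowner, qmode in HdfsDirectory._pending:
                print("hadoop fs -mkdir -p %s  # owner=%s, mode=%o"
                      % (qpath, qowner, qmode))
            HdfsDirectory._pending = []

HdfsDirectory("/tmp", action="create_delayed", owner="hdfs", mode=0o777)
HdfsDirectory("/user/ambari-qa", action="create_delayed", owner="ambari-qa", mode=0o770)
HdfsDirectory(None, action="create")

Batching the creates behind one terminal call presumably keeps the script down to a single kinit and hadoop client invocation per run, which would explain why only that call takes the only_if guard.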
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
index 9d90456..e48f7f6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_service_check.py
@@ -59,48 +59,36 @@ class TestServiceCheck(RMFTestCase):
         bin_dir = '/usr/bin',
         user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
-        mode = 0777,
+    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        not_if = "/usr/bin/sudo su ambari-qa -l -s /bin/bash -c '[RMF_EXPORT_PLACEHOLDER]/usr/bin/hadoop --config /etc/hadoop/conf fs -test -e /tmp'",
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['delete_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -chmod 777 /tmp',
+        conf_dir = '/etc/hadoop/conf',
+        logoutput = True,
+        try_sleep = 3,
+        tries = 5,
+        bin_dir = '/usr/bin',
+        user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', '/tmp/',
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        kinit_path_local = '/usr/bin/kinit',
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        source = '/etc/passwd',
-        user = 'hdfs',
-        action = ['create_delayed'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
-        type = 'directory',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -rm /tmp/; hadoop --config /etc/hadoop/conf fs -put /etc/passwd /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
+        user = 'ambari-qa',
     )
-    self.assertResourceCalled('HdfsResource', None,
-        security_enabled = False,
-        hadoop_bin_dir = '/usr/bin',
-        keytab = UnknownConfigurationMock(),
-        hadoop_fs = 'hdfs://c6401.ambari.apache.org:8020',
-        kinit_path_local = '/usr/bin/kinit',
-        user = 'hdfs',
-        action = ['execute'],
-        hadoop_conf_dir = '/etc/hadoop/conf',
+    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /tmp/',
+        logoutput = True,
+        tries = 5,
+        conf_dir = '/etc/hadoop/conf',
+        bin_dir = '/usr/bin',
+        try_sleep = 3,
+        user = 'ambari-qa',
     )

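For context on the revert above: the service check goes back to issuing plain ExecuteHadoop commands instead of HdfsResource calls. Below is a rough, self-contained sketch of the equivalent shell sequence; the user, conf dir, and bin dir come from the expectations above, and the bare '/tmp/' put/test target is left exactly as the diff shows it.

import subprocess

def hadoop_fs(fs_cmd, user="ambari-qa", conf_dir="/etc/hadoop/conf"):
    # Run "hadoop --config <conf_dir> fs <fs_cmd>" as the smoke-test user.
    shell = "/usr/bin/hadoop --config %s fs %s" % (conf_dir, fs_cmd)
    return subprocess.call(["su", user, "-s", "/bin/bash", "-c", shell])

hadoop_fs("-mkdir /tmp")             # create the directory if it is missing
hadoop_fs("-chmod 777 /tmp")         # open it up for the smoke test
hadoop_fs("-put /etc/passwd /tmp/")  # upload a known local file
hadoop_fs("-test -e /tmp/")          # verify the target exists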
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
index e2996fd..7ba1924 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_snamenode.py
@@ -230,9 +230,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -272,9 +269,6 @@ class TestSNamenode(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/hadoop/hdfs/namesecondary',
                               owner = 'hdfs',
                               group = 'hadoop',

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
index caac26b..9fe9d03 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_zkfc.py
@@ -63,9 +63,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -157,9 +154,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'root',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -251,9 +245,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',
@@ -320,9 +311,6 @@ class TestZkfc(RMFTestCase):
                               content = Template('slaves.j2'),
                               owner = 'hdfs',
                               )
-    self.assertResourceCalled('File', '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar',
-        content = StaticFile('fast-hdfs-resource.jar'),
-    )
     self.assertResourceCalled('Directory', '/var/run/hadoop',
                               owner = 'hdfs',
                               group = 'hadoop',

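One note on the recurring removal of the File('/var/lib/ambari-agent/lib/fast-hdfs-resource.jar') expectation in the last few hunks: RMFTestCase-style tests consume the resources a script declares one-for-one and in order, so once the scripts no longer install the jar, the matching assertion has to go too or the expectations fall out of step. A toy sketch of that assertion pattern, illustrative only and not the real harness:

class ToyCase(object):
    def __init__(self, recorded):
        # Resources the script under test actually declared, in call order.
        self.recorded = list(recorded)

    def assertResourceCalled(self, rtype, name, **kwargs):
        expected = (rtype, name, kwargs)
        actual = self.recorded.pop(0)
        assert actual == expected, "expected %r, got %r" % (expected, actual)

    def assertNoMoreResources(self):
        assert not self.recorded, "unexpected resources: %r" % self.recorded

case = ToyCase([("Directory", "/var/run/hadoop", {"owner": "hdfs"})])
case.assertResourceCalled("Directory", "/var/run/hadoop", owner="hdfs")
case.assertNoMoreResources()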