http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 6edca7d..bfd4e74 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,7 +22,6 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
-import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -82,96 +81,18 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-        
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
-      params.HdfsResource(params.webhcat_apps_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.webhcat_user,
-                           mode=0755
-      )
-  
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-  
-    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    if (os.path.isfile(params.pig_tar_source)):
-      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.pig_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-                        type="file",
-                        action="create_on_execute",
-                        source=params.hive_tar_source,
-                        group=params.user_group,
-                        mode=params.tarballs_mode
-    )
- 
-    for src_filepath in glob.glob(params.sqoop_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_on_execute",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      
-    params.HdfsResource(params.hive_apps_whs_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=0777
+    params.HdfsDirectory(params.hive_apps_whs_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=0777
     )
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
-                          action="create_on_execute",
-                          owner=params.hive_user,
-                          mode=params.hive_hdfs_user_mode
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hive_user,
+                         mode=params.hive_hdfs_user_mode
     )
-    
-    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
-      params.HdfsResource(params.hive_exec_scratchdir,
-                           type="directory",
-                           action="create_on_execute",
-                           owner=params.hive_user,
-                           group=params.hdfs_user,
-                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-      
-    params.HdfsResource(None, action="execute")
+    setup_custom_scratchdir()
+    params.HdfsDirectory(None, action="create")
 
   Directory(params.hive_etc_dir_prefix,
             mode=0755
@@ -363,3 +284,20 @@ def jdbc_connector():
   File(params.target,
        mode = 0644,
   )
+
+# In case Hive has a custom path for its HDFS temporary directory,
+# recursive directory creation will be a prerequisite as 'hive' user cannot write on the root of the HDFS
+def setup_custom_scratchdir():
+  import params
+  # If this property is custom and not a variation of the writable temp dir
+  if is_empty(params.hive_exec_scratchdir):
+    return
+  parsed = urlparse(params.hive_exec_scratchdir)
+  if parsed.path.startswith("/tmp"):
+    return
+  params.HdfsDirectory(params.hive_exec_scratchdir,
+                       action="create_delayed",
+                       owner=params.hive_user,
+                       group=params.hdfs_user,
+                       mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index a33857c..d86de3d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -23,6 +23,7 @@ from resource_management import *
 from hive import hive
 from hive_service import hive_service
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -70,6 +71,8 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
+    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
+    copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
     setup_ranger_hive()    
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
@@ -99,14 +102,8 @@ class HiveServerDefault(HiveServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       Execute(format("hdp-select set hive-server2 {version}"))
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
+      copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
 
   def security_status(self, env):
     import status_params

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index 89f0224..bd6f70e 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -25,14 +25,13 @@ import os
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.os_check import OSCheck
 
-from resource_management import *
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -46,12 +45,9 @@ hostname = config["hostname"]
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")
 
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
-
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
@@ -99,41 +95,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   # there are no client versions of these, use server versions directly
   hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
-  
-  # --- Tarballs ---
-
-  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
-  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
-  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
-  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  
-  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
-  tarballs_mode = 0444
-else:
-  # --- Tarballs ---
-  hive_tar_source = hive_tar_file
-  pig_tar_source = pig_tar_file
-  hadoop_streaming_tar_source = hadoop_streeming_jars
-  sqoop_tar_source = sqoop_tar_file
-
-  webhcat_apps_dir = "/apps/webhcat"
-  
-  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
-  sqoop_tar_destination_dir = webhcat_apps_dir
-
-  tarballs_mode = 0755
-
-
 
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
@@ -315,6 +276,7 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -358,6 +320,8 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
+webhcat_apps_dir = "/apps/webhcat"
+
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
 webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
@@ -366,18 +330,17 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
- HdfsResource,
-  user = hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)
 
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index c33bc90..733ab0a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,9 +20,11 @@ Ambari Agent
 """
 import sys
 import os.path
+import glob
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
@@ -44,6 +46,26 @@ def webhcat():
 def webhcat():
   import params
 
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
+    params.HdfsDirectory(params.webhcat_apps_dir,
+                         action="create_delayed",
+                         owner=params.webhcat_user,
+                         mode=0755
+    )
+  
+  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+    params.HdfsDirectory(params.hcat_hdfs_user_dir,
+                         action="create_delayed",
+                         owner=params.hcat_user,
+                         mode=params.hcat_hdfs_user_mode
+    )
+  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
+                       action="create_delayed",
+                       owner=params.webhcat_user,
+                       mode=params.webhcat_hdfs_user_mode
+  )
+  params.HdfsDirectory(None, action="create")
+
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -72,6 +94,55 @@ def webhcat():
             path='/bin'
     )
 
+  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
+  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
+    copy_tarballs_to_hdfs('hive', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('pig', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('hadoop-streaming', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('sqoop', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
+  else:
+    CopyFromLocal(params.hadoop_streeming_jars,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (os.path.isfile(params.pig_tar_file)):
+      CopyFromLocal(params.pig_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
+    CopyFromLocal(params.hive_tar_file,
+                  owner=params.webhcat_user,
+                  mode=0755,
+                  dest_dir=params.webhcat_apps_dir,
+                  kinnit_if_needed=kinit_if_needed,
+                  hdfs_user=params.hdfs_user,
+                  hadoop_bin_dir=params.hadoop_bin_dir,
+                  hadoop_conf_dir=params.hadoop_conf_dir
+    )
+
+    if (len(glob.glob(params.sqoop_tar_file)) > 0):
+      CopyFromLocal(params.sqoop_tar_file,
+                    owner=params.webhcat_user,
+                    mode=0755,
+                    dest_dir=params.webhcat_apps_dir,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+
   # Replace _HOST with hostname in relevant principal-related properties
   webhcat_site = params.config['configurations']['webhcat-site'].copy()
   for prop_name in ['templeton.hive.properties', 'templeton.kerberos.principal']:

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
index 09fb690..9b59f30 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -21,7 +21,6 @@ limitations under the License.
 from resource_management import *
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
-import time
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def webhcat_service_check():
@@ -45,33 +44,8 @@ def webhcat_service_check():
   else:
     smokeuser_keytab= "no_keytab"
     smoke_user_principal="no_principal"
-    
-  unique_name = format("{smokeuser}.{timestamp}", timestamp = time.time())
-  templeton_test_script = format("idtest.{unique_name}.pig")
-  templeton_test_input = format("/tmp/idtest.{unique_name}.in")
-  templeton_test_output = format("/tmp/idtest.{unique_name}.out")
 
-  File(format("{tmp_dir}/{templeton_test_script}"),
-       content = Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, templeton_test_output=templeton_test_output),
-  )
-  
-  params.HdfsResource(format("/tmp/{templeton_test_script}"),
-                      action = "create_on_execute",
-                      type = "file",
-                      source = format("{tmp_dir}/{templeton_test_script}"),
-                      owner = params.smokeuser
-  )
-  
-  params.HdfsResource(templeton_test_input,
-                      action = "create_on_execute",
-                      type = "file",
-                      source = "/etc/passwd",
-                      owner = params.smokeuser
-  )
-  
-  params.HdfsResource(None, action = "execute")
-
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}"
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {smokeuser_keytab}"
                " {security_param} {kinit_path_local} {smoke_user_principal}")
 
   Execute(cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
deleted file mode 100644
index 3153e81..0000000
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
+++ /dev/null
@@ -1,24 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-A = load '{{templeton_test_input}}' using PigStorage(':');
-B = foreach A generate \$0 as id; 
-store B into '{{templeton_test_output}}';
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index c7d13eb..47ce98a 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -18,14 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -69,14 +68,14 @@ java64_home = config['hostLevelParams']['java_home']
 log4j_props = config['configurations']['mahout-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index da93c35..f46c41f 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -20,34 +20,55 @@ Ambari Agent
 """
 
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 class MahoutServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
 
+    create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
+    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
-    
+    remove_output_input_dirs_cmd = format("fs -rm -r -f /user/{smokeuser}/mahoutsmokeoutput "
+                                          "/user/{smokeuser}/mahoutsmokeinput")
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+                   )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                 tries = 3,
+                 try_sleep = 5,
+                 user = params.smokeuser,
+                 conf_dir = params.hadoop_conf_dir,
+                 bin_dir = params.hadoop_bin_dir
+    )
+
     File( format("{tmp_dir}/sample-mahout-test.txt"),
         content = "Test text which will be converted to sequence file.",
         mode = 0755
     )
-    
-    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput"),
-                        action="create_on_execute",
-                        type="directory",
-                        owner=params.smokeuser,
-    )
-    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt"),
-                        action="create_on_execute",
-                        type="file",
-                        owner=params.smokeuser,
-                        source=format("{tmp_dir}/sample-mahout-test.txt")
+
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
-    
+
     Execute( mahout_command,
              tries = 3,
              try_sleep = 5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index fd76321..32211c4 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -24,14 +24,13 @@ export os_family=$1
 export oozie_lib_dir=$2
 export oozie_conf_dir=$3
 export oozie_bin_dir=$4
-export oozie_examples_dir=$5
-export hadoop_conf_dir=$6
-export hadoop_bin_dir=$7
-export smoke_test_user=$8
-export security_enabled=$9
-export smoke_user_keytab=$10
-export kinit_path_local=${11}
-export smokeuser_principal=${12}
+export hadoop_conf_dir=$5
+export hadoop_bin_dir=$6
+export smoke_test_user=$7
+export security_enabled=$8
+export smoke_user_keytab=$9
+export kinit_path_local=${10}
+export smokeuser_principal=${11}
 
 function getValueFromField {
   xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
@@ -68,9 +67,37 @@ function checkOozieJobStatus {
 }
 
 export OOZIE_EXIT_CODE=0
+export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
 
-cd $oozie_examples_dir
+# search for the oozie examples JAR and, if found, store the directory name
+export OOZIE_EXAMPLES_DIR=`find "${oozie_lib_dir}/" -name "oozie-examples.tar.gz" | xargs dirname`
+if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
+  if [ "$os_family" == "ubuntu" ] ; then
+    LIST_PACKAGE_FILES_CMD='dpkg-query -L'
+  else
+    LIST_PACKAGE_FILES_CMD='rpm -ql'
+  fi
+  export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
+fi
+if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
+  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie-client/doc/'
+else
+  echo "Located Oozie examples JAR at $OOZIE_EXAMPLES_DIR"
+fi
+
+cd $OOZIE_EXAMPLES_DIR
+
+/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
+/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
+
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
 
 if [[ $security_enabled == "True" ]]; then
   kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
@@ -78,7 +105,12 @@ else
   kinitcmd=""
 fi
 
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $oozie_examples_dir/examples/apps/map-reduce/job.properties  -run"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
+/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
+
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
 job_info=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
 job_id="`echo $job_info | cut -d':' -f2`"

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
deleted file mode 100644
index 79a1bfc..0000000
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env bash
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-export oozie_conf_dir=$1
-export oozie_examples_dir=$2
-export hadoop_conf_dir=$3
-
-function getValueFromField {
-  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
-  return $?
-}
-
-export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
-
-cd $oozie_examples_dir
-
-/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
-/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
-
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index ae66eb0..6adf2ac 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -85,13 +85,11 @@ def oozie(is_server=False):
   import params
 
   if is_server:
-    params.HdfsResource(params.oozie_hdfs_user_dir,
-                         type="directory",
-                         action="create_on_execute",
+    params.HdfsDirectory(params.oozie_hdfs_user_dir,
+                         action="create",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
-    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index 47361c4..c3d9c78 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -185,16 +185,12 @@ def upgrade_oozie():
     command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
     Execute(command, user=params.oozie_user)
 
-  
-  params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
-                      action = "create_on_execute",
-                      type = "directory",
-                      owner = "oozie",
-                      group = "hadoop",
-                      mode = 0755,
-                      recursive_chmod = True
-  )
-  params.HdfsResource(None, action = "execute")
+  # ensure that HDFS is prepared to receive the new sharelib
+  command = format("hdfs dfs -chown oozie:hadoop {oozie_hdfs_user_dir}/share")
+  Execute(command, user=params.oozie_user)
+
+  command = format("hdfs dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
+  Execute(command, user=params.oozie_user)
 
   # upgrade oozie DB
   command = format("{oozie_home}/bin/ooziedb.sh upgrade -run")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index e8116d2..6909cd7 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -17,7 +17,6 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
-from resource_management import *
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
@@ -26,7 +25,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
 import status_params
@@ -69,7 +68,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
-  oozie_examples_regex = format("/usr/hdp/current/{oozie_root}/doc")
   falcon_home = '/usr/hdp/current/falcon-client'
 
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
@@ -90,7 +88,6 @@ else:
   falcon_home = '/usr/lib/falcon'
   conf_dir = "/etc/oozie/conf"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
-  oozie_examples_regex = "/usr/share/doc/oozie-*"
 
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 
@@ -198,18 +195,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)
 
 # The logic for LZO also exists in HDFS' params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
index 6cdd362..d589452 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
@@ -26,7 +26,6 @@ from resource_management.libraries.script import Script
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 import os
-import glob
 
 
 class OozieServiceCheck(Script):
@@ -40,53 +39,30 @@ class OozieServiceCheckDefault(OozieServiceCheck):
     env.set_params(params)
 
     # on HDP1 this file is different
-    prepare_hdfs_file_name = 'prepareOozieHdfsDirectories.sh'
     smoke_test_file_name = 'oozieSmoke2.sh'
 
-    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name, prepare_hdfs_file_name)
+    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name)
 
   @staticmethod
-  def oozie_smoke_shell_file(file_name, prepare_hdfs_file_name):
+  def oozie_smoke_shell_file(file_name):
     import params
 
     File(format("{tmp_dir}/{file_name}"),
          content=StaticFile(file_name),
          mode=0755
     )
-    File(format("{tmp_dir}/{prepare_hdfs_file_name}"),
-         content=StaticFile(prepare_hdfs_file_name),
-         mode=0755
-    )
 
     os_family = System.get_instance().os_family
-    oozie_examples_dir = glob.glob(params.oozie_examples_regex)[0]
-    
-    Execute(format("{tmp_dir}/{prepare_hdfs_file_name} {conf_dir} {oozie_examples_dir} {hadoop_conf_dir} "),
-            tries=3,
-            try_sleep=5,
-            logoutput=True
-    )
-    
-    params.HdfsResource(format('/user/{smokeuser}/examples'),
-      action = "create_on_execute",
-      type = "directory",
-      source = format("{oozie_examples_dir}/examples"),
-    )
-    params.HdfsResource(format('/user/{smokeuser}/input-data'),
-      action = "create_on_execute",
-      type = "directory",
-      source = format("{oozie_examples_dir}/examples/input-data"),
-    )
-    params.HdfsResource(None, action="execute")
 
     if params.security_enabled:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
     else:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
-    
-    Execute(sh_cmd,
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} {oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} {security_enabled}")
+
+    Execute(format("{tmp_dir}/{file_name}"),
+            command=sh_cmd,
             path=params.execute_path,
             tries=3,
             try_sleep=5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index ddb829e..93c8c71 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -18,15 +18,13 @@ limitations under the License.
 Ambari Agent
 
 """
-from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
-import os
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -53,10 +51,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
-  
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -77,15 +71,14 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
-
+  bin_dir = hadoop_bin_dir
+)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index 5f33b1d..9de30ed 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -20,6 +20,7 @@ Ambari Agent
 """
 
 from resource_management import *
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries import functions
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -33,23 +34,26 @@ class PigServiceCheckLinux(PigServiceCheck):
     import params
     env.set_params(params)
 
-    input_file = format('/user/{smokeuser}/passwd')
-    output_dir = format('/user/{smokeuser}/pigsmoke.out')
-
-    params.HdfsResource(output_dir,
-                        type="directory",
-                        action="delete_on_execute",
-                        user=params.smokeuser,
-                        )
-    params.HdfsResource(input_file,
-                        type="file",
-                        source="/etc/passwd",
-                        action="create_on_execute",
-                        user=params.smokeuser,
-    )
-    params.HdfsResource(None, action="execute")
- 
+    input_file = 'passwd'
+    output_file = "pigsmoke.out"
+
+    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
+    #cleanup put below to handle retries; if retrying there wil be a stale file that needs cleanup; exit code is fn of second command
+    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
+    test_cmd = format("fs -test -e {output_file}")
 
+    ExecuteHadoop( create_file_cmd,
+      tries     = 3,
+      try_sleep = 5,
+      user      = params.smokeuser,
+      conf_dir = params.hadoop_conf_dir,
+      # for kinit run
+      keytab = params.smoke_user_keytab,
+      principal = params.smokeuser_principal,
+      security_enabled = params.security_enabled,
+      kinit_path_local = params.kinit_path_local,
+      bin_dir = params.hadoop_bin_dir
+    )
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -64,7 +68,6 @@ class PigServiceCheckLinux(PigServiceCheck):
       user      = params.smokeuser
     )
 
-    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
@@ -73,27 +76,21 @@ class PigServiceCheckLinux(PigServiceCheck):
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      params.HdfsResource(output_dir,
-                          type="directory",
-                          action="delete_on_execute",
-                          user=params.smokeuser,
-      )
-      params.HdfsResource(input_file,
-                          type="file",
-                          source="/etc/passwd",
-                          action="create_on_execute",
-                          user=params.smokeuser,
+      ExecuteHadoop( create_file_cmd,
+        tries     = 3,
+        try_sleep = 5,
+        user      = params.smokeuser,
+        conf_dir = params.hadoop_conf_dir,
+        # for kinit run
+        keytab = params.smoke_user_keytab,
+        principal = params.smokeuser_principal,
+        security_enabled = params.security_enabled,
+        kinit_path_local = params.kinit_path_local,
+        bin_dir = params.hadoop_bin_dir
       )
 
       # Check for Pig-on-Tez
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'hadoop-client', params.smokeuser, params.hdfs_user, params.user_group)
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 8c05a8e..0206415 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -22,6 +22,7 @@ import sys
 import os
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.resources import Execute
@@ -76,14 +77,7 @@ class JobHistoryServer(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
       Execute(format("hdp-select set spark-historyserver {version}"))
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
 
 if __name__ == "__main__":
   JobHistoryServer().execute()
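Throughout this commit the inline HdfsResource tarball uploads are replaced with copy_tarballs_to_hdfs(tarball_name, component, file_owner, hdfs_user, group). Judging by the calls, the helper is expected to resolve the <name>_tar_source and <name>_tar_destination_folder values from cluster-env on its own, which is why the explicit tez_tar_* entries are dropped from the params.py hunk that follows. A minimal sketch of a call, mirroring the hunk above:

    from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs

    # tarball name, hdp-select component whose version stamps the HDFS path,
    # user performing the copy, HDFS superuser, owning group
    copy_tarballs_to_hdfs('tez', 'spark-historyserver',
                          params.spark_user, params.hdfs_user, params.user_group)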

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 2600141..4130472 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -23,7 +23,6 @@ import status_params
 
 from setup_spark import *
 
-from resource_management import *
 import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
@@ -31,7 +30,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # a map of the Ambari role to the component name
 # for use with /usr/hdp/current/<component>
@@ -70,8 +69,6 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
   spark_home = format("/usr/hdp/current/{component_directory}")
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 java_home = config['hostLevelParams']['java_home']
@@ -158,14 +155,14 @@ if security_enabled:
 
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
+  bin_dir = hadoop_bin_dir
+)
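The functools.partial wrapper above pre-binds the connection and security arguments shared by every HdfsDirectory call, so call sites pass only the path, action, owner and mode. A small self-contained illustration of the same idiom (the function body and values here are illustrative, not from the codebase):

    import functools

    def hdfs_directory(path, action, owner=None, mode=None,
                       conf_dir=None, hdfs_user=None, bin_dir=None, **kwargs):
        pass  # stand-in for the HdfsDirectory resource, for illustration only

    # bind the arguments every call shares once, at definition time
    HdfsDirectory = functools.partial(hdfs_directory,
                                      conf_dir='/etc/hadoop/conf',
                                      hdfs_user='hdfs',
                                      bin_dir='/usr/bin')

    # callers now pass only what varies per directory
    HdfsDirectory('/user/spark', action='create', owner='spark', mode=0755)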

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 5c01337..80d6bbc 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -37,13 +37,11 @@ def setup_spark(env, type, action = None):
             recursive=True
   )
   if type == 'server' and action == 'config':
-    params.HdfsResource(params.spark_hdfs_user_dir,
-                       type="directory",
-                       action="create_on_execute",
+    params.HdfsDirectory(params.spark_hdfs_user_dir,
+                       action="create",
                        owner=params.spark_user,
                        mode=0775
     )
-    params.HdfsResource(None, action="execute")
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],
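HdfsDirectory is used here in its immediate form (action="create"); elsewhere in this commit (see the install_jars.py hunk further down) several directories are queued with action="create_delayed" and then materialised by a final HdfsDirectory(None, action="create"). A sketch of that batched form, with example paths that are not from the source:

    # Queue several directories, then create them in one pass (pattern taken from this commit).
    for hdfs_dir in ['/apps/tez', '/apps/tez/lib']:      # illustrative paths
        params.HdfsDirectory(hdfs_dir,
                             action="create_delayed",
                             owner=params.tez_user,
                             mode=0755)
    params.HdfsDirectory(None, action="create")          # flush the queued creates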

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index 840b81d..26a403d 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -27,14 +27,7 @@ def spark_service(action):
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
 
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+    copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
 
     no_op_test = format(
       'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
index 2c7bbc4..5aadd99 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
@@ -62,10 +62,10 @@ export STORM_LOG_DIR={{log_dir}}
     </value>
   </property>
 
-   <property>
-     <name>nimbus_seeds_supported</name>
-     <value>false</value>
-     <description></description>
-   </property>
+  <property>
+    <name>nimbus_seeds_supported</name>
+    <value>false</value>
+    <description></description>
+  </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index 7e80f99..7f17a26 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -19,14 +19,13 @@ limitations under the License.
 """
 import os
 
-from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 # server configurations
 config = Script.get_config()
@@ -76,17 +75,15 @@ user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
-  security_enabled = security_enabled,
-  keytab = hdfs_user_keytab,
-  kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+# Create partial functions with common arguments for every HdfsDirectory call
+# to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
+  security_enabled=security_enabled,
+  keytab=hdfs_user_keytab,
+  kinit_path_local=kinit_path_local,
+  bin_dir=hadoop_bin_dir
 )
 
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index abff479..6786eba 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -21,6 +21,7 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions.version import compare_versions
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 
@@ -37,27 +38,53 @@ class TezServiceCheckLinux(TezServiceCheck):
       hdp_version = functions.get_hdp_version("hadoop-client")
 
     path_to_tez_jar = format(params.path_to_tez_examples_jar)
+    copy_test_file_to_hdfs_cmd =  format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
+    create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
     wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
                                "/tmp/tezsmokeinput/sample-tez-test 
/tmp/tezsmokeoutput/")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
-    
+    remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
+
+
+    ExecuteHadoop( remove_output_input_dirs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   # for kinit run
+                   keytab = params.smoke_user_keytab,
+                   principal = params.smokeuser_principal,
+                   security_enabled = params.security_enabled,
+                   kinit_path_local = params.kinit_path_local,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
+    params.HdfsDirectory("/tmp",
+                         action="create",
+                         owner=params.hdfs_user,
+                         mode=0777
+    )
+
+    ExecuteHadoop( create_input_dir_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
+    )
+
     File( format("{tmp_dir}/sample-tez-test"),
           content = "foo\nbar\nfoo\nbar\nfoo",
           mode = 0755
     )
-    
-    params.HdfsResource("/tmp/tezsmokeinput",
-                        action="create_on_execute",
-                        type="directory",
-                        owner=params.smokeuser,
-    )
-    params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
-                        action="create_on_execute",
-                        type="file",
-                        owner=params.smokeuser,
-                        source=format("{tmp_dir}/sample-tez-test"),
+
+    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
+                   tries = 3,
+                   try_sleep = 5,
+                   user = params.smokeuser,
+                   conf_dir = params.hadoop_conf_dir,
+                   bin_dir = params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
 
     ExecuteHadoop( wordcount_command,
                    tries = 3,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index e625a0d..f4dceb5 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -21,6 +21,7 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -72,33 +73,13 @@ class HistoryServerDefault(HistoryServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
       Execute(format("hdp-select set hadoop-mapreduce-historyserver {version}"))
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
-
+      copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    
-    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.mapreduce_tar_source,
-                          owner=params.hdfs_user,
-                          group=params.user_group,
-                          mode=0444,
-      )
-      params.HdfsResource(None, action="execute")
-
+    copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
     service('historyserver', action='start', serviceName='mapreduce')
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
index 44015bf..a18ca72 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management import *
 import os
-import glob
 
 def install_tez_jars():
   import params
@@ -30,12 +29,25 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsResource(hdfs_dir,
-                           type="directory",
-                           action="create_on_execute",
+      params.HdfsDirectory(hdfs_dir,
+                           action="create_delayed",
                            owner=params.tez_user,
                            mode=0755
       )
+    pass
+    params.HdfsDirectory(None, action="create")
+
+    if params.security_enabled:
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+    else:
+      kinit_if_needed = ""
+
+    if kinit_if_needed:
+      Execute(kinit_if_needed,
+              user=params.tez_user,
+              path='/bin'
+      )
+    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -50,34 +62,30 @@ def install_tez_jars():
       pass
     pass
 
-    tez_jars = {}
     if app_dir_path:
-      tez_jars[params.tez_local_api_jars] = app_dir_path
+      for scr_file, dest_file in params.app_dir_files.iteritems():
+        CopyFromLocal(scr_file,
+                      mode=0755,
+                      owner=params.tez_user,
+                      dest_dir=app_dir_path,
+                      dest_file=dest_file,
+                      kinnit_if_needed=kinit_if_needed,
+                      hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
+                      hadoop_conf_dir=params.hadoop_conf_dir
+        )
+
     if lib_dir_path:
-      tez_jars[params.tez_local_lib_jars] = lib_dir_path
-
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_on_execute",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
-         )
-        
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_on_execute",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
-         )
-    params.HdfsResource(None, action="execute")
+      CopyFromLocal(params.tez_local_lib_jars,
+                    mode=0755,
+                    owner=params.tez_user,
+                    dest_dir=lib_dir_path,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
+    pass
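The jar uploads above go through the CopyFromLocal resource rather than HdfsResource; note the kinnit_if_needed spelling of the keyword argument, exactly as the hunk passes it. A minimal sketch of one call, with an illustrative local jar path and destination directory (not from the source):

    CopyFromLocal('/usr/lib/tez/tez-api.jar',            # illustrative local jar
                  mode=0755,
                  owner=params.tez_user,
                  dest_dir='/apps/tez',                  # illustrative HDFS directory
                  kinnit_if_needed=kinit_if_needed,      # spelled "kinnit" by this resource
                  hdfs_user=params.hdfs_user,
                  hadoop_bin_dir=params.hadoop_bin_dir,
                  hadoop_conf_dir=params.hadoop_conf_dir)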
 
 
 def get_tez_hdfs_dir_paths(tez_lib_uris = None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
index 35fd71c..6d17aca 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
@@ -117,23 +117,34 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
 
+    cleanup_cmd = format("fs -rm -r -f {output_file} {input_file}")
+    create_file_cmd = format("fs -put /etc/passwd {input_file}")
     test_cmd = format("fs -test -e {output_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
 
-    params.HdfsResource(input_file,
-                        action = "create_on_execute",
-                        type = "file",
-                        source = "/etc/passwd",
-    )
-    params.HdfsResource(None, action="execute")
-
     if params.security_enabled:
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
       Execute(kinit_cmd,
               user=params.smokeuser
       )
-      
+
+    ExecuteHadoop(cleanup_cmd,
+                  tries=1,
+                  try_sleep=5,
+                  user=params.smokeuser,
+                  bin_dir=params.execute_path,
+                  conf_dir=params.hadoop_conf_dir
+    )
+
+    ExecuteHadoop(create_file_cmd,
+                  tries=1,
+                  try_sleep=5,
+                  user=params.smokeuser,
+                  bin_dir=params.execute_path,
+                  conf_dir=params.hadoop_conf_dir
+    )
+
     ExecuteHadoop(run_wordcount_job,
                   tries=1,
                   try_sleep=5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 7437e37..ee454b6 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -20,16 +20,13 @@ Ambari Agent
 """
 import os
 
-<<<<<<< HEAD
 from resource_management.libraries.functions import conf_select
-=======
-from resource_management import *
->>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
+from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
 
 import status_params
 
@@ -55,8 +52,7 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -76,6 +72,7 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
+
   # MapR directory root
   mapred_role_root = "hadoop-mapreduce-client"
   command_role = default("/role", "")
@@ -97,19 +94,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
-  
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-
-<<<<<<< HEAD
-=======
-  # the configuration direction for HDFS/YARN/MapR is the hadoop config
-  # directory, which is symlinked by hadoop-client only
-  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
->>>>>>> AMBARI-11034. Implement HdfsResouces and it's usage (aonishuk)
 
 limits_conf_dir = "/etc/security/limits.d"
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir + os.pathsep + yarn_container_bin
@@ -236,17 +221,17 @@ tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
- )
+  bin_dir = hadoop_bin_dir
+)
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index 9840c70..9d5fb97 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -22,6 +22,7 @@ Ambari Agent
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -103,18 +104,11 @@ class ResourcemanagerDefault(Resourcemanager):
     self.configure(env) # FOR SECURITY
     if params.is_supported_yarn_ranger:
       setup_ranger_yarn() #Ranger Yarn Plugin related calls 
-    if not Script.is_hdp_stack_greater_or_equal("2.2"):
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
       install_tez_jars()
     else:
       # will work only for stack versions >=2.2
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_on_execute",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          owner=params.hdfs_user
-      )
-      params.HdfsResource(None, action="execute")
+      copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
     service('resourcemanager', action='start')
 
   def status(self, env):
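The start path above keeps the old install_tez_jars() route only when the stack is exactly HDP 2.1 and otherwise copies the versioned tez tarball; the second argument names the hdp-select component whose version presumably ends up stamped into the destination path. Condensed, the decision taken in the hunk reads (condition strings copied from the hunk above):

    # HDP 2.1: push the local tez jars; HDP 2.2+: copy the versioned tez tarball into HDFS.
    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
        install_tez_jars()
    else:
        copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager',
                              params.tez_user, params.hdfs_user, params.user_group)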

http://git-wip-us.apache.org/repos/asf/ambari/blob/e833066e/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
index 8e378b5..81da809 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
@@ -84,7 +84,7 @@ class ServiceCheckDefault(ServiceCheck):
     import params
     env.set_params(params)
 
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
+    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       path_to_distributed_shell_jar = "/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell.jar"
     else:
       path_to_distributed_shell_jar = "/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell*.jar"
