http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index bfd4e74..6edca7d 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,6 +22,7 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
+import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -81,18 +82,96 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-    params.HdfsDirectory(params.hive_apps_whs_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=0777
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+        
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+      params.HdfsResource(params.webhcat_apps_dir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.webhcat_user,
+                           mode=0755
+      )
+  
+    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+      params.HdfsResource(params.hcat_hdfs_user_dir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hcat_user,
+                           mode=params.hcat_hdfs_user_mode
+      )
+    params.HdfsResource(params.webhcat_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
+                         owner=params.webhcat_user,
+                         mode=params.webhcat_hdfs_user_mode
+    )
+  
+    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_on_execute",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    if (os.path.isfile(params.pig_tar_source)):
+      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.pig_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+  
+    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
+                        type="file",
+                        action="create_on_execute",
+                        source=params.hive_tar_source,
+                        group=params.user_group,
+                        mode=params.tarballs_mode
+    )
+ 
+    for src_filepath in glob.glob(params.sqoop_tar_source):
+      src_filename = os.path.basename(src_filepath)
+      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
+                          type="file",
+                          action="create_on_execute",
+                          source=src_filepath,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      
+    params.HdfsResource(params.hive_apps_whs_dir,
+                         type="directory",
+                          action="create_on_execute",
+                          owner=params.hive_user,
+                          mode=0777
     )
-    params.HdfsDirectory(params.hive_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hive_user,
-                         mode=params.hive_hdfs_user_mode
+    params.HdfsResource(params.hive_hdfs_user_dir,
+                         type="directory",
+                          action="create_on_execute",
+                          owner=params.hive_user,
+                          mode=params.hive_hdfs_user_mode
     )
-    setup_custom_scratchdir()
-    params.HdfsDirectory(None, action="create")
+    
+    if not is_empty(params.hive_exec_scratchdir) and not urlparse(params.hive_exec_scratchdir).path.startswith("/tmp"):
+      params.HdfsResource(params.hive_exec_scratchdir,
+                           type="directory",
+                           action="create_on_execute",
+                           owner=params.hive_user,
+                           group=params.hdfs_user,
+                           mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
+      
+    params.HdfsResource(None, action="execute")
 
   Directory(params.hive_etc_dir_prefix,
             mode=0755
@@ -284,20 +363,3 @@ def jdbc_connector():
   File(params.target,
        mode = 0644,
   )
-
-# In case Hive has a custom path for its HDFS temporary directory,
-# recursive directory creation will be a prerequisite as 'hive' user cannot write on the root of the HDFS
-def setup_custom_scratchdir():
-  import params
-  # If this property is custom and not a variation of the writable temp dir
-  if is_empty(params.hive_exec_scratchdir):
-    return
-  parsed = urlparse(params.hive_exec_scratchdir)
-  if parsed.path.startswith("/tmp"):
-    return
-  params.HdfsDirectory(params.hive_exec_scratchdir,
-                       action="create_delayed",
-                       owner=params.hive_user,
-                       group=params.hdfs_user,
-                       mode=0777) # Hive expects this dir to be writeable by everyone as it is used as a temp dir
-
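Note: the HdfsResource resource that replaces HdfsDirectory throughout this patch is deferred. As the action names suggest, each create_on_execute call only queues a request, and nothing is applied to HDFS until the closing HdfsResource(None, action="execute"). A minimal sketch of that call shape, with placeholder paths, assuming a params module that defines the HdfsResource partial the way params_linux.py below does:

  import params

  # Queue two directory requests; nothing runs against HDFS yet.
  params.HdfsResource("/apps/example/warehouse",    # placeholder path
                      type="directory",
                      action="create_on_execute",
                      owner=params.hive_user,
                      mode=0777)
  params.HdfsResource("/user/example",              # placeholder path
                      type="directory",
                      action="create_on_execute",
                      owner=params.hive_user)
  # Apply everything queued above in one pass.
  params.HdfsResource(None, action="execute")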

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index d86de3d..a33857c 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -23,7 +23,6 @@ from resource_management import *
 from hive import hive
 from hive_service import hive_service
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -71,8 +70,6 @@ class HiveServerDefault(HiveServer):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
-    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
-    copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
     setup_ranger_hive()    
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
@@ -102,8 +99,14 @@ class HiveServerDefault(HiveServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hive", params.version)
       Execute(format("hdp-select set hive-server2 {version}"))
-      copy_tarballs_to_hdfs('mapreduce', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
-      copy_tarballs_to_hdfs('tez', 'hive-server2', params.tez_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          group=params.user_group,
+                          mode=params.tarballs_mode
+      )
+      params.HdfsResource(None, action="execute")
 
   def security_status(self, env):
     import status_params
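Note: the destination handed to HdfsResource above is itself a template. InlineTemplate(params.mapreduce_tar_destination).get_content() renders any {{ ... }} variables embedded in the configured destination path before it is used as the HDFS target. Roughly, with an illustrative (not shipped) destination value, inside a script that already does the usual star import:

  import params
  from resource_management import *

  # Suppose mapreduce_tar_destination were configured as something like
  # "/hdp/apps/{{ hdp_stack_version }}/mapreduce/mapreduce.tar.gz" (illustrative only).
  destination = InlineTemplate(params.mapreduce_tar_destination).get_content()

  params.HdfsResource(destination,
                      type="file",
                      action="create_on_execute",
                      source=params.mapreduce_tar_source,  # local tarball on the agent host
                      group=params.user_group,
                      mode=params.tarballs_mode)
  params.HdfsResource(None, action="execute")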

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
index ed12a65..03782d6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params_linux.py
@@ -25,13 +25,15 @@ import os
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from ambari_commons.os_check import OSCheck
 
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
 from resource_management.libraries.functions.get_port_from_url import get_port_from_url
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries import functions
+
 
 # server configurations
 config = Script.get_config()
@@ -45,9 +47,12 @@ hostname = config["hostname"]
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
 stack_is_hdp21 = Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.2")
 
+# this is not available on INSTALL action because hdp-select is not available
+hdp_stack_version = functions.get_hdp_version('hive-server2')
+
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
@@ -95,6 +100,41 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   # there are no client versions of these, use server versions directly
   hcat_lib = '/usr/hdp/current/hive-webhcat/share/hcatalog'
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
+  
+  # --- Tarballs ---
+
+  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
+  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
+  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
+  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  
+  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
+  sqoop_tar_destination_dir = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
+  tarballs_mode = 0444
+else:
+  # --- Tarballs ---
+  hive_tar_source = hive_tar_file
+  pig_tar_source = pig_tar_file
+  hadoop_streaming_tar_source = hadoop_streeming_jars
+  sqoop_tar_source = sqoop_tar_file
+
+  webhcat_apps_dir = "/apps/webhcat"
+  
+  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
+  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
+  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
+  sqoop_tar_destination_dir = webhcat_apps_dir
+
+  tarballs_mode = 0755
+
+
 
 
 execute_path = os.environ['PATH'] + os.pathsep + hive_bin + os.pathsep + hadoop_bin_dir
@@ -276,7 +316,6 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
-app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -320,8 +359,6 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
-webhcat_apps_dir = "/apps/webhcat"
-
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
 webhcat_hdfs_user_dir = format("/user/{webhcat_user}")
@@ -330,17 +367,18 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir = hadoop_conf_dir,
-  hdfs_user = hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+ HdfsResource,
+  user = hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+
 
 # ranger host
 ranger_admin_hosts = default("/clusterHostInfo/ranger_admin_hosts", [])
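Note: the functools.partial wrapper is what lets the call sites above write params.HdfsResource(path, ...) without repeating the connection and security arguments; they are pre-bound once per params module. Stripped down to the mechanism, with placeholder values standing in for what this file reads from hadoop-env and cluster-env:

  import functools
  from resource_management.libraries.resources.hdfs_resource import HdfsResource

  # Placeholder values; params_linux.py derives the real ones from the cluster configs.
  hdfs_user = "hdfs"
  security_enabled = False
  hdfs_user_keytab = "/etc/security/keytabs/hdfs.headless.keytab"
  kinit_path_local = "/usr/bin/kinit"
  hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
  hadoop_conf_dir = "/etc/hadoop/conf"

  HdfsResource = functools.partial(
    HdfsResource,
    user = hdfs_user,
    security_enabled = security_enabled,
    keytab = hdfs_user_keytab,
    kinit_path_local = kinit_path_local,
    hadoop_bin_dir = hadoop_bin_dir,
    hadoop_conf_dir = hadoop_conf_dir
  )

  # A call site then only supplies what is specific to the resource:
  HdfsResource("/user/hive", type="directory", action="create_on_execute", owner="hive", mode=0755)
  HdfsResource(None, action="execute")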

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
index 733ab0a..c33bc90 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat.py
@@ -20,11 +20,9 @@ Ambari Agent
 """
 import sys
 import os.path
-import glob
 from resource_management import *
 from resource_management.core.resources.system import Execute
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 
@@ -46,26 +44,6 @@ def webhcat():
 def webhcat():
   import params
 
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") < 0:
-    params.HdfsDirectory(params.webhcat_apps_dir,
-                         action="create_delayed",
-                         owner=params.webhcat_user,
-                         mode=0755
-    )
-  
-  if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-    params.HdfsDirectory(params.hcat_hdfs_user_dir,
-                         action="create_delayed",
-                         owner=params.hcat_user,
-                         mode=params.hcat_hdfs_user_mode
-    )
-  params.HdfsDirectory(params.webhcat_hdfs_user_dir,
-                       action="create_delayed",
-                       owner=params.webhcat_user,
-                       mode=params.webhcat_hdfs_user_mode
-  )
-  params.HdfsDirectory(None, action="create")
-
   Directory(params.templeton_pid_dir,
             owner=params.webhcat_user,
             mode=0755,
@@ -94,55 +72,6 @@ def webhcat():
             path='/bin'
     )
 
-  # TODO, these checks that are specific to HDP 2.2 and greater should really be in a script specific to that stack.
-  if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, "2.2.0.0") >= 0:
-    copy_tarballs_to_hdfs('hive', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('pig', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('hadoop-streaming', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-    copy_tarballs_to_hdfs('sqoop', 'hive-webhcat', params.webhcat_user, params.hdfs_user, params.user_group)
-  else:
-    CopyFromLocal(params.hadoop_streeming_jars,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (os.path.isfile(params.pig_tar_file)):
-      CopyFromLocal(params.pig_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
-    CopyFromLocal(params.hive_tar_file,
-                  owner=params.webhcat_user,
-                  mode=0755,
-                  dest_dir=params.webhcat_apps_dir,
-                  kinnit_if_needed=kinit_if_needed,
-                  hdfs_user=params.hdfs_user,
-                  hadoop_bin_dir=params.hadoop_bin_dir,
-                  hadoop_conf_dir=params.hadoop_conf_dir
-    )
-
-    if (len(glob.glob(params.sqoop_tar_file)) > 0):
-      CopyFromLocal(params.sqoop_tar_file,
-                    owner=params.webhcat_user,
-                    mode=0755,
-                    dest_dir=params.webhcat_apps_dir,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-
   # Replace _HOST with hostname in relevant principal-related properties
   webhcat_site = params.config['configurations']['webhcat-site'].copy()
   for prop_name in ['templeton.hive.properties', 'templeton.kerberos.principal']:

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
index 9b59f30..09fb690 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/webhcat_service_check.py
@@ -21,6 +21,7 @@ limitations under the License.
 from resource_management import *
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
+import time
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def webhcat_service_check():
@@ -44,8 +45,33 @@ def webhcat_service_check():
   else:
     smokeuser_keytab= "no_keytab"
     smoke_user_principal="no_principal"
+    
+  unique_name = format("{smokeuser}.{timestamp}", timestamp = time.time())
+  templeton_test_script = format("idtest.{unique_name}.pig")
+  templeton_test_input = format("/tmp/idtest.{unique_name}.in")
+  templeton_test_output = format("/tmp/idtest.{unique_name}.out")
 
-  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {smokeuser_keytab}"
+  File(format("{tmp_dir}/{templeton_test_script}"),
+       content = Template("templeton_smoke.pig.j2", templeton_test_input=templeton_test_input, templeton_test_output=templeton_test_output),
+  )
+  
+  params.HdfsResource(format("/tmp/{templeton_test_script}"),
+                      action = "create_on_execute",
+                      type = "file",
+                      source = format("{tmp_dir}/{templeton_test_script}"),
+                      owner = params.smokeuser
+  )
+  
+  params.HdfsResource(templeton_test_input,
+                      action = "create_on_execute",
+                      type = "file",
+                      source = "/etc/passwd",
+                      owner = params.smokeuser
+  )
+  
+  params.HdfsResource(None, action = "execute")
+
+  cmd = format("{tmp_dir}/templetonSmoke.sh {webhcat_server_host[0]} {smokeuser} {templeton_port} {templeton_test_script} {smokeuser_keytab}"
                " {security_param} {kinit_path_local} {smoke_user_principal}")
 
   Execute(cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2 b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
new file mode 100644
index 0000000..3153e81
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/templates/templeton_smoke.pig.j2
@@ -0,0 +1,24 @@
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+A = load '{{templeton_test_input}}' using PigStorage(':');
+B = foreach A generate \$0 as id; 
+store B into '{{templeton_test_output}}';
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
index ab0dd04..54f0ba0 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/params.py
@@ -18,13 +18,14 @@ limitations under the License.
 Ambari Agent
 
 """
+from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 # server configurations
 config = Script.get_config()
@@ -68,14 +69,14 @@ java64_home = config['hostLevelParams']['java_home']
 log4j_props = config['configurations']['mahout-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
index f46c41f..da93c35 100644
--- a/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/MAHOUT/1.0.0.2.3/package/scripts/service_check.py
@@ -20,55 +20,34 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 
 class MahoutServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
 
-    create_input_dir_cmd = format("fs -mkdir /user/{smokeuser}/mahoutsmokeinput")
-    copy_test_file_to_hdfs_cmd = format("fs -put {tmp_dir}/sample-mahout-test.txt /user/{smokeuser}/mahoutsmokeinput/")
     mahout_command = format("mahout seqdirectory --input /user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt "
                             "--output /user/{smokeuser}/mahoutsmokeoutput/ --charset utf-8")
     test_command = format("fs -test -e /user/{smokeuser}/mahoutsmokeoutput/_SUCCESS")
-    remove_output_input_dirs_cmd = format("fs -rm -r -f /user/{smokeuser}/mahoutsmokeoutput "
-                                          "/user/{smokeuser}/mahoutsmokeinput")
-
-    ExecuteHadoop( remove_output_input_dirs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   # for kinit run
-                   keytab = params.smoke_user_keytab,
-                   principal = params.smokeuser_principal,
-                   security_enabled = params.security_enabled,
-                   kinit_path_local = params.kinit_path_local,
-                   bin_dir = params.hadoop_bin_dir
-                   )
-
-    ExecuteHadoop( create_input_dir_cmd,
-                 tries = 3,
-                 try_sleep = 5,
-                 user = params.smokeuser,
-                 conf_dir = params.hadoop_conf_dir,
-                 bin_dir = params.hadoop_bin_dir
-    )
-
+    
     File( format("{tmp_dir}/sample-mahout-test.txt"),
         content = "Test text which will be converted to sequence file.",
         mode = 0755
     )
-
-    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+    
+    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput"),
+                        action="create_on_execute",
+                        type="directory",
+                        owner=params.smokeuser,
     )
-
+    params.HdfsResource(format("/user/{smokeuser}/mahoutsmokeinput/sample-mahout-test.txt"),
+                        action="create_on_execute",
+                        type="file",
+                        owner=params.smokeuser,
+                        source=format("{tmp_dir}/sample-mahout-test.txt")
+    )
+    params.HdfsResource(None, action="execute")
+    
     Execute( mahout_command,
              tries = 3,
              try_sleep = 5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
index 32211c4..fd76321 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/oozieSmoke2.sh
@@ -24,13 +24,14 @@ export os_family=$1
 export oozie_lib_dir=$2
 export oozie_conf_dir=$3
 export oozie_bin_dir=$4
-export hadoop_conf_dir=$5
-export hadoop_bin_dir=$6
-export smoke_test_user=$7
-export security_enabled=$8
-export smoke_user_keytab=$9
-export kinit_path_local=${10}
-export smokeuser_principal=${11}
+export oozie_examples_dir=$5
+export hadoop_conf_dir=$6
+export hadoop_bin_dir=$7
+export smoke_test_user=$8
+export security_enabled=$9
+export smoke_user_keytab=${10}
+export kinit_path_local=${11}
+export smokeuser_principal=${12}
 
 function getValueFromField {
   xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
@@ -67,37 +68,9 @@ function checkOozieJobStatus {
 }
 
 export OOZIE_EXIT_CODE=0
-export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
 export OOZIE_SERVER=`getValueFromField ${oozie_conf_dir}/oozie-site.xml oozie.base.url | tr '[:upper:]' '[:lower:]'`
 
-# search for the oozie examples JAR and, if found, store the directory name
-export OOZIE_EXAMPLES_DIR=`find "${oozie_lib_dir}/" -name "oozie-examples.tar.gz" | xargs dirname`
-if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
-  if [ "$os_family" == "ubuntu" ] ; then
-    LIST_PACKAGE_FILES_CMD='dpkg-query -L'
-  else
-    LIST_PACKAGE_FILES_CMD='rpm -ql'
-  fi
-  export OOZIE_EXAMPLES_DIR=`$LIST_PACKAGE_FILES_CMD oozie-client | grep 'oozie-examples.tar.gz$' | xargs dirname`
-fi
-if [[ -z "$OOZIE_EXAMPLES_DIR" ]] ; then
-  export OOZIE_EXAMPLES_DIR='/usr/hdp/current/oozie-client/doc/'
-else
-  echo "Located Oozie examples JAR at $OOZIE_EXAMPLES_DIR"
-fi
-
-cd $OOZIE_EXAMPLES_DIR
-
-/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
-/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
-
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
-/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties
+cd $oozie_examples_dir
 
 if [[ $security_enabled == "True" ]]; then
   kinitcmd="${kinit_path_local} -kt ${smoke_user_keytab} ${smokeuser_principal}; "
@@ -105,12 +78,7 @@ else
   kinitcmd=""
 fi
 
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r examples"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -rm -r input-data"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples examples"
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "${hadoop_bin_dir}/hdfs --config ${hadoop_conf_dir} dfs -copyFromLocal $OOZIE_EXAMPLES_DIR/examples/input-data input-data"
-
-cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $OOZIE_EXAMPLES_DIR/examples/apps/map-reduce/job.properties  -run"
+cmd="${kinitcmd}source ${oozie_conf_dir}/oozie-env.sh ; ${oozie_bin_dir}/oozie -Doozie.auth.token.cache=false job -oozie $OOZIE_SERVER -config $oozie_examples_dir/examples/apps/map-reduce/job.properties  -run"
 echo $cmd
 job_info=`/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "$cmd" | grep "job:"`
 job_id="`echo $job_info | cut -d':' -f2`"

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
new file mode 100644
index 0000000..79a1bfc
--- /dev/null
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/files/prepareOozieHdfsDirectories.sh
@@ -0,0 +1,45 @@
+#!/usr/bin/env bash
+#
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+#
+
+export oozie_conf_dir=$1
+export oozie_examples_dir=$2
+export hadoop_conf_dir=$3
+
+function getValueFromField {
+  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
+  return $?
+}
+
+export JOBTRACKER=`getValueFromField ${hadoop_conf_dir}/yarn-site.xml yarn.resourcemanager.address`
+export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
+
+cd $oozie_examples_dir
+
+/var/lib/ambari-agent/ambari-sudo.sh tar -zxf oozie-examples.tar.gz
+/var/lib/ambari-agent/ambari-sudo.sh chmod -R o+rx examples
+
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:8020|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|nameNode=hdfs://localhost:9000|nameNode=$NAMENODE|g"  examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8021|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:9001|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|jobTracker=localhost:8032|jobTracker=$JOBTRACKER|g" examples/apps/map-reduce/job.properties
+/var/lib/ambari-agent/ambari-sudo.sh sed -i "s|oozie.wf.application.path=hdfs://localhost:9000|oozie.wf.application.path=$NAMENODE|g" examples/apps/map-reduce/job.properties

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
index 6adf2ac..ae66eb0 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie.py
@@ -85,11 +85,13 @@ def oozie(is_server=False):
   import params
 
   if is_server:
-    params.HdfsDirectory(params.oozie_hdfs_user_dir,
-                         action="create",
+    params.HdfsResource(params.oozie_hdfs_user_dir,
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.oozie_user,
                          mode=params.oozie_hdfs_user_mode
     )
+    params.HdfsResource(None, action="execute")
   Directory(params.conf_dir,
              recursive = True,
              owner = params.oozie_user,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
index c3d9c78..47361c4 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/oozie_server_upgrade.py
@@ -185,12 +185,16 @@ def upgrade_oozie():
     command = format("{kinit_path_local} -kt {oozie_keytab} {oozie_principal_with_host}")
     Execute(command, user=params.oozie_user)
 
-  # ensure that HDFS is prepared to receive the new sharelib
-  command = format("hdfs dfs -chown oozie:hadoop {oozie_hdfs_user_dir}/share")
-  Execute(command, user=params.oozie_user)
-
-  command = format("hdfs dfs -chmod -R 755 {oozie_hdfs_user_dir}/share")
-  Execute(command, user=params.oozie_user)
+  
+  params.HdfsResource(format("{oozie_hdfs_user_dir}/share"),
+                      action = "create_on_execute",
+                      type = "directory",
+                      owner = "oozie",
+                      group = "hadoop",
+                      mode = 0755,
+                      recursive_chmod = True
+  )
+  params.HdfsResource(None, action = "execute")
 
   # upgrade oozie DB
   command = format("{oozie_home}/bin/ooziedb.sh upgrade -run")

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
index 9732aab..59d0097 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/params_linux.py
@@ -17,6 +17,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 
 """
+from resource_management import *
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
@@ -25,7 +26,7 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions import get_port_from_url
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
 import status_params
@@ -66,6 +67,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   oozie_shared_lib = format("/usr/hdp/current/{oozie_root}/share")
   oozie_home = format("/usr/hdp/current/{oozie_root}")
   oozie_bin_dir = format("/usr/hdp/current/{oozie_root}/bin")
+  oozie_examples_regex = format("/usr/hdp/current/{oozie_root}/doc")
   falcon_home = '/usr/hdp/current/falcon-client'
 
   conf_dir = format("/usr/hdp/current/{oozie_root}/conf")
@@ -84,6 +86,7 @@ else:
   falcon_home = '/usr/lib/falcon'
   conf_dir = "/etc/oozie/conf"
   hive_conf_dir = "/etc/oozie/conf/action-conf/hive"
+  oozie_examples_regex = "/usr/share/doc/oozie-*"
 
 execute_path = oozie_bin_dir + os.pathsep + hadoop_bin_dir
 
@@ -191,17 +194,18 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+
 
 # The logic for LZO also exists in HDFS' params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
index d589452..eb55f07 100644
--- a/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/OOZIE/4.0.0.2.0/package/scripts/service_check.py
@@ -26,6 +26,7 @@ from resource_management.libraries.script import Script
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 import os
+import glob
 
 
 class OozieServiceCheck(Script):
@@ -39,30 +40,53 @@ class OozieServiceCheckDefault(OozieServiceCheck):
     env.set_params(params)
 
     # on HDP1 this file is different
+    prepare_hdfs_file_name = 'prepareOozieHdfsDirectories.sh'
     smoke_test_file_name = 'oozieSmoke2.sh'
 
-    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name)
+    OozieServiceCheckDefault.oozie_smoke_shell_file(smoke_test_file_name, prepare_hdfs_file_name)
 
   @staticmethod
-  def oozie_smoke_shell_file(file_name):
+  def oozie_smoke_shell_file(file_name, prepare_hdfs_file_name):
     import params
 
     File(format("{tmp_dir}/{file_name}"),
          content=StaticFile(file_name),
          mode=0755
     )
+    File(format("{tmp_dir}/{prepare_hdfs_file_name}"),
+         content=StaticFile(prepare_hdfs_file_name),
+         mode=0755
+    )
 
     os_family = System.get_instance().os_family
+    oozie_examples_dir = glob.glob(params.oozie_examples_regex)[0]
+    
+    Execute(format("{tmp_dir}/{prepare_hdfs_file_name} {conf_dir} {oozie_examples_dir} {hadoop_conf_dir} "),
+            tries=3,
+            try_sleep=5,
+            logoutput=True
+    )
+    
+    params.HdfsResource(format('/user/{smokeuser}/examples'),
+      action = "create_on_execute",
+      type = "directory",
+      source = format("{oozie_examples_dir}/examples"),
+    )
+    params.HdfsResource(format('/user/{smokeuser}/input-data'),
+      action = "create_on_execute",
+      type = "directory",
+      source = format("{oozie_examples_dir}/examples/input-data"),
+    )
+    params.HdfsResource(None, action="execute")
 
     if params.security_enabled:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} 
{oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} 
{security_enabled} {smokeuser_keytab} {kinit_path_local} {smokeuser_principal}")
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} 
{oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} 
{smokeuser} {security_enabled} {smokeuser_keytab} {kinit_path_local} 
{smokeuser_principal}")
     else:
       sh_cmd = format(
-        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} 
{oozie_bin_dir} {hadoop_conf_dir} {hadoop_bin_dir} {smokeuser} 
{security_enabled}")
-
-    Execute(format("{tmp_dir}/{file_name}"),
-            command=sh_cmd,
+        "{tmp_dir}/{file_name} {os_family} {oozie_lib_dir} {conf_dir} 
{oozie_bin_dir} {oozie_examples_dir} {hadoop_conf_dir} {hadoop_bin_dir} 
{smokeuser} {security_enabled}")
+    
+    Execute(sh_cmd,
             path=params.execute_path,
             tries=3,
             try_sleep=5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
index 53762b6..5bcffc7 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/params_linux.py
@@ -18,13 +18,15 @@ limitations under the License.
 Ambari Agent
 
 """
+from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
+import os
 
 # server configurations
 config = Script.get_config()
@@ -50,6 +52,10 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   pig_conf_dir = "/usr/hdp/current/pig-client/conf"
   hadoop_home = '/usr/hdp/current/hadoop-client'
   pig_bin_dir = '/usr/hdp/current/pig-client/bin'
+  
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
+
 
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
@@ -70,14 +76,15 @@ pig_properties = config['configurations']['pig-properties']['content']
 log4j_props = config['configurations']['pig-log4j']['content']
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create hdfs directory we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
index 9de30ed..5f33b1d 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/service_check.py
@@ -20,7 +20,6 @@ Ambari Agent
 """
 
 from resource_management import *
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries import functions
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
@@ -34,26 +33,23 @@ class PigServiceCheckLinux(PigServiceCheck):
     import params
     env.set_params(params)
 
-    input_file = 'passwd'
-    output_file = "pigsmoke.out"
-
-    cleanup_cmd = format("dfs -rmr {output_file} {input_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file that needs cleanup; exit code is fn of second command
-    create_file_cmd = format("{cleanup_cmd}; hadoop --config {hadoop_conf_dir} dfs -put /etc/passwd {input_file} ") #TODO: inconsistent that second command needs hadoop
-    test_cmd = format("fs -test -e {output_file}")
-
-    ExecuteHadoop( create_file_cmd,
-      tries     = 3,
-      try_sleep = 5,
-      user      = params.smokeuser,
-      conf_dir = params.hadoop_conf_dir,
-      # for kinit run
-      keytab = params.smoke_user_keytab,
-      principal = params.smokeuser_principal,
-      security_enabled = params.security_enabled,
-      kinit_path_local = params.kinit_path_local,
-      bin_dir = params.hadoop_bin_dir
+    input_file = format('/user/{smokeuser}/passwd')
+    output_dir = format('/user/{smokeuser}/pigsmoke.out')
+
+    params.HdfsResource(output_dir,
+                        type="directory",
+                        action="delete_on_execute",
+                        user=params.smokeuser,
+                        )
+    params.HdfsResource(input_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_on_execute",
+                        user=params.smokeuser,
     )
+    params.HdfsResource(None, action="execute")
+ 
+
 
     File( format("{tmp_dir}/pigSmoke.sh"),
       content = StaticFile("pigSmoke.sh"),
@@ -68,6 +64,7 @@ class PigServiceCheckLinux(PigServiceCheck):
       user      = params.smokeuser
     )
 
+    test_cmd = format("fs -test -e {output_dir}")
     ExecuteHadoop( test_cmd,
       user      = params.smokeuser,
       conf_dir = params.hadoop_conf_dir,
@@ -76,21 +73,27 @@ class PigServiceCheckLinux(PigServiceCheck):
 
     if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
       # cleanup results from previous test
-      ExecuteHadoop( create_file_cmd,
-        tries     = 3,
-        try_sleep = 5,
-        user      = params.smokeuser,
-        conf_dir = params.hadoop_conf_dir,
-        # for kinit run
-        keytab = params.smoke_user_keytab,
-        principal = params.smokeuser_principal,
-        security_enabled = params.security_enabled,
-        kinit_path_local = params.kinit_path_local,
-        bin_dir = params.hadoop_bin_dir
+      params.HdfsResource(output_dir,
+                          type="directory",
+                          action="delete_on_execute",
+                          user=params.smokeuser,
+      )
+      params.HdfsResource(input_file,
+                          type="file",
+                          source="/etc/passwd",
+                          action="create_on_execute",
+                          user=params.smokeuser,
       )
 
       # Check for Pig-on-Tez
-      copy_tarballs_to_hdfs('tez', 'hadoop-client', params.smokeuser, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
       if params.security_enabled:
         kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
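Note: the smoke test now queues a delete_on_execute for the previous run's output directory next to the create_on_execute for the input file, so cleanup and setup are applied together by the single execute call and the check stays re-runnable. Schematically, with hard-coded placeholder paths instead of the format() calls used above:

  import params

  output_dir = "/user/ambari-qa/pigsmoke.out"   # placeholder for /user/{smokeuser}/pigsmoke.out
  input_file = "/user/ambari-qa/passwd"         # placeholder for /user/{smokeuser}/passwd

  # Drop stale output from an earlier run, then (re)create the test input.
  params.HdfsResource(output_dir, type="directory", action="delete_on_execute", user=params.smokeuser)
  params.HdfsResource(input_file, type="file", source="/etc/passwd",
                      action="create_on_execute", user=params.smokeuser)
  params.HdfsResource(None, action="execute")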

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
index 0206415..8c05a8e 100644
--- a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
+++ b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/job_history_server.py
@@ -22,7 +22,6 @@ import sys
 import os
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.check_process_status import check_process_status
 from resource_management.core.resources import Execute
@@ -77,7 +76,14 @@ class JobHistoryServer(Script):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "spark", params.version)
       Execute(format("hdp-select set spark-historyserver {version}"))
-      copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
 if __name__ == "__main__":
   JobHistoryServer().execute()

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
index 41bac8a..34d1cb8 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/params.py
@@ -23,6 +23,7 @@ import status_params
 
 from setup_spark import *
 
+from resource_management import *
 import resource_management.libraries.functions
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
@@ -30,7 +31,7 @@ from resource_management.libraries.functions.version import format_hdp_stack_ver
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 # a map of the Ambari role to the component name
 # for use with /usr/hdp/current/<component>
@@ -69,6 +70,8 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   spark_log_dir = config['configurations']['spark-env']['spark_log_dir']
   spark_pid_dir = status_params.spark_pid_dir
   spark_home = format("/usr/hdp/current/{component_directory}")
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 java_home = config['hostLevelParams']['java_home']
@@ -155,14 +158,14 @@ if security_enabled:
 
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )

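The params.py change above swaps the HdfsDirectory partial for an HdfsResource partial: the security and connection arguments that never vary (user, keytab, kinit path, Hadoop bin and conf dirs) are bound once with functools.partial, so every call site only names the path, type, action, and ownership. A toy, self-contained sketch of the same idiom with a stand-in function instead of the real resource class; all values below are illustrative, and the octal mode is Python 2 syntax to match these scripts:

import functools

def hdfs_resource(path, **kwargs):
  # Stand-in for the real HdfsResource: it just returns the merged
  # arguments so the effect of the pre-binding is visible.
  return dict(kwargs, path=path)

# Bind the arguments that are identical for every call in params.py.
HdfsResource = functools.partial(
  hdfs_resource,
  user="hdfs",
  security_enabled=False,
  keytab=None,
  kinit_path_local="/usr/bin/kinit",
  hadoop_bin_dir="/usr/hdp/current/hadoop-client/bin",
  hadoop_conf_dir="/usr/hdp/current/hadoop-client/conf",
)

# Call sites now read like the patched scripts: only per-resource details appear.
print(HdfsResource("/user/spark", type="directory",
                   action="create_on_execute", owner="spark", mode=0755))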
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
index 80d6bbc..5c01337 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/setup_spark.py
@@ -37,11 +37,13 @@ def setup_spark(env, type, action = None):
             recursive=True
   )
   if type == 'server' and action == 'config':
-    params.HdfsDirectory(params.spark_hdfs_user_dir,
-                       action="create",
+    params.HdfsResource(params.spark_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
                        owner=params.spark_user,
                        mode=0775
     )
+    params.HdfsResource(None, action="execute")
     
   PropertiesFile(format("{spark_conf}/spark-defaults.conf"),
     properties = params.config['configurations']['spark-defaults'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
index 26a403d..840b81d 100644
--- 
a/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
+++ 
b/ambari-server/src/main/resources/common-services/SPARK/1.2.0.2.2/package/scripts/spark_service.py
@@ -27,7 +27,14 @@ def spark_service(action):
       spark_kinit_cmd = format("{kinit_path_local} -kt {spark_kerberos_keytab} {spark_principal}; ")
       Execute(spark_kinit_cmd, user=params.spark_user)
 
-    copy_tarballs_to_hdfs('tez', 'spark-historyserver', params.spark_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.tez_tar_source,
+                          group=params.user_group,
+                          owner=params.hdfs_user
+      )
+      params.HdfsResource(None, action="execute")
 
     no_op_test = format(
      'ls {spark_history_server_pid_file} >/dev/null 2>&1 && ps -p `cat {spark_history_server_pid_file}` >/dev/null 2>&1')

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
index 5aadd99..2c7bbc4 100644
--- 
a/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
+++ 
b/ambari-server/src/main/resources/common-services/STORM/0.9.1.2.1/configuration/storm-env.xml
@@ -62,12 +62,10 @@ export STORM_LOG_DIR={{log_dir}}
     </value>
   </property>
 
-  <configuration>
-    <property>
-      <name>nimbus_seeds_supported</name>
-      <value>false</value>
-      <description></description>
-    </property>
-  </configuration>
+   <property>
+     <name>nimbus_seeds_supported</name>
+     <value>false</value>
+     <description></description>
+   </property>
 
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
index e2aa660..1e183de 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/params_linux.py
@@ -19,13 +19,14 @@ limitations under the License.
 """
 import os
 
+from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 # server configurations
 config = Script.get_config()
@@ -74,15 +75,17 @@ user_group = config['configurations']['cluster-env']['user_group']
 tez_env_sh_template = config['configurations']['tez-env']['content']
 
 import functools
-# Create partial functions with common arguments for every HdfsDirectory call
-# to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
-  security_enabled=security_enabled,
-  keytab=hdfs_user_keytab,
-  kinit_path_local=kinit_path_local,
-  bin_dir=hadoop_bin_dir
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
+  security_enabled = security_enabled,
+  keytab = hdfs_user_keytab,
+  kinit_path_local = kinit_path_local,
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
index 6786eba..abff479 100644
--- 
a/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/TEZ/0.4.0.2.1/package/scripts/service_check.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 
@@ -38,53 +37,27 @@ class TezServiceCheckLinux(TezServiceCheck):
       hdp_version = functions.get_hdp_version("hadoop-client")
 
     path_to_tez_jar = format(params.path_to_tez_examples_jar)
-    copy_test_file_to_hdfs_cmd =  format("fs -put {tmp_dir}/sample-tez-test /tmp/tezsmokeinput/")
-    create_input_dir_cmd = format("fs -mkdir /tmp/tezsmokeinput")
     wordcount_command = format("jar {path_to_tez_jar} orderedwordcount "
                                "/tmp/tezsmokeinput/sample-tez-test 
/tmp/tezsmokeoutput/")
     test_command = format("fs -test -e /tmp/tezsmokeoutput/_SUCCESS")
-    remove_output_input_dirs_cmd = "fs -rm -r -f /tmp/tezsmokeinput /tmp/tezsmokeoutput"
-
-
-    ExecuteHadoop( remove_output_input_dirs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   # for kinit run
-                   keytab = params.smoke_user_keytab,
-                   principal = params.smokeuser_principal,
-                   security_enabled = params.security_enabled,
-                   kinit_path_local = params.kinit_path_local,
-                   bin_dir = params.hadoop_bin_dir
-    )
-
-    params.HdfsDirectory("/tmp",
-                         action="create",
-                         owner=params.hdfs_user,
-                         mode=0777
-    )
-
-    ExecuteHadoop( create_input_dir_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
-    )
-
+    
     File( format("{tmp_dir}/sample-tez-test"),
           content = "foo\nbar\nfoo\nbar\nfoo",
           mode = 0755
     )
-
-    ExecuteHadoop( copy_test_file_to_hdfs_cmd,
-                   tries = 3,
-                   try_sleep = 5,
-                   user = params.smokeuser,
-                   conf_dir = params.hadoop_conf_dir,
-                   bin_dir = params.hadoop_bin_dir
+    
+    params.HdfsResource("/tmp/tezsmokeinput",
+                        action="create_on_execute",
+                        type="directory",
+                        owner=params.smokeuser,
+    )
+    params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
+                        action="create_on_execute",
+                        type="file",
+                        owner=params.smokeuser,
+                        source=format("{tmp_dir}/sample-tez-test"),
     )
+    params.HdfsResource(None, action="execute")
 
     ExecuteHadoop( wordcount_command,
                    tries = 3,

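Instead of shelling out to "fs -mkdir" and "fs -put", the Tez check now stages its sample input with a local File resource and hands that path to HdfsResource via source=. A condensed sketch of that flow, assuming the Ambari agent runtime, the resource_management wildcard import used throughout these scripts, and this patch's params module; the paths mirror the Tez check:

from resource_management import *   # provides File and format, as in these scripts
import params

def stage_tez_smoke_input(tmp_dir):
  local_sample = format("{tmp_dir}/sample-tez-test")
  # Write the sample input on the agent's local disk.
  File(local_sample,
       content="foo\nbar\nfoo\nbar\nfoo",
       mode=0755)
  # Queue the HDFS input directory and the upload of the local file.
  params.HdfsResource("/tmp/tezsmokeinput",
                      type="directory",
                      action="create_on_execute",
                      owner=params.smokeuser)
  params.HdfsResource("/tmp/tezsmokeinput/sample-tez-test",
                      type="file",
                      action="create_on_execute",
                      source=local_sample,
                      owner=params.smokeuser)
  # Apply both queued changes in one round trip.
  params.HdfsResource(None, action="execute")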
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
index f4dceb5..e625a0d 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py
@@ -21,7 +21,6 @@ Ambari Agent
 
 from resource_management import *
 from resource_management.libraries.functions import conf_select
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
 from resource_management.libraries.functions.format import format
 from resource_management.libraries.functions.security_commons import build_expectations, \
@@ -73,13 +72,33 @@ class HistoryServerDefault(HistoryServer):
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
       conf_select.select(params.stack_name, "hadoop", params.version)
       Execute(format("hdp-select set hadoop-mapreduce-historyserver 
{version}"))
-      copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
+
 
   def start(self, env, rolling_restart=False):
     import params
     env.set_params(params)
     self.configure(env) # FOR SECURITY
-    copy_tarballs_to_hdfs('mapreduce', 'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, params.user_group)
+    
+    if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+                          type="file",
+                          action="create_on_execute",
+                          source=params.mapreduce_tar_source,
+                          owner=params.hdfs_user,
+                          group=params.user_group,
+                          mode=0444,
+      )
+      params.HdfsResource(None, action="execute")
+
     service('historyserver', action='start', serviceName='mapreduce')
 
   def status(self, env):

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
index a18ca72..44015bf 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/install_jars.py
@@ -20,6 +20,7 @@ limitations under the License.
 
 from resource_management import *
 import os
+import glob
 
 def install_tez_jars():
   import params
@@ -29,25 +30,12 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsDirectory(hdfs_dir,
-                           action="create_delayed",
+      params.HdfsResource(hdfs_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.tez_user,
                            mode=0755
       )
-    pass
-    params.HdfsDirectory(None, action="create")
-
-    if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
-    else:
-      kinit_if_needed = ""
-
-    if kinit_if_needed:
-      Execute(kinit_if_needed,
-              user=params.tez_user,
-              path='/bin'
-      )
-    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -62,30 +50,34 @@ def install_tez_jars():
       pass
     pass
 
+    tez_jars = {}
     if app_dir_path:
-      for scr_file, dest_file in params.app_dir_files.iteritems():
-        CopyFromLocal(scr_file,
-                      mode=0755,
-                      owner=params.tez_user,
-                      dest_dir=app_dir_path,
-                      dest_file=dest_file,
-                      kinnit_if_needed=kinit_if_needed,
-                      hdfs_user=params.hdfs_user,
-                      hadoop_bin_dir=params.hadoop_bin_dir,
-                      hadoop_conf_dir=params.hadoop_conf_dir
-        )
-
+      tez_jars[params.tez_local_api_jars] = app_dir_path
     if lib_dir_path:
-      CopyFromLocal(params.tez_local_lib_jars,
-                    mode=0755,
-                    owner=params.tez_user,
-                    dest_dir=lib_dir_path,
-                    kinnit_if_needed=kinit_if_needed,
-                    hdfs_user=params.hdfs_user,
-                    hadoop_bin_dir=params.hadoop_bin_dir,
-                    hadoop_conf_dir=params.hadoop_conf_dir
-      )
-    pass
+      tez_jars[params.tez_local_lib_jars] = lib_dir_path
+
+    for src_file_regex, dest_dir in tez_jars.iteritems():
+      for src_filepath in glob.glob(src_file_regex):
+        src_filename = os.path.basename(src_filepath)
+        params.HdfsResource(format("{dest_dir}/{src_filename}"),
+                            type="file",
+                            action="create_on_execute",
+                            source=src_filepath,
+                            mode=0755,
+                            owner=params.tez_user
+         )
+        
+    for src_file_regex, dest_dir in tez_jars.iteritems():
+      for src_filepath in glob.glob(src_file_regex):
+        src_filename = os.path.basename(src_filepath)
+        params.HdfsResource(format("{dest_dir}/{src_filename}"),
+                            type="file",
+                            action="create_on_execute",
+                            source=src_filepath,
+                            mode=0755,
+                            owner=params.tez_user
+         )
+    params.HdfsResource(None, action="execute")
 
 
 def get_tez_hdfs_dir_paths(tez_lib_uris = None):

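install_tez_jars now expands the local jar patterns with glob and queues one HdfsResource upload per matched file, finishing with a single execute. A trimmed sketch of that loop, assuming this patch's params module; the glob pattern and HDFS destination in the usage comment are illustrative, not the stack's real paths:

import glob
import os
from resource_management.libraries.functions.format import format
import params

def upload_jars(src_pattern, dest_dir):
  for src_filepath in glob.glob(src_pattern):
    src_filename = os.path.basename(src_filepath)
    # Queue each matched jar as an HDFS file backed by its local copy.
    params.HdfsResource(format("{dest_dir}/{src_filename}"),
                        type="file",
                        action="create_on_execute",
                        source=src_filepath,
                        mode=0755,
                        owner=params.tez_user)
  # All queued uploads are applied in one batch.
  params.HdfsResource(None, action="execute")

# e.g. upload_jars("/usr/lib/tez/*.jar", "/apps/tez/lib")  -- illustrative paths only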
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
index 6d17aca..35fd71c 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/mapred_service_check.py
@@ -117,34 +117,23 @@ class MapReduce2ServiceCheckDefault(MapReduce2ServiceCheck):
     input_file = format("/user/{smokeuser}/mapredsmokeinput")
     output_file = format("/user/{smokeuser}/mapredsmokeoutput")
 
-    cleanup_cmd = format("fs -rm -r -f {output_file} {input_file}")
-    create_file_cmd = format("fs -put /etc/passwd {input_file}")
     test_cmd = format("fs -test -e {output_file}")
     run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
 
+    params.HdfsResource(input_file,
+                        action = "create_on_execute",
+                        type = "file",
+                        source = "/etc/passwd",
+    )
+    params.HdfsResource(None, action="execute")
+
     if params.security_enabled:
       kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};")
 
       Execute(kinit_cmd,
               user=params.smokeuser
       )
-
-    ExecuteHadoop(cleanup_cmd,
-                  tries=1,
-                  try_sleep=5,
-                  user=params.smokeuser,
-                  bin_dir=params.execute_path,
-                  conf_dir=params.hadoop_conf_dir
-    )
-
-    ExecuteHadoop(create_file_cmd,
-                  tries=1,
-                  try_sleep=5,
-                  user=params.smokeuser,
-                  bin_dir=params.execute_path,
-                  conf_dir=params.hadoop_conf_dir
-    )
-
+      
     ExecuteHadoop(run_wordcount_job,
                   tries=1,
                   try_sleep=5,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
index 6e43192..5dd21b1 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/params_linux.py
@@ -21,12 +21,13 @@ Ambari Agent
 import os
 
 from resource_management.libraries.functions import conf_select
+from resource_management import *
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries import functions
 
 import status_params
 
@@ -52,7 +53,8 @@ stack_name = default("/hostLevelParams/stack_name", None)
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
+hdp_stack_version = functions.get_hdp_version('hadoop-yarn-resourcemanager')
 
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
@@ -72,7 +74,6 @@ yarn_container_bin = "/usr/lib/hadoop-yarn/bin"
 
 # hadoop parameters for 2.2+
 if Script.is_hdp_stack_greater_or_equal("2.2"):
-
   # MapR directory root
   mapred_role_root = "hadoop-mapreduce-client"
   command_role = default("/role", "")
@@ -90,6 +91,15 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
   hadoop_yarn_home = format("/usr/hdp/current/{yarn_role_root}")
   yarn_bin = format("/usr/hdp/current/{yarn_role_root}/sbin")
   yarn_container_bin = format("/usr/hdp/current/{yarn_role_root}/bin")
+  
+  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
+  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
+
+  # the configuration directory for HDFS/YARN/MapR is the hadoop config
+  # directory, which is symlinked by hadoop-client only
+  hadoop_conf_dir = "/usr/hdp/current/hadoop-client/conf"
+  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
+  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
 
 
 limits_conf_dir = "/etc/security/limits.d"
@@ -217,17 +227,17 @@ tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
 update_exclude_file_only = default("/commandParams/update_exclude_file_only",False)
 
 mapred_tt_group = default("/configurations/mapred-site/mapreduce.tasktracker.group", user_group)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
index 9d5fb97..df41938 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/resourcemanager.py
@@ -22,7 +22,6 @@ Ambari Agent
 from resource_management import *
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions.version import compare_versions, format_hdp_stack_version
-from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from resource_management.libraries.functions.security_commons import build_expectations, \
  cached_kinit_executor, get_params_from_filesystem, validate_security_config_properties, \
   FILE_TYPE_XML
@@ -32,6 +31,7 @@ from service import service
 from ambari_commons import OSConst
 from ambari_commons.os_family_impl import OsFamilyImpl
 from setup_ranger_yarn import setup_ranger_yarn
+import os
 
 
 class Resourcemanager(Script):
@@ -104,11 +104,19 @@ class ResourcemanagerDefault(Resourcemanager):
     self.configure(env) # FOR SECURITY
     if params.is_supported_yarn_ranger:
       setup_ranger_yarn() #Ranger Yarn Plugin related calls 
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.1') == 0:
+    if not Script.is_hdp_stack_greater_or_equal("2.2"):
       install_tez_jars()
     else:
       # will work only for stack versions >=2.2
-      copy_tarballs_to_hdfs('tez', 'hadoop-yarn-resourcemanager', params.tez_user, params.hdfs_user, params.user_group)
+      if os.path.exists(params.tez_tar_source):
+        params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
+                            type="file",
+                            action="create_on_execute",
+                            source=params.tez_tar_source,
+                            group=params.user_group,
+                            owner=params.hdfs_user
+        )
+      params.HdfsResource(None, action="execute")
     service('resourcemanager', action='start')
 
   def status(self, env):

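For stacks at 2.2 and later, the ResourceManager start now pushes the Tez tarball itself, but it only queues the copy when the local tarball exists; the closing execute runs either way, so anything already queued is still flushed. A condensed sketch, assuming this patch's params module, where tez_tar_source and tez_tar_destination are the cluster-env values added above and the destination is a template string rendered with InlineTemplate, as in the hunk:

import os
from resource_management import *   # provides InlineTemplate, as in these scripts
import params

def upload_tez_tarball_if_present():
  if os.path.exists(params.tez_tar_source):
    # Render the destination template before handing it to HdfsResource.
    dest = InlineTemplate(params.tez_tar_destination).get_content()
    params.HdfsResource(dest,
                        type="file",
                        action="create_on_execute",
                        source=params.tez_tar_source,
                        group=params.user_group,
                        owner=params.hdfs_user)
  # Flush the queue; a no-op if nothing was queued.
  params.HdfsResource(None, action="execute")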
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
index 81da809..8e378b5 100644
--- 
a/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
+++ 
b/ambari-server/src/main/resources/common-services/YARN/2.1.0.2.0/package/scripts/service_check.py
@@ -84,7 +84,7 @@ class ServiceCheckDefault(ServiceCheck):
     import params
     env.set_params(params)
 
-    if params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >= 0:
+    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >= 0:
       path_to_distributed_shell_jar = "/usr/hdp/current/hadoop-yarn-client/hadoop-yarn-applications-distributedshell.jar"
     else:
       path_to_distributed_shell_jar = "/usr/lib/hadoop-yarn/hadoop-yarn-applications-distributedshell*.jar"
