AMBARI-11034. Implement HdfsResources and its usage (aonishuk)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/c2f35d48
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/c2f35d48
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/c2f35d48

Branch: refs/heads/trunk
Commit: c2f35d48976aa02d73a34a56be256dbd8946fa23
Parents: 6f6c383
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Tue May 12 17:51:33 2015 +0300
Committer: Andrew Onishuk <aonis...@hortonworks.com>
Committed: Tue May 12 17:52:50 2015 +0300

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   7 +
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py    |  68 ---
 .../TestPropertiesFileResource.py               |  10 +-
 .../TestRepositoryResource.py                   |   6 +-
 .../TestXmlConfigResource.py                    |   8 +-
 .../python/resource_management/core/source.py   |  11 +-
 .../libraries/functions/__init__.py             |   1 -
 .../libraries/functions/get_hdp_version.py      |   8 +-
 .../libraries/providers/__init__.py             |   3 +-
 .../libraries/providers/copy_from_local.py      |  94 ----
 .../libraries/providers/hdfs_directory.py       | 112 -----
 .../libraries/providers/hdfs_resource.py        | 118 +++++
 .../libraries/resources/__init__.py             |   3 +-
 .../libraries/resources/copy_from_local.py      |  41 --
 .../libraries/resources/hdfs_directory.py       |  45 --
 .../libraries/resources/hdfs_resource.py        |  76 ++++
 .../libraries/script/script.py                  |   2 +
 .../1.6.1.2.2.0/package/scripts/accumulo.py     |   6 +-
 .../package/scripts/accumulo_configuration.py   |  14 +-
 .../1.6.1.2.2.0/package/scripts/params.py       |  17 +-
 .../0.1.0/package/scripts/hbase.py              |  14 +-
 .../0.1.0/package/scripts/params.py             |  16 +-
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |  23 +-
 .../0.5.0.2.1/package/scripts/params_linux.py   |  20 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  14 +-
 .../0.96.0.2.0/package/scripts/params_linux.py  |  22 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   1 -
 .../package/files/fast-hdfs-resource.jar        | Bin 0 -> 19285282 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |   5 +
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  15 +-
 .../package/scripts/hdfs_nfsgateway.py          |   1 -
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   1 -
 .../2.1.0.2.0/package/scripts/params_linux.py   |  20 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  53 +--
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  22 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     | 116 +++--
 .../0.12.0.2.0/package/scripts/hive_server.py   |  13 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  64 ++-
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  71 ---
 .../package/scripts/webhcat_service_check.py    |  28 +-
 .../package/templates/templeton_smoke.pig.j2    |  24 +
 .../MAHOUT/1.0.0.2.3/package/scripts/params.py  |  17 +-
 .../1.0.0.2.3/package/scripts/service_check.py  |  49 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh      |  52 +--
 .../files/prepareOozieHdfsDirectories.sh        |  45 ++
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   6 +-
 .../package/scripts/oozie_server_upgrade.py     |  16 +-
 .../4.0.0.2.0/package/scripts/params_linux.py   |  22 +-
 .../4.0.0.2.0/package/scripts/service_check.py  |  40 +-
 .../0.12.0.2.0/package/scripts/params_linux.py  |  25 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  67 +--
 .../package/scripts/job_history_server.py       |  10 +-
 .../SPARK/1.2.0.2.2/package/scripts/params.py   |  21 +-
 .../1.2.0.2.2/package/scripts/setup_spark.py    |   6 +-
 .../1.2.0.2.2/package/scripts/spark_service.py  |   9 +-
 .../STORM/0.9.1.2.1/configuration/storm-env.xml |  12 +-
 .../0.4.0.2.1/package/scripts/params_linux.py   |  25 +-
 .../0.4.0.2.1/package/scripts/service_check.py  |  53 +--
 .../2.1.0.2.0/package/scripts/historyserver.py  |  25 +-
 .../2.1.0.2.0/package/scripts/install_jars.py   |  68 ++-
 .../package/scripts/mapred_service_check.py     |  27 +-
 .../2.1.0.2.0/package/scripts/params_linux.py   |  32 +-
 .../package/scripts/resourcemanager.py          |  14 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |   2 +-
 .../YARN/2.1.0.2.0/package/scripts/yarn.py      |  38 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 192 ++++----
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  91 ----
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 +
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   6 +
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 448 ++++++++++---------
 .../python/stacks/2.0.6/HDFS/test_nfsgateway.py |   6 +
 .../stacks/2.0.6/HDFS/test_service_check.py     |  59 +--
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |   6 +
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  12 +
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 329 +++++++++-----
 .../2.0.6/HIVE/test_hive_service_check.py       | 106 ++++-
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 134 ------
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     | 178 ++++++--
 .../stacks/2.0.6/OOZIE/test_service_check.py    |  45 +-
 .../stacks/2.0.6/PIG/test_pig_service_check.py  |  84 +++-
 .../stacks/2.0.6/YARN/test_historyserver.py     | 282 ++++++------
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   4 +
 .../2.0.6/YARN/test_mapreduce2_service_check.py |  64 +--
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 138 +-----
 .../stacks/2.0.6/YARN/test_resourcemanager.py   |  40 +-
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   4 +
 .../python/stacks/2.0.6/configs/default.json    |  13 +-
 .../python/stacks/2.0.6/configs/secured.json    |  13 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  47 +-
 .../stacks/2.1/FALCON/test_service_check.py     |   2 +-
 .../python/stacks/2.1/TEZ/test_service_check.py | 100 ++---
 .../stacks/2.1/YARN/test_apptimelineserver.py   |   1 +
 .../stacks/2.2/PIG/test_pig_service_check.py    | 121 +++--
 .../stacks/2.2/SPARK/test_job_history_server.py |  88 +++-
 .../test/python/stacks/2.2/configs/default.json |  13 +-
 .../test/python/stacks/2.2/configs/secured.json |  13 +-
 .../2.3/MAHOUT/test_mahout_service_check.py     |  63 +--
 .../dependency-reduced-pom.xml                  |  42 ++
 contrib/fast-hdfs-resource/pom.xml              |  86 ++++
 .../fast-hdfs-resource/resources/example.json   |  57 +++
 .../resources/test_perfomance.sh                |  46 ++
 .../ambari/fast_hdfs_resource/Resource.java     | 295 ++++++++++++
 .../ambari/fast_hdfs_resource/Runner.java       |  93 ++++
 104 files changed, 3063 insertions(+), 2144 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index 4dde1d9..b807ba9 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -426,6 +426,12 @@
                 </source>
               </sources>
             </mapping>
+            <mapping>
+              <directory>/var/lib/ambari-agent/lib</directory>
+              <filemode>755</filemode>
+              <username>root</username>
+              <groupname>root</groupname>
+            </mapping>
           </mappings>
         </configuration>
       </plugin>
@@ -551,6 +557,7 @@
                 <path>/var/lib/${project.artifactId}/data/tmp</path>
                 <path>/var/lib/${project.artifactId}/keys</path>
                 <path>${package.log.dir}</path>
+                <path>/var/lib/${project.artifactId}/lib</path>
               </paths>
               <mapper>
                 <type>perm</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/src/test/python/resource_management/TestContentSources.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestContentSources.py 
b/ambari-agent/src/test/python/resource_management/TestContentSources.py
index d35ec9a..c144cf9 100644
--- a/ambari-agent/src/test/python/resource_management/TestContentSources.py
+++ b/ambari-agent/src/test/python/resource_management/TestContentSources.py
@@ -221,7 +221,7 @@ class TestContentSources(TestCase):
       content = template.get_content()
     self.assertEqual(open_mock.call_count, 1)
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
     open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
     self.assertEqual(getmtime_mock.call_count, 1)
     getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -234,7 +234,7 @@ class TestContentSources(TestCase):
       template = InlineTemplate("{{test_arg1}} template content", [], 
test_arg1 = "test")
       content = template.get_content()
 
-    self.assertEqual(u'test template content\n', content)
+    self.assertEqual(u'test template content', content)
 
   def test_template_imports(self):
     """
@@ -250,4 +250,4 @@ class TestContentSources(TestCase):
     with Environment("/base") as env:
       template = InlineTemplate("{{test_arg1}} template content 
{{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = 
["/one","two"])
       content = template.get_content()
-    self.assertEqual(u'test template content /one/two\n', content)
+    self.assertEqual(u'test template content /one/two', content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py 
b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
deleted file mode 100644
index 1ec1858..0000000
--- a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
+++ /dev/null
@@ -1,68 +0,0 @@
-'''
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-'''
-
-from unittest import TestCase
-from mock.mock import patch, MagicMock
-from resource_management import *
-from resource_management.core import shell
-
-@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
-@patch.object(System, "os_family", new = 'redhat')
-class TestCopyFromLocal(TestCase):
-
-  
@patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_default_args(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        owner='user1',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 2)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      print call_arg_list[0][0][0].arguments
-      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls 
/apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1 /apps/test//*.files', 
call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-
-  
@patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
-  def test_run_with_chmod(self, execute_hadoop_mock):
-    with Environment() as env:
-      CopyFromLocal('/user/testdir/*.files',
-        mode=0655,
-        owner='user1',
-        group='hdfs',
-        dest_dir='/apps/test/',
-        kinnit_if_needed='',
-        hdfs_user='hdfs'
-      )
-      self.assertEqual(execute_hadoop_mock.call_count, 3)
-      call_arg_list = execute_hadoop_mock.call_args_list
-      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
-                       call_arg_list[0][0][0].command)
-      self.assertEquals({'not_if': "ambari-sudo.sh su user1 -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls 
/apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
-                        call_arg_list[0][0][0].arguments)
-      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', 
call_arg_list[1][0][0].command)
-      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
 
b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
index bb91159..5c97380 100644
--- 
a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
+++ 
b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={}
       )
 
-    
create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', 
u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n', 
encoding=None)
+    
create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', 
u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={},
       )
 
-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated 
by Apache Ambari. Some other day\n    \n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated 
by Apache Ambari. Some other day\n    \n    ', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={'property1': 'value1'},
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty1=value1\n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty1=value1\n    ', encoding=None)
     ensure_mock.assert_called()
 
 
@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                      },
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by 
Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, 
openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - 
%m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n", encoding=None)
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by 
Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, 
openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - 
%m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ", encoding=None)
     ensure_mock.assert_called()
 
 
@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
       )
 
     read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty_1=value1\n    \n', encoding=None)
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty_1=value1\n    ', encoding=None)
     ensure_mock.assert_called()

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py 
b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
index 6b94481..c31cc20 100644
--- a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
@@ -164,7 +164,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c')
       
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', 
content=StaticFile('/tmp/1.txt'))")
@@ -205,7 +205,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
 
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c')
 
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', 
content=StaticFile('/tmp/1.txt'))")
@@ -239,7 +239,7 @@ class TestRepositoryResource(TestCase):
       template_content = call_content[1]['content']
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c\n')
+      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c')
       
       self.assertEqual(file_mock.call_count, 1)
       self.assertEqual(execute_mock.call_count, 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
----------------------------------------------------------------------
diff --git 
a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py 
b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
index a7eaae9..6092717 100644
--- a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n  </configuration>', encoding='UTF-8')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={'attr': {'property1': 'attr_value'}}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      
<name>property1</name>\n      <value>value1</value>\n      
<attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n', 
encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      
<name>property1</name>\n      <value>value1</value>\n      
<attr>attr_value</attr>\n    </property>\n    \n  </configuration>', 
encoding='UTF-8')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,7 +144,7 @@ class TestXmlConfigResource(TestCase):
                     }
                 })
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      
<attr1>x</attr1>\n    </property>\n    \n    <property>\n      
<name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    
\n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p 
%c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n   
 <property>\n      <name>prop.4</name>\n      
<value>${oozie.log.dir}/oozie.log</value>\n      
<attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    
</property>\n    \n    <property>\n      <name>prop.empty</name>\n      
<value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n 
   \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      
<attr1>x</attr1>\n    </property>\n    \n    <property>\n      
<name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    
\n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p 
%c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n   
 <property>\n      <name>prop.4</name>\n      
<value>${oozie.log.dir}/oozie.log</value>\n      
<attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    
</property>\n    \n    <property>\n      <name>prop.empty</name>\n      
<value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n 
   \n  </configuration>', encoding='UTF-8')
 
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch.object(sudo, "create_file")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>first</name>\n      <value>should be first</value>\n    </property>\n    
\n    <property>\n      <name>second</name>\n      <value>should be 
second</value>\n    </property>\n    \n    <property>\n      
<name>third</name>\n      <value>should be third</value>\n    </property>\n    
\n    <property>\n      <name>z_last</name>\n      <value>should be 
last</value>\n    </property>\n    \n  </configuration>\n', encoding='UTF-8')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>first</name>\n      <value>should be first</value>\n    </property>\n    
\n    <property>\n      <name>second</name>\n      <value>should be 
second</value>\n    </property>\n    \n    <property>\n      
<name>third</name>\n      <value>should be third</value>\n    </property>\n    
\n    <property>\n      <name>z_last</name>\n      <value>should be 
last</value>\n    </property>\n    \n  </configuration>', encoding='UTF-8')
 
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch.object(sudo, "path_exists")

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/core/source.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/source.py 
b/ambari-common/src/main/python/resource_management/core/source.py
index 9d1fc76..4eecc7d 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -141,7 +141,7 @@ else:
       self.context.update(variables)
       
       rendered = self.template.render(self.context)
-      return rendered + "\n" if not rendered.endswith('\n') else rendered
+      return rendered
     
   class InlineTemplate(Template):
     def __init__(self, name, extra_imports=[], **kwargs):
@@ -189,9 +189,14 @@ class DownloadSource(Source):
         opener = urllib2.build_opener()
       
       req = urllib2.Request(self.url)
-      web_file = opener.open(req)
+      
+      try:
+        web_file = opener.open(req)
+      except urllib2.HTTPError as ex:
+        raise Fail("Failed to download file from {0} due to HTTP error: 
{1}".format(self.url, str(ex)))
+      
       content = web_file.read()
-
+      
       if self.cache:
         with open(filepath, 'w') as fp:
           fp.write(content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index e06d246..ae6e47b 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -40,7 +40,6 @@ from 
resource_management.libraries.functions.format_jvm_option import *
 from resource_management.libraries.functions.constants import *
 from resource_management.libraries.functions.get_hdp_version import *
 from resource_management.libraries.functions.get_lzo_packages import *
-from resource_management.libraries.functions.dynamic_variable_interpretation 
import *
 from resource_management.libraries.functions.setup_ranger_plugin import *
 
 IS_WINDOWS = platform.system() == "Windows"

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
 
b/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
index ed34c02..ec30dc4 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/functions/get_hdp_version.py
@@ -32,6 +32,11 @@ def get_hdp_version(package_name):
   """
   @param package_name, name of the package, from which, function will try to 
get hdp version
   """
+  
+  if not os.path.exists("/usr/bin/hdp-select"):
+    Logger.info('Skipping get_hdp_version since hdp-select is not yet 
available')
+    return None # lazy fail
+  
   try:
     command = 'hdp-select status ' + package_name
     return_code, hdp_output = shell.call(command, timeout=20)
@@ -48,6 +53,7 @@ def get_hdp_version(package_name):
   match = re.match('[0-9]+.[0-9]+.[0-9]+.[0-9]+-[0-9]+', hdp_version)
 
   if match is None:
-    raise Fail('Failed to get extracted version')
+    Logger.info('Failed to get extracted version with hdp-select')
+    return None # lazy fail
 
   return hdp_version

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 34b10a9..44e9ca1 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -42,8 +42,7 @@ PROVIDERS = dict(
     
XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     
PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     
MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    
HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
-    
CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider",
+    
HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
     
ModifyPropertiesFile="resource_management.libraries.providers.modify_properties_file.ModifyPropertiesFileProvider"
   ),
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
deleted file mode 100644
index bd9805c..0000000
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-import os
-from resource_management.libraries.resources.execute_hadoop import 
ExecuteHadoop
-from resource_management.core.providers import Provider
-from resource_management.libraries.functions.format import format
-from resource_management.core.shell import as_user
-from resource_management.core.resources.system import Execute
-
-class CopyFromLocalProvider(Provider):
-  def action_run(self):
-
-    path = self.resource.path
-    dest_dir = self.resource.dest_dir
-    dest_file = self.resource.dest_file
-    kinnit_if_needed = self.resource.kinnit_if_needed
-    user = self.resource.user   # user to perform commands as. If not 
provided, default to the owner
-    owner = self.resource.owner
-    group = self.resource.group
-    mode = self.resource.mode
-    hdfs_usr=self.resource.hdfs_user
-    hadoop_conf_path = self.resource.hadoop_conf_dir
-    bin_dir = self.resource.hadoop_bin_dir
-
-
-    if dest_file:
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
-      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else 
dest_dir + os.sep + dest_file
-    else:
-      dest_file_name = os.path.split(path)[1]
-      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
-      dest_path = dest_dir + os.sep + dest_file_name
-    # Need to run unless as resource user
-    
-    if kinnit_if_needed:
-      Execute(kinnit_if_needed, 
-              user=user if user else owner,
-      )
-    
-    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls 
{dest_path}"), user if user else owner)
-
-    ExecuteHadoop(copy_cmd,
-                  not_if=unless_cmd,
-                  user=user if user else owner,
-                  bin_dir=bin_dir,
-                  conf_dir=hadoop_conf_path
-                  )
-
-    if not owner:
-      chown = None
-    else:
-      if not group:
-        chown = owner
-      else:
-        chown = format('{owner}:{group}')
-
-    if chown:
-      chown_cmd = format("fs -chown {chown} {dest_path}")
-
-      ExecuteHadoop(chown_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass
-
-    if mode:
-      dir_mode = oct(mode)[1:]
-      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
-
-      ExecuteHadoop(chmod_cmd,
-                    user=hdfs_usr,
-                    bin_dir=bin_dir,
-                    conf_dir=hadoop_conf_path)
-    pass

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
deleted file mode 100644
index 9fa2de7..0000000
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import os
-
-from resource_management import *
-directories_list = [] #direcotries list for mkdir
-chmod_map = {} #(mode,recursive):dir_list map
-chown_map = {} #(owner,group,recursive):dir_list map
-class HdfsDirectoryProvider(Provider):
-  def action_create_delayed(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    if not self.resource.dir_name:
-      return
-
-    dir_name = self.resource.dir_name
-    dir_owner = self.resource.owner
-    dir_group = self.resource.group
-    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
-    directories_list.append(self.resource.dir_name)
-
-    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
-    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
-    # grouping directories by mode/owner/group to modify them in one 'chXXX' 
call
-    if dir_mode:
-      chmod_key = (dir_mode,recursive_chmod_str)
-      if chmod_map.has_key(chmod_key):
-        chmod_map[chmod_key].append(dir_name)
-      else:
-        chmod_map[chmod_key] = [dir_name]
-
-    if dir_owner:
-      owner_key = (dir_owner,dir_group,recursive_chown_str)
-      if chown_map.has_key(owner_key):
-        chown_map[owner_key].append(dir_name)
-      else:
-        chown_map[owner_key] = [dir_name]
-
-  def action_create(self):
-    global delayed_directories
-    global chmod_map
-    global chown_map
-
-    self.action_create_delayed()
-
-    hdp_conf_dir = self.resource.conf_dir
-    hdp_hdfs_user = self.resource.hdfs_user
-    secured = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    bin_dir = self.resource.bin_dir
-
-    chmod_commands = []
-    chown_commands = []
-
-    for chmod_key, chmod_dirs in chmod_map.items():
-      mode = chmod_key[0]
-      recursive = chmod_key[1]
-      chmod_dirs_str = ' '.join(chmod_dirs)
-      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod 
{recursive} {mode} {chmod_dirs_str}"))
-
-    for chown_key, chown_dirs in chown_map.items():
-      owner = chown_key[0]
-      group = chown_key[1]
-      recursive = chown_key[2]
-      chown_dirs_str = ' '.join(chown_dirs)
-      if owner:
-        chown = owner
-        if group:
-          chown = format("{owner}:{group}")
-        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown 
{recursive} {chown} {chown_dirs_str}"))
-
-    if secured:
-        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-                user=hdp_hdfs_user)
-    #create all directories in one 'mkdir' call
-    dir_list_str = ' '.join(directories_list)
-    #for hadoop 2 we need to specify -p to create directories recursively
-    parent_flag = '-p'
-
-    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} 
{dir_list_str} && {chmod_cmd} && {chown_cmd}',
-                   chmod_cmd=' && '.join(chmod_commands),
-                   chown_cmd=' && '.join(chown_commands)),
-            user=hdp_hdfs_user,
-            path=bin_dir,
-            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls 
{dir_list_str}"), hdp_hdfs_user)
-    )
-
-    directories_list[:] = []
-    chmod_map.clear()
-    chown_map.clear()

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
new file mode 100644
index 0000000..21ad8ed
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import json
+from resource_management.core.environment import Environment
+from resource_management.core.base import Fail
+from resource_management.core.resources.system import Execute
+from resource_management.core.resources.system import File
+from resource_management.core.providers import Provider
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions import format
+
+
+JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
+JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
+
+RESOURCE_TO_JSON_FIELDS = {
+  'target': 'target',
+  'type': 'type',
+  'action': 'action',
+  'source': 'source',
+  'owner': 'owner',
+  'group': 'group',
+  'mode': 'mode',
+  'recursive_chown': 'recursiveChown',
+  'recursive_chmod': 'recursiveChmod',
+  'change_permissions_for_parents': 'changePermissionforParents'
+}
+
+
+class HdfsResourceProvider(Provider):
+  def action_delayed(self, action_name):
+    resource = {}
+    env = Environment.get_instance()
+    if not 'hdfs_files' in env.config:
+      env.config['hdfs_files'] = []
+
+    # Check required parameters
+    if not self.resource.type or not self.resource.action:
+      raise Fail("Resource parameter type or action is not set.")
+
+    # Put values in dictionary-resource
+    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
+      if field_name == 'action':
+        resource[json_field_name] = action_name
+      elif field_name == 'mode' and self.resource.mode:
+        resource[json_field_name] = oct(self.resource.mode)[1:]
+      elif getattr(self.resource, field_name):
+        resource[json_field_name] = getattr(self.resource, field_name)
+
+    # Add resource to create
+    env.config['hdfs_files'].append(resource)
+
+  def action_create_on_execute(self):
+    self.action_delayed("create")
+
+  def action_delete_on_execute(self):
+    self.action_delayed("delete")
+
+  def action_execute(self):
+    env = Environment.get_instance()
+
+    # Check required parameters
+    if not self.resource.user:
+      raise Fail("Resource parameter 'user' is not set.")
+
+    if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
+      Logger.info("No resources to create. 'create_on_execute' or 
'delete_on_execute' wasn't triggered before this 'execute' action.")
+      return
+
+    hadoop_bin_dir = self.resource.hadoop_bin_dir
+    hadoop_conf_dir = self.resource.hadoop_conf_dir
+    user = self.resource.user
+    security_enabled = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    logoutput = self.resource.logoutput
+    jar_path=JAR_PATH
+    json_path=JSON_PATH
+
+    if security_enabled:
+      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+              user=user
+      )
+
+    # Write json file to disk
+    File(JSON_PATH,
+         owner = user,
+         content = json.dumps(env.config['hdfs_files'])
+    )
+
+    # Execute jar to create/delete resources in hadoop
+    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} 
{json_path}"),
+            user=user,
+            path=[hadoop_bin_dir],
+            logoutput=logoutput,
+    )
+
+    # Clean
+    env.config['hdfs_files'] = []
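
For orientation, the provider above only queues work: each delayed action appends a
dictionary keyed by the JSON field names in RESOURCE_TO_JSON_FIELDS to
env.config['hdfs_files'], and action_execute then writes json.dumps() of that list to
/var/lib/ambari-agent/data/hdfs_resources.json and invokes the bundled
fast-hdfs-resource.jar once via "hadoop --config <conf_dir> jar <jar> <json>".
A minimal sketch of one queued entry, assuming a hypothetical
HdfsResource("/user/test", type="directory", action="create_on_execute", owner="test",
mode=0755) call (the path, owner and mode are illustrative, not taken from this commit):

    # entry appended by action_delayed("create"); mode is rendered via oct(mode)[1:]
    {
      "target": "/user/test",
      "type": "directory",
      "action": "create",
      "owner": "test",
      "mode": "755"
    }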

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
 
b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 596c2e2..524292f 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -26,7 +26,6 @@ from resource_management.libraries.resources.xml_config 
import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_directory import *
-from resource_management.libraries.resources.copy_from_local import *
+from resource_management.libraries.resources.hdfs_resource import *
 from resource_management.libraries.resources.msi import *
 from resource_management.libraries.resources.modify_properties_file import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
 
b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
deleted file mode 100644
index 42050a3..0000000
--- 
a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["CopyFromLocal"]
-from resource_management.core.base import Resource, ForcedListArgument, 
ResourceArgument, BooleanArgument
-
-class CopyFromLocal(Resource):
-  action = ForcedListArgument(default="run")
-
-  path = ResourceArgument(default=lambda obj: obj.name)
-  dest_dir = ResourceArgument(required=True)
-  dest_file = ResourceArgument()
-  owner = ResourceArgument(required=True)  # file user owner
-  group = ResourceArgument()               # file group user
-  mode = ResourceArgument()                # file ACL mode
-  kinnit_if_needed = ResourceArgument(default='')
-  user = ResourceArgument()                # user to perform commands as. If 
not provided, default to the owner
-  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
-  hdfs_user = ResourceArgument(default='hdfs')
-  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
-
-  actions = Resource.actions + ["run"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
 
b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
deleted file mode 100644
index 7888cd8..0000000
--- 
a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsDirectory"]
-from resource_management.core.base import Resource, ForcedListArgument, 
ResourceArgument, BooleanArgument
-
-class HdfsDirectory(Resource):
-  action = ForcedListArgument()
-
-  dir_name = ResourceArgument(default=lambda obj: obj.name)
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  conf_dir = ResourceArgument()
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  hdfs_user = ResourceArgument()
-  bin_dir = ResourceArgument(default="")
-
-  #action 'create' immediately creates all pending directory in efficient 
manner
-  #action 'create_delayed' add directory to list of pending directories
-  actions = Resource.actions + ["create","create_delayed"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
 
b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
new file mode 100644
index 0000000..92a043f
--- /dev/null
+++ 
b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+__all__ = ["HdfsResource"]
+from resource_management.core.base import Resource, ForcedListArgument, 
ResourceArgument, BooleanArgument
+
+"""
+Calling many separate hadoop commands is slow, because every call opens a new
+connection to the namenode and datanodes.
+
+This resource instead gathers the directories/files to create/delete/copyFromLocal
+and applies all of them with a single call.
+
+action = create_on_execute / delete_on_execute queue information about what you
+want to create or delete.
+
+Once everything is gathered, run action = execute to perform the delayed actions.
+
+The resource is a replacement for the following operations:
+  1) hadoop fs -rmr
+  2) hadoop fs -copyFromLocal
+  3) hadoop fs -put
+  4) hadoop fs -mkdir
+  5) hadoop fs -touchz
+  6) hadoop fs -chmod
+  7) hadoop fs -chown
+"""
+
+
+class HdfsResource(Resource):
+  # Required: {target, type, action}
+  # path to hadoop file/directory
+  target = ResourceArgument(default=lambda obj: obj.name)
+  # "directory" or "file"
+  type = ResourceArgument()
+  # "create_delayed" or "delete_delayed" or "execute"
+  action = ForcedListArgument()
+  # if present, copies a file/directory from the local path {source} to the hadoop path {target}
+  source = ResourceArgument()
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  logoutput = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+  change_permissions_for_parents = BooleanArgument(default=False)
+
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  user = ResourceArgument()
+  hadoop_bin_dir = ResourceArgument()
+  hadoop_conf_dir = ResourceArgument()
+
+  #action 'execute' immediately creates all pending files/directories in an efficient manner
+  #action 'create_on_execute/delete_on_execute' adds a file/directory to the list of pending resources
+  actions = Resource.actions + ["create_on_execute", "delete_on_execute", 
"execute"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index c9ba846..8de9247 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -297,6 +297,7 @@ class Script(object):
       return None
 
     stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
+
     if stack_version_unformatted is None or stack_version_unformatted == '':
       return None
 
@@ -325,6 +326,7 @@ class Script(object):
     :return: True if the command's stack is less than the specified version
     """
     hdp_stack_version = Script.get_hdp_stack_version()
+
     if hdp_stack_version is None:
       return False
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
index e974573..5e476a5 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo.py
@@ -75,13 +75,13 @@ def accumulo(name=None # 'master' or 'tserver' or 'client'
       owner=params.accumulo_user
     )
 
-  if name in ["master","tserver"]:
+  if name == "master":
     params.HdfsDirectory(format("{params.accumulo_hdfs_root_dir}"),
-                         action="create_delayed",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
     )
     params.HdfsDirectory(format("{params.accumulo_hdfs_stage_dir}"),
-                         action="create_delayed",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0751
     )

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
index 88d83d3..f26fdd0 100644
--- 
a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
+++ 
b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/accumulo_configuration.py
@@ -160,17 +160,19 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 
'monitor' or 'gc' or '
 
   # other server setup
   if name == 'master':
-    params.HdfsDirectory(format("/user/{params.accumulo_user}"),
-                         action="create_delayed",
+    params.HdfsResource(format("/user/{params.accumulo_user}"),
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsDirectory(format("{params.parent_dir}"),
-                         action="create_delayed",
+    params.HdfsResource(format("{params.parent_dir}"),
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.accumulo_user,
                          mode=0700
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
     if params.security_enabled and params.has_secure_user_auth:
       Execute( format("{params.kinit_cmd} "
                       "{params.daemon_script} init "
@@ -184,6 +186,7 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 
'monitor' or 'gc' or '
                                      "{params.hadoop_conf_dir} fs -stat "
                                      "{params.instance_volumes}"),
                               params.accumulo_user),
+               logoutput=True,
                user=params.accumulo_user)
     else:
       passfile = format("{params.exec_tmp_dir}/pass")
@@ -205,6 +208,7 @@ def setup_conf_dir(name=None): # 'master' or 'tserver' or 'monitor' or 'gc' or '
                                        "{params.hadoop_conf_dir} fs -stat "
                                        "{params.instance_volumes}"),
                                 params.accumulo_user),
+                 logoutput=True,
                  user=params.accumulo_user)
       finally:
         os.remove(passfile)

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
index ff5a874..99522c1 100644
--- a/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/ACCUMULO/1.6.1.2.2.0/package/scripts/params.py
@@ -18,11 +18,12 @@ limitations under the License.
 
 """
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 import status_params
 
@@ -147,14 +148,14 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
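Every params.py touched below repeats the same shape: functools.partial pre-binds the cluster-wide arguments (user, keytab, hadoop dirs) once, so individual call sites only pass what is specific to one path. A self-contained sketch of the mechanism, using a stand-in function instead of the real HdfsResource:

import functools

def hdfs_resource(path, user=None, keytab=None, **kwargs):
    # Stand-in for the real resource; just records what it was called with.
    return dict(path=path, user=user, keytab=keytab, **kwargs)

# Bind the arguments shared by every call once...
HdfsResource = functools.partial(hdfs_resource,
                                 user="hdfs",
                                 keytab="/etc/security/keytabs/hdfs.headless.keytab")

# ...so each call site only states the per-resource details.
HdfsResource("/user/accumulo", type="directory", action="create_on_execute", mode=0700)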

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
index cd9f8f9..009076c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase.py
@@ -167,23 +167,25 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
     hbase_TemplateConfig( format("hbase_client_jaas.conf"), user=params.hbase_user)
     hbase_TemplateConfig( format("ams_zookeeper_jaas.conf"), user=params.hbase_user)
 
-  if name in ["master","regionserver"]:
+  if name == "master":
 
     if params.is_hbase_distributed:
 
-      params.HdfsDirectory(params.hbase_root_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.hbase_root_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.hbase_user,
                            mode=0775
       )
 
-      params.HdfsDirectory(params.hbase_staging_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.hbase_staging_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.hbase_user,
                            mode=0711
       )
 
-      params.HdfsDirectory(None, action="create")
+      params.HdfsResource(None, action="execute")
 
     else:
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
index 7e516b8..4a63e3c 100644
--- a/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/params.py
@@ -185,17 +185,17 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 kinit_path_local = functions.get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 
 import functools
-# create partial functions with common arguments for every HdfsDirectory call
-# to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
 
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index ed9098c..86318f3 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -91,33 +91,36 @@ def falcon(type, action = None):
   if type == 'server':
     if action == 'config':
       if params.store_uri[0:4] == "hdfs":
-        params.HdfsDirectory(params.store_uri,
-                             action="create_delayed",
+        params.HdfsResource(params.store_uri,
+                             type="directory",
+                             action="create_on_execute",
                              owner=params.falcon_user,
                              mode=0755
         )
-      if params.store_uri[0:4] == "file":
+      elif params.store_uri[0:4] == "file":
         Directory(params.store_uri[7:],
                   owner=params.falcon_user,
                   recursive=True
         )
-      params.HdfsDirectory(params.flacon_apps_dir,
-                           action="create_delayed",
+      params.HdfsResource(params.flacon_apps_dir,
+                           type="directory",
+                           action="create_on_execute",
                            owner=params.falcon_user,
-                           mode=0777#TODO change to proper mode
+                           mode=0777 #TODO change to proper mode
       )
       if params.falcon_store_uri[0:4] == "hdfs":
-        params.HdfsDirectory(params.falcon_store_uri,
-                             action="create_delayed",
+        params.HdfsResource(params.falcon_store_uri,
+                             type="directory",
+                             action="create_on_execute",
                              owner=params.falcon_user,
                              mode=0755
         )
-      if params.falcon_store_uri[0:4] == "file":
+      elif params.falcon_store_uri[0:4] == "file":
         Directory(params.falcon_store_uri[7:],
                   owner=params.falcon_user,
                   recursive=True
         )
-      params.HdfsDirectory(None, action="create")
+      params.HdfsResource(None, action="execute")
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
index 9a5fdb5..9663d11 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params_linux.py
@@ -18,13 +18,14 @@ limitations under the License.
 """
 import status_params
 
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 
 config = Script.get_config()
 
@@ -101,14 +102,15 @@ hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_nam
 smokeuser_principal =  config['configurations']['cluster-env']['smokeuser_principal_name']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
-)
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
+ )
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index 9a35a24..1bfa7e4 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -163,17 +163,19 @@ def hbase(name=None):
       group=params.user_group,
       owner=params.hbase_user
     )
-  if name in ["master","regionserver"]:
-    params.HdfsDirectory(params.hbase_hdfs_root_dir,
-                         action="create_delayed",
+  if name == "master":
+    params.HdfsResource(params.hbase_hdfs_root_dir,
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.hbase_user
     )
-    params.HdfsDirectory(params.hbase_staging_dir,
-                         action="create_delayed",
+    params.HdfsResource(params.hbase_staging_dir,
+                         type="directory",
+                         action="create_on_execute",
                          owner=params.hbase_user,
                          mode=0711
     )
-    params.HdfsDirectory(None, action="create")
+    params.HdfsResource(None, action="execute")
 
 def hbase_TemplateConfig(name, tag=None):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
index 8f23bb2..c077f54 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params_linux.py
@@ -24,14 +24,16 @@ from functions import calc_xmn_from_xms
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions.version import format_hdp_stack_version
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_kinit_path
+from resource_management.libraries.functions import is_empty
 from resource_management.libraries.functions import get_unique_id_and_date
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+
 from resource_management.libraries.functions.substitute_vars import substitute_vars
 
 # server configurations
@@ -139,7 +141,9 @@ if security_enabled:
   _hostname_lowercase = config['hostname'].lower()
   master_jaas_princ = config['configurations']['hbase-site']['hbase.master.kerberos.principal'].replace('_HOST',_hostname_lowercase)
   regionserver_jaas_princ = config['configurations']['hbase-site']['hbase.regionserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
-  queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal'].replace('_HOST',_hostname_lowercase)
+  _queryserver_jaas_princ = config['configurations']['hbase-site']['phoenix.queryserver.kerberos.principal']
+  if not is_empty(_queryserver_jaas_princ):
+    queryserver_jaas_princ = _queryserver_jaas_princ.replace('_HOST',_hostname_lowercase)
 
 master_keytab_path = config['configurations']['hbase-site']['hbase.master.keytab.file']
 regionserver_keytab_path = config['configurations']['hbase-site']['hbase.regionserver.keytab.file']
@@ -168,16 +172,16 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete hdfs directory/file/copyfromlocal we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
 # ranger host
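Because phoenix.queryserver.kerberos.principal may be absent or empty, queryserver_jaas_princ is now only defined when the property is actually set, so downstream code should not assume the attribute exists. A hedged sketch of a consumer-side guard; module and attribute names follow the hunk above, the fallback value is purely illustrative:

import params

if params.security_enabled and hasattr(params, "queryserver_jaas_princ"):
    principal = params.queryserver_jaas_princ
else:
    principal = None  # Phoenix Query Server principal not configured on this cluster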

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index a60ebad..b774f19 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -46,7 +46,6 @@ class HbaseServiceCheckDefault(HbaseServiceCheck):
     env.set_params(params)
     
     output_file = "/apps/hbase/data/ambarismoketest"
-    test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smokeuser_principal};") if params.security_enabled else ""
     hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
new file mode 100644
index 0000000..defde28
Binary files /dev/null and b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index a615c27..95fe90c 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -98,6 +98,11 @@ def hdfs(name=None):
        owner=tc_owner,
        content=Template("slaves.j2")
   )
+
+  # for source-code of jar goto contrib/fast-hdfs-resource
+  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
+       content=StaticFile("fast-hdfs-resource.jar")
+  )
   
   if params.lzo_enabled and len(params.lzo_packages) > 0:
       Package(params.lzo_packages)
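The helper jar is shipped with the HDFS service scripts and materialized on every host so the HdfsResource provider can shell out to it; StaticFile(name) resolves the file from the service's package/files directory in the agent cache. A sketch of the same idea with the target path spelled out; the mode is shown only for illustration, the hunk above does not set one:

from resource_management import File, StaticFile

# Copies package/files/fast-hdfs-resource.jar into the agent's lib dir
# (/var/lib/ambari-agent/lib, per the params_linux.py hunk below).
File("/var/lib/ambari-agent/lib/fast-hdfs-resource.jar",
     content=StaticFile("fast-hdfs-resource.jar"),
     mode=0644)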

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index e36019e..453d824 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -168,18 +168,21 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsDirectory("/tmp",
-                       action="create_delayed",
+  params.HdfsResource("/tmp",
+                       type="directory",
+                       action="create_on_execute",
                        owner=params.hdfs_user,
                        mode=0777
   )
-  params.HdfsDirectory(params.smoke_hdfs_user_dir,
-                       action="create_delayed",
+  params.HdfsResource(params.smoke_hdfs_user_dir,
+                       type="directory",
+                       action="create_on_execute",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode
   )
-  params.HdfsDirectory(None, action="create",
-                       only_if=check #skip creation when HA not active
+  params.HdfsResource(None, 
+                      action="execute",
+                      only_if=check #skip creation when HA not active
   )
 
 def format_namenode(force=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
index ad5ef89..ac0e24d 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_nfsgateway.py
@@ -22,7 +22,6 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources import Directory
 from resource_management.core import shell
 from utils import service
-from utils import hdfs_directory
 import subprocess,os
 
 # NFS GATEWAY is always started by root using jsvc due to rpcbind bugs

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
index 3feff67..78ef977 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
@@ -19,7 +19,6 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
-from utils import hdfs_directory
 from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
 from ambari_commons import OSConst
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
index 18e0fa0..18aede8 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params_linux.py
@@ -32,7 +32,8 @@ from resource_management.libraries.functions.default import default
 from resource_management.libraries.functions import get_klist_path
 from resource_management.libraries.functions import get_kinit_path
 from resource_management.libraries.script.script import Script
-from resource_management.libraries.resources.hdfs_directory import HdfsDirectory
+from resource_management.libraries.resources.hdfs_resource import HdfsResource
+
 from resource_management.libraries.functions.format_jvm_option import format_jvm_option
 from resource_management.libraries.functions.get_lzo_packages import get_lzo_packages
 
@@ -93,7 +94,7 @@ if Script.is_hdp_stack_greater_or_equal("2.2"):
       hadoop_secure_dn_user = '""'
 
 
-
+ambari_libs_dir = "/var/lib/ambari-agent/lib"
 limits_conf_dir = "/etc/security/limits.d"
 
 if Script.is_hdp_stack_greater_or_equal("2.0") and Script.is_hdp_stack_less_than("2.1") and not OSCheck.is_suse_family():
@@ -281,18 +282,19 @@ else:
   jn_kinit_cmd = ""
 
 import functools
-#create partial functions with common arguments for every HdfsDirectory call
-#to create hdfs directory we need to call params.HdfsDirectory in code
-HdfsDirectory = functools.partial(
-  HdfsDirectory,
-  conf_dir=hadoop_conf_dir,
-  hdfs_user=hdfs_user,
+#create partial functions with common arguments for every HdfsResource call
+#to create/delete/copyfromlocal hdfs directories/files we need to call params.HdfsResource in code
+HdfsResource = functools.partial(
+  HdfsResource,
+  user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  bin_dir = hadoop_bin_dir
+  hadoop_bin_dir = hadoop_bin_dir,
+  hadoop_conf_dir = hadoop_conf_dir
 )
 
+
 # The logic for LZO also exists in OOZIE's params.py
 io_compression_codecs = default("/configurations/core-site/io.compression.codecs", None)
 lzo_enabled = io_compression_codecs is not None and "com.hadoop.compression.lzo" in io_compression_codecs.lower()

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index dd319b0..7a1e6b7 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -37,15 +37,6 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
 
     safemode_command = format("dfsadmin -fs {namenode_address} -safemode get | grep OFF")
 
-    create_dir_cmd = format("fs -mkdir {dir}")
-    chmod_command = format("fs -chmod 777 {dir}")
-    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.hdfs_user)
-    cleanup_cmd = format("fs -rm {tmp_file}")
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    create_file_cmd = format(
-      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd 
{tmp_file}")
-    test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
       Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
         user=params.hdfs_user
@@ -58,39 +49,23 @@ class HdfsServiceCheckDefault(HdfsServiceCheck):
                   tries=20,
                   bin_dir=params.hadoop_bin_dir
     )
-    ExecuteHadoop(create_dir_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  not_if=test_dir_exists,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(dir,
+                        type="directory",
+                        action="create_on_execute",
+                        mode=0777
     )
-    ExecuteHadoop(chmod_command,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        action="delete_on_execute",
     )
-    ExecuteHadoop(create_file_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
-    )
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  try_sleep=3,
-                  tries=5,
-                  bin_dir=params.hadoop_bin_dir
+
+    params.HdfsResource(tmp_file,
+                        type="file",
+                        source="/etc/passwd",
+                        action="create_on_execute"
     )
+    params.HdfsResource(None, action="execute")
+
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port
       checkWebUIFileName = "checkWebUI.py"
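The four ExecuteHadoop calls (mkdir, chmod, put, test) collapse into declarative requests that are replayed by the final execute; the delete_on_execute on tmp_file preserves the old retry cleanup, since a stale file left by a previous attempt is removed before the new put. Consolidated, the new flow reads roughly as below; dir and tmp_file are computed earlier in the script, so the literal values here are only for illustration:

import params

dir = '/tmp'                                   # illustrative
tmp_file = '/tmp/ambari-smoke-test'            # illustrative

params.HdfsResource(dir,
                    type="directory",
                    action="create_on_execute",
                    mode=0777)
params.HdfsResource(tmp_file,                  # drop any stale file left by a retry
                    type="file",
                    action="delete_on_execute")
params.HdfsResource(tmp_file,                  # then upload the test payload
                    type="file",
                    source="/etc/passwd",
                    action="create_on_execute")
params.HdfsResource(None, action="execute")    # everything queued above is applied here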

http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
index d16848d..2083312 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -23,10 +23,11 @@
 export ttonhost=$1
 export smoke_test_user=$2
 export templeton_port=$3
-export smoke_user_keytab=$4
-export security_enabled=$5
-export kinit_path_local=$6
-export smokeuser_principal=$7
+export ttonTestScript=$4
+export smoke_user_keytab=$5
+export security_enabled=$6
+export kinit_path_local=$7
+export smokeuser_principal=$8
 export ttonurl="http://${ttonhost}:${templeton_port}/templeton/v1";
 
 if [[ $security_enabled == "true" ]]; then
@@ -67,21 +68,8 @@ if [[ $security_enabled == "true" ]]; then
 fi
 
 #try pig query
-outname=${smoke_test_user}.`date +"%M%d%y"`.$$;
-ttonTestOutput="/tmp/idtest.${outname}.out";
-ttonTestInput="/tmp/idtest.${outname}.in";
 ttonTestScript="idtest.${outname}.pig"
 
-echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
-echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
-echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
-
-#copy pig script to hdfs
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
-
-#copy input file to hdfs
-/var/lib/ambari-agent/ambari-sudo.sh su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
-
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > 
/tmp/pig_post.txt
 
