Revert "AMBARI-8932. Creating hdfs directories on deploy takes too long, Part 
2, reduces deploy time by ~6min (aonishuk)"


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/8cf0e915
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/8cf0e915
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/8cf0e915

Branch: refs/heads/trunk
Commit: 8cf0e91550619ec4101427d721bf859cdbeafb9d
Parents: 7288109
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Tue Dec 30 19:51:33 2014 +0200
Committer: Andrew Onishuk <aonis...@hortonworks.com>
Committed: Tue Dec 30 19:52:21 2014 +0200

----------------------------------------------------------------------
 ambari-agent/pom.xml                            |   7 -
 .../resource_management/TestContentSources.py   |   6 +-
 .../resource_management/TestCopyFromLocal.py    |  68 +++
 .../TestPropertiesFileResource.py               |  10 +-
 .../TestRepositoryResource.py                   |   4 +-
 .../TestXmlConfigResource.py                    |  10 +-
 .../python/resource_management/core/source.py   |   2 +-
 .../libraries/functions/__init__.py             |   1 -
 .../libraries/functions/get_namenode_states.py  |  72 ----
 .../libraries/functions/version.py              |  29 +-
 .../libraries/providers/__init__.py             |   3 +-
 .../libraries/providers/copy_from_local.py      |  89 ++++
 .../libraries/providers/hdfs_directory.py       | 112 +++++
 .../libraries/providers/hdfs_resource.py        | 109 -----
 .../libraries/resources/__init__.py             |   3 +-
 .../libraries/resources/copy_from_local.py      |  40 ++
 .../libraries/resources/hdfs_directory.py       |  45 ++
 .../libraries/resources/hdfs_resource.py        |  77 ----
 .../FALCON/0.5.0.2.1/package/scripts/falcon.py  |   8 +-
 .../FALCON/0.5.0.2.1/package/scripts/params.py  |  16 +-
 .../HBASE/0.96.0.2.0/package/scripts/hbase.py   |  10 +-
 .../HBASE/0.96.0.2.0/package/scripts/params.py  |  16 +-
 .../0.96.0.2.0/package/scripts/service_check.py |   3 +-
 .../package/files/fast-hdfs-resource.jar        | Bin 19284916 -> 0 bytes
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |   5 -
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  11 +-
 .../2.1.0.2.0/package/scripts/hdfs_snamenode.py |   2 +
 .../HDFS/2.1.0.2.0/package/scripts/params.py    |  21 +-
 .../2.1.0.2.0/package/scripts/service_check.py  |  54 ++-
 .../0.12.0.2.0/package/files/templetonSmoke.sh  |  30 +-
 .../HIVE/0.12.0.2.0/package/scripts/hive.py     |  86 +---
 .../0.12.0.2.0/package/scripts/hive_server.py   |   9 +-
 .../0.12.0.2.0/package/scripts/install_jars.py  |  55 ++-
 .../HIVE/0.12.0.2.0/package/scripts/params.py   |  66 +--
 .../HIVE/0.12.0.2.0/package/scripts/webhcat.py  |  73 +++-
 .../package/scripts/webhcat_server.py           |   2 +-
 .../4.0.0.2.0/package/files/oozieSmoke2.sh      |  33 +-
 .../OOZIE/4.0.0.2.0/package/scripts/oozie.py    |   6 +-
 .../OOZIE/4.0.0.2.0/package/scripts/params.py   |  15 +-
 .../PIG/0.12.0.2.0/package/scripts/params.py    |  20 +-
 .../0.12.0.2.0/package/scripts/service_check.py |  75 ++--
 .../TEZ/0.4.0.2.1/package/scripts/params.py     |  16 +-
 .../YARN/package/scripts/historyserver.py       |  24 +-
 .../services/YARN/package/scripts/params.py     |  28 +-
 .../2.0.6/services/YARN/package/scripts/yarn.py |  27 +-
 .../stacks/2.0.6/HBASE/test_hbase_master.py     | 195 ++++-----
 .../2.0.6/HBASE/test_hbase_regionserver.py      |  90 ++++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |   6 -
 .../stacks/2.0.6/HDFS/test_journalnode.py       |   6 -
 .../python/stacks/2.0.6/HDFS/test_namenode.py   | 414 +++++++++----------
 .../stacks/2.0.6/HDFS/test_service_check.py     |  70 ++--
 .../python/stacks/2.0.6/HDFS/test_snamenode.py  |   6 -
 .../test/python/stacks/2.0.6/HDFS/test_zkfc.py  |  12 -
 .../stacks/2.0.6/HIVE/test_hive_server.py       | 272 ++++--------
 .../stacks/2.0.6/HIVE/test_webhcat_server.py    | 134 ++++++
 .../stacks/2.0.6/OOZIE/test_oozie_server.py     |  66 +--
 .../stacks/2.0.6/PIG/test_pig_service_check.py  | 112 +----
 .../stacks/2.0.6/YARN/test_historyserver.py     | 288 ++++++-------
 .../stacks/2.0.6/YARN/test_mapreduce2_client.py |   2 -
 .../stacks/2.0.6/YARN/test_nodemanager.py       | 132 ++++++
 .../stacks/2.0.6/YARN/test_yarn_client.py       |   2 -
 .../python/stacks/2.0.6/configs/default.json    |   4 +-
 .../stacks/2.1/FALCON/test_falcon_server.py     |  43 +-
 ambari-server/src/test/python/unitTests.py      |   1 -
 ambari-web/app/config.js                        |   2 +-
 .../dependency-reduced-pom.xml                  |  58 ---
 .../ambari/fast_hdfs_resource/Resource.java     |   8 +-
 67 files changed, 1611 insertions(+), 1710 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-agent/pom.xml b/ambari-agent/pom.xml
index bee88f8..939c366 100644
--- a/ambari-agent/pom.xml
+++ b/ambari-agent/pom.xml
@@ -402,12 +402,6 @@
                 </source>
               </sources>
             </mapping>
-            <mapping>
-              <directory>/var/lib/ambari-agent/lib</directory>
-              <filemode>755</filemode>
-              <username>root</username>
-              <groupname>root</groupname>
-            </mapping>
           </mappings>
         </configuration>
       </plugin>
@@ -522,7 +516,6 @@
                 <path>/var/lib/${project.artifactId}/data/tmp</path>
                 <path>/var/lib/${project.artifactId}/keys</path>
                 <path>${package.log.dir}</path>
-                <path>/var/lib/${project.artifactId}/lib</path>
               </paths>
               <mapper>
                 <type>perm</type>

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/src/test/python/resource_management/TestContentSources.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestContentSources.py b/ambari-agent/src/test/python/resource_management/TestContentSources.py
index d3a9b53..651ff83 100644
--- a/ambari-agent/src/test/python/resource_management/TestContentSources.py
+++ b/ambari-agent/src/test/python/resource_management/TestContentSources.py
@@ -310,7 +310,7 @@ class TestContentSources(TestCase):
       content = template.get_content()
     self.assertEqual(open_mock.call_count, 1)
 
-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)
     open_mock.assert_called_with('/absolute/path/test.j2', 'rb')
     self.assertEqual(getmtime_mock.call_count, 1)
     getmtime_mock.assert_called_with('/absolute/path/test.j2')
@@ -323,7 +323,7 @@ class TestContentSources(TestCase):
       template = InlineTemplate("{{test_arg1}} template content", [], 
test_arg1 = "test")
       content = template.get_content()
 
-    self.assertEqual(u'test template content', content)
+    self.assertEqual(u'test template content\n', content)
 
   def test_template_imports(self):
     """
@@ -339,4 +339,4 @@ class TestContentSources(TestCase):
     with Environment("/base") as env:
       template = InlineTemplate("{{test_arg1}} template content 
{{os.path.join(path[0],path[1])}}", [os], test_arg1 = "test", path = 
["/one","two"])
       content = template.get_content()
-    self.assertEqual(u'test template content /one/two', content)
+    self.assertEqual(u'test template content /one/two\n', content)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
new file mode 100644
index 0000000..cabfab5
--- /dev/null
+++ b/ambari-agent/src/test/python/resource_management/TestCopyFromLocal.py
@@ -0,0 +1,68 @@
+'''
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+'''
+
+from unittest import TestCase
+from mock.mock import patch, MagicMock
+from resource_management import *
+from resource_management.core import shell
+
+@patch.object(shell, "call", new = MagicMock(return_value=(1, "")))
+@patch.object(System, "os_family", new = 'redhat')
+class TestCopyFromLocal(TestCase):
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_default_args(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        owner='user1',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 2)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      print call_arg_list[0][0][0].arguments
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls 
/apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1 /apps/test//*.files', 
call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+
+  @patch("resource_management.libraries.providers.execute_hadoop.ExecuteHadoopProvider")
+  def test_run_with_chmod(self, execute_hadoop_mock):
+    with Environment() as env:
+      CopyFromLocal('/user/testdir/*.files',
+        mode=0655,
+        owner='user1',
+        group='hdfs',
+        dest_dir='/apps/test/',
+        kinnit_if_needed='',
+        hdfs_user='hdfs'
+      )
+      self.assertEqual(execute_hadoop_mock.call_count, 3)
+      call_arg_list = execute_hadoop_mock.call_args_list
+      self.assertEqual('fs -copyFromLocal /user/testdir/*.files /apps/test/',
+                       call_arg_list[0][0][0].command)
+      self.assertEquals({'not_if': "/usr/bin/sudo su user1 -l -s /bin/bash -c 
'[RMF_EXPORT_PLACEHOLDER]PATH=$PATH:/usr/bin hadoop fs -ls 
/apps/test//*.files'", 'bin_dir': '/usr/bin', 'user': 'user1', 'conf_dir': 
'/etc/hadoop/conf'},
+                        call_arg_list[0][0][0].arguments)
+      self.assertEquals('fs -chown user1:hdfs /apps/test//*.files', 
call_arg_list[1][0][0].command)
+      self.assertEquals({'user': 'hdfs', 'bin_dir': '/usr/bin', 'conf_dir': 
'/etc/hadoop/conf'}, call_arg_list[1][0][0].arguments)
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
index 53511c0..bdb64de 100644
--- a/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestPropertiesFileResource.py
@@ -65,7 +65,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={}
       )
 
-    
create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', 
u'# Generated by Apache Ambari. Today is Wednesday\n    \n    ')
+    
create_file_mock.assert_called_with('/somewhere_in_system/one_file.properties', 
u'# Generated by Apache Ambari. Today is Wednesday\n    \n    \n')
     ensure_mock.assert_called()
 
 
@@ -98,7 +98,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={},
       )
 
-    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated 
by Apache Ambari. Some other day\n    \n    ')
+    create_file_mock.assert_called_with('/dir/and/dir/file.txt', u'# Generated 
by Apache Ambari. Some other day\n    \n    \n')
     ensure_mock.assert_called()
 
 
@@ -131,7 +131,7 @@ class TestPropertiesFIleResource(TestCase):
                      properties={'property1': 'value1'},
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty1=value1\n    ')
+    create_file_mock.assert_called_with('/dir/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty1=value1\n    \n')
     ensure_mock.assert_called()
 
 
@@ -169,7 +169,7 @@ class TestPropertiesFIleResource(TestCase):
                      },
       )
 
-    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by 
Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, 
openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - 
%m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    ")
+    create_file_mock.assert_called_with('/dir/new_file', u"# Generated by 
Apache Ambari. 777\n    \n=\nprop.1='.'yyyy-MM-dd-HH\nprop.2=INFO, 
openjpa\nprop.3=%d{ISO8601} %5p %c{1}:%L - 
%m%n\nprop.4=${oozie.log.dir}/oozie.log\nprop.empty=\n    \n")
     ensure_mock.assert_called()
 
 
@@ -206,5 +206,5 @@ class TestPropertiesFIleResource(TestCase):
       )
 
     read_file_mock.assert_called()
-    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty_1=value1\n    ')
+    create_file_mock.assert_called_with('/dir1/new_file', u'# Generated by 
Apache Ambari. 777\n    \nproperty_1=value1\n    \n')
     ensure_mock.assert_called()

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
index 224f8b4..3b83c0f 100644
--- a/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestRepositoryResource.py
@@ -144,7 +144,7 @@ class TestRepositoryResource(TestCase):
       template_content = template_item[1]['content'].get_content()
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c\n')
       
       copy_item = str(file_mock.call_args_list[1])
       self.assertEqual(copy_item, "call('/etc/apt/sources.list.d/HDP.list', 
content=StaticFile('/tmp/1.txt'))")
@@ -179,7 +179,7 @@ class TestRepositoryResource(TestCase):
       template_content = template_item[1]['content'].get_content()
       
       self.assertEquals(template_name, '/tmp/1.txt')
-      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c')
+      self.assertEquals(template_content, 'deb 
http://download.base_url.org/rpm/ a b c\n')
       
       self.assertEqual(file_mock.call_count, 1)
       self.assertEqual(execute_mock.call_count, 0)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
index f0264d6..4affd31 100644
--- a/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestXmlConfigResource.py
@@ -62,7 +62,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 
2014-02-->\n    <configuration>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n  </configuration>\n')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -91,7 +91,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={'attr': {'property1': 'attr_value'}}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      
<name>property1</name>\n      <value>value1</value>\n      
<attr>attr_value</attr>\n    </property>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      
<name>property1</name>\n      <value>value1</value>\n      
<attr>attr_value</attr>\n    </property>\n    \n  </configuration>\n')
 
 
   @patch("resource_management.core.providers.system._ensure_metadata")
@@ -144,8 +144,8 @@ class TestXmlConfigResource(TestCase):
                     }
                 })
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      
<attr1>x</attr1>\n    </property>\n    \n    <property>\n      
<name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    
\n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p 
%c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n   
 <property>\n      <name>prop.4</name>\n      
<value>${oozie.log.dir}/oozie.log</value>\n      
<attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    
</property>\n    \n    <property>\n      <name>prop.empty</name>\n      
<value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n 
   \n  </configuration>')
-  
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>prop.1</name>\n      <value>&#39;.&#39;yyyy-MM-dd-HH</value>\n      
<attr1>x</attr1>\n    </property>\n    \n    <property>\n      
<name>prop.2</name>\n      <value>INFO, openjpa</value>\n    </property>\n    
\n    <property>\n      <name>prop.3</name>\n      <value>%d{ISO8601} %5p 
%c{1}:%L - %m%n</value>\n      <attr2>value3</attr2>\n    </property>\n    \n   
 <property>\n      <name>prop.4</name>\n      
<value>${oozie.log.dir}/oozie.log</value>\n      
<attr_value_empty></attr_value_empty>\n      <attr2>value4</attr2>\n    
</property>\n    \n    <property>\n      <name>prop.empty</name>\n      
<value></value>\n      <attr_value_empty></attr_value_empty>\n    </property>\n 
   \n  </configuration>\n')
+
   @patch("resource_management.core.providers.system._ensure_metadata")
   @patch.object(sudo, "create_file")
   @patch.object(os.path, "exists")
@@ -177,7 +177,7 @@ class TestXmlConfigResource(TestCase):
                 configuration_attributes={}
                 )
 
-    create_file_mock.assert_called_with('/dir/conf/file.xml', '<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>first</name>\n      <value>should be first</value>\n    </property>\n    
\n    <property>\n      <name>second</name>\n      <value>should be 
second</value>\n    </property>\n    \n    <property>\n      
<name>third</name>\n      <value>should be third</value>\n    </property>\n    
\n    <property>\n      <name>z_last</name>\n      <value>should be 
last</value>\n    </property>\n    \n  </configuration>')
+    create_file_mock.assert_called_with('/dir/conf/file.xml', u'<!--Wed 
2014-02-->\n    <configuration>\n    \n    <property>\n      <name></name>\n    
  <value></value>\n    </property>\n    \n    <property>\n      
<name>first</name>\n      <value>should be first</value>\n    </property>\n    
\n    <property>\n      <name>second</name>\n      <value>should be 
second</value>\n    </property>\n    \n    <property>\n      
<name>third</name>\n      <value>should be third</value>\n    </property>\n    
\n    <property>\n      <name>z_last</name>\n      <value>should be 
last</value>\n    </property>\n    \n  </configuration>\n')
 
   @patch("resource_management.libraries.providers.xml_config.File")
   @patch.object(os.path, "exists")

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/core/source.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/core/source.py b/ambari-common/src/main/python/resource_management/core/source.py
index 12fd9c8..22e1c6d 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -124,7 +124,7 @@ else:
       self.context.update(variables)
       
       rendered = self.template.render(self.context)
-      return rendered
+      return rendered + "\n" if not rendered.endswith('\n') else rendered
     
   class InlineTemplate(Template):
     def __init__(self, name, extra_imports=[], **kwargs):

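For reference, the restored hunk above makes every rendered template end with a trailing newline, which is exactly what the updated assertions in TestContentSources.py and the other test files expect. A minimal standalone sketch of that normalization (plain Python, not part of the patch):

def ensure_trailing_newline(rendered):
  # Mirrors the restored return statement:
  #   rendered + "\n" if not rendered.endswith('\n') else rendered
  return rendered if rendered.endswith("\n") else rendered + "\n"

assert ensure_trailing_newline(u'test template content') == u'test template content\n'
assert ensure_trailing_newline(u'already terminated\n') == u'already terminated\n'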
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
index 27606e2..9b32b92 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/__init__.py
@@ -33,7 +33,6 @@ from resource_management.libraries.functions.get_port_from_url import *
 from resource_management.libraries.functions.hive_check import *
 from resource_management.libraries.functions.version import *
 from resource_management.libraries.functions.format_jvm_option import *
-from resource_management.libraries.functions.get_namenode_states import *
 
 IS_WINDOWS = platform.system() == "Windows"
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py b/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
deleted file mode 100644
index 826c045..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/functions/get_namenode_states.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-from resource_management.libraries.script import UnknownConfiguration
-
-__all__ = ["get_namenode_states", "get_active_namenode"]
-
-HDFS_NN_STATE_ACTIVE = 'active'
-HDFS_NN_STATE_STANDBY = 'standby'
-
-NAMENODE_HTTP_FRAGMENT = 'dfs.namenode.http-address.{0}.{1}'
-JMX_URI_FRAGMENT = "http://{0}/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"
-  
-def get_namenode_states(hdfs_site):
-  active_namenodes = []
-  standby_namenodes = []
-  unknown_namenodes = []
-  
-  name_service = hdfs_site['dfs.nameservices']
-  nn_unique_ids_key = 'dfs.ha.namenodes.' + name_service
-
-  # now we have something like 'nn1,nn2,nn3,nn4'
-  # turn it into dfs.namenode.[property].[dfs.nameservices].[nn_unique_id]
-  # ie dfs.namenode.http-address.hacluster.nn1
-  nn_unique_ids = hdfs_site[nn_unique_ids_key].split(',')
-  for nn_unique_id in nn_unique_ids:
-    key = NAMENODE_HTTP_FRAGMENT.format(name_service,nn_unique_id)
-
-    if key in hdfs_site:
-      # use str() to ensure that unicode strings do not have the u' in them
-      value = str(hdfs_site[key])
-
-      try:
-        jmx_uri = JMX_URI_FRAGMENT.format(value)
-        state = get_value_from_jmx(jmx_uri,'State')
-
-        if state == HDFS_NN_STATE_ACTIVE:
-          active_namenodes.append(value)
-        elif state == HDFS_NN_STATE_STANDBY:
-          standby_namenodes.append(value)
-        else:
-          unknown_namenodes.append(value)
-      except:
-        unknown_namenodes.append(value)
-        
-  return active_namenodes, active_namenodes, unknown_namenodes
-
-def get_active_namenode(hdfs_site):
-  active_namenodes = get_namenode_states(hdfs_site)[0]
-  if active_namenodes:
-    return active_namenodes[0]
-  else:
-    return UnknownConfiguration('fs_root')

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/functions/version.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/version.py b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
index d9f20da..1de6bf8 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/version.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/version.py
@@ -19,13 +19,7 @@ limitations under the License.
 Ambari Agent
 
 """
-import os
 import re
-from resource_management.core import shell
-from resource_management.core.exceptions import Fail
-from resource_management.core import shell
-from resource_management.core.exceptions import Fail
-from resource_management.libraries.script.config_dictionary import UnknownConfiguration
-
-__all__ = ["format_hdp_stack_version", "compare_versions", "get_hdp_build_version"]
 
 def _normalize(v, desired_segments=0):
   """
@@ -76,25 +70,4 @@ def compare_versions(version1, version2):
   :return: Returns -1 if version1 is before version2, 0 if they are equal, and 1 if version1 is after version2
   """
   max_segments = max(len(version1.split(".")), len(version2.split(".")))
-  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
-
-
-def get_hdp_build_version(hdp_stack_version):
-  """
-  Used to check hdp_stack_version for stacks >= 2.2
-  :param hdp_stack_version: version for stacks >= 2.2
-  :return: checked hdp_version (or UnknownConfiguration for stacks < 2.2)
-  """
-  HDP_SELECT = "/usr/bin/hdp-select"
-  if hdp_stack_version != "" and compare_versions(hdp_stack_version, 
"2.2.0.0") >= 0 and os.path.exists(HDP_SELECT):
-    code, out = shell.call('{0} status'.format(HDP_SELECT))
-
-    matches = re.findall(r"([\d\.]+\-\d+)", out)
-    hdp_version = matches[0] if matches and len(matches) > 0 else None
-
-    if not hdp_version:
-      raise Fail("Could not parse HDP version from output of hdp-select: %s" % str(out))
-
-    return hdp_version
-  else:
-    return UnknownConfiguration('hdp_version')
\ No newline at end of file
+  return cmp(_normalize(version1, desired_segments=max_segments), _normalize(version2, desired_segments=max_segments))
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
index 5d26e4a..80e0a14 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/__init__.py
@@ -39,6 +39,7 @@ PROVIDERS = dict(
     XmlConfig="resource_management.libraries.providers.xml_config.XmlConfigProvider",
     PropertiesFile="resource_management.libraries.providers.properties_file.PropertiesFileProvider",
     MonitorWebserver="resource_management.libraries.providers.monitor_webserver.MonitorWebserverProvider",
-    HdfsResource="resource_management.libraries.providers.hdfs_resource.HdfsResourceProvider",
+    HdfsDirectory="resource_management.libraries.providers.hdfs_directory.HdfsDirectoryProvider",
+    CopyFromLocal="resource_management.libraries.providers.copy_from_local.CopyFromLocalProvider"
   ),
 )

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
new file mode 100644
index 0000000..dbd54a7
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/copy_from_local.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+import os
+from resource_management import *
+
+class CopyFromLocalProvider(Provider):
+  def action_run(self):
+
+    path = self.resource.path
+    dest_dir = self.resource.dest_dir
+    dest_file = self.resource.dest_file
+    kinnit_if_needed = self.resource.kinnit_if_needed
+    owner = self.resource.owner
+    group = self.resource.group
+    mode = self.resource.mode
+    hdfs_usr=self.resource.hdfs_user
+    hadoop_conf_path = self.resource.hadoop_conf_dir
+    bin_dir = self.resource.hadoop_bin_dir
+
+
+    if dest_file:
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}/{dest_file}")
+      dest_path = dest_dir + dest_file if dest_dir.endswith(os.sep) else dest_dir + os.sep + dest_file
+    else:
+      dest_file_name = os.path.split(path)[1]
+      copy_cmd = format("fs -copyFromLocal {path} {dest_dir}")
+      dest_path = dest_dir + os.sep + dest_file_name
+    # Need to run unless as resource user
+    
+    if kinnit_if_needed:
+      Execute(kinnit_if_needed, 
+              user=owner,
+      )
+    
+    unless_cmd = as_user(format("PATH=$PATH:{bin_dir} hadoop fs -ls {dest_path}"), owner)
+
+    ExecuteHadoop(copy_cmd,
+                  not_if=unless_cmd,
+                  user=owner,
+                  bin_dir=bin_dir,
+                  conf_dir=hadoop_conf_path
+                  )
+
+    if not owner:
+      chown = None
+    else:
+      if not group:
+        chown = owner
+      else:
+        chown = format('{owner}:{group}')
+
+    if chown:
+      chown_cmd = format("fs -chown {chown} {dest_path}")
+
+      ExecuteHadoop(chown_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass
+
+    if mode:
+      dir_mode = oct(mode)[1:]
+      chmod_cmd = format('fs -chmod {dir_mode} {dest_path}')
+
+      ExecuteHadoop(chmod_cmd,
+                    user=hdfs_usr,
+                    bin_dir=bin_dir,
+                    conf_dir=hadoop_conf_path)
+    pass

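The CopyFromLocalProvider restored above issues up to three ExecuteHadoop calls: a guarded `fs -copyFromLocal` (skipped via not_if when the destination already exists), an `fs -chown` when an owner or group is given, and an `fs -chmod` when a mode is given. A hedged usage sketch based on the CopyFromLocal resource definition restored later in this commit; the paths, users and group are illustrative, not taken from the patch:

from resource_management import *

with Environment() as env:
  CopyFromLocal('/usr/share/example/example.jar',   # local source path (illustrative)
                dest_dir='/apps/example',           # target HDFS directory (illustrative)
                owner='example_user',
                group='hadoop',
                mode=0644,
                kinnit_if_needed='',                # kinit command string when security is enabled
                hdfs_user='hdfs',
  )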
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
new file mode 100644
index 0000000..bfdb97d
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+import os
+
+from resource_management import *
+directories_list = [] #direcotries list for mkdir
+chmod_map = {} #(mode,recursive):dir_list map
+chown_map = {} #(owner,group,recursive):dir_list map
+class HdfsDirectoryProvider(Provider):
+  def action_create_delayed(self):
+    global delayed_directories
+    global chmod_map
+    global chown_map
+
+    if not self.resource.dir_name:
+      return
+
+    dir_name = self.resource.dir_name
+    dir_owner = self.resource.owner
+    dir_group = self.resource.group
+    dir_mode = oct(self.resource.mode)[1:] if self.resource.mode else None
+    directories_list.append(self.resource.dir_name)
+
+    recursive_chown_str = "-R" if self.resource.recursive_chown else ""
+    recursive_chmod_str = "-R" if self.resource.recursive_chmod else ""
+    # grouping directories by mode/owner/group to modify them in one 'chXXX' call
+    if dir_mode:
+      chmod_key = (dir_mode,recursive_chmod_str)
+      if chmod_map.has_key(chmod_key):
+        chmod_map[chmod_key].append(dir_name)
+      else:
+        chmod_map[chmod_key] = [dir_name]
+
+    if dir_owner:
+      owner_key = (dir_owner,dir_group,recursive_chown_str)
+      if chown_map.has_key(owner_key):
+        chown_map[owner_key].append(dir_name)
+      else:
+        chown_map[owner_key] = [dir_name]
+
+  def action_create(self):
+    global delayed_directories
+    global chmod_map
+    global chown_map
+
+    self.action_create_delayed()
+
+    hdp_conf_dir = self.resource.conf_dir
+    hdp_hdfs_user = self.resource.hdfs_user
+    secured = self.resource.security_enabled
+    keytab_file = self.resource.keytab
+    kinit_path = self.resource.kinit_path_local
+    bin_dir = self.resource.bin_dir
+
+    chmod_commands = []
+    chown_commands = []
+
+    for chmod_key, chmod_dirs in chmod_map.items():
+      mode = chmod_key[0]
+      recursive = chmod_key[1]
+      chmod_dirs_str = ' '.join(chmod_dirs)
+      chmod_commands.append(format("hadoop --config {hdp_conf_dir} fs -chmod {recursive} {mode} {chmod_dirs_str}"))
+
+    for chown_key, chown_dirs in chown_map.items():
+      owner = chown_key[0]
+      group = chown_key[1]
+      recursive = chown_key[2]
+      chown_dirs_str = ' '.join(chown_dirs)
+      if owner:
+        chown = owner
+        if group:
+          chown = format("{owner}:{group}")
+        chown_commands.append(format("hadoop --config {hdp_conf_dir} fs -chown {recursive} {chown} {chown_dirs_str}"))
+
+    if secured:
+        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
+                user=hdp_hdfs_user)
+    #create all directories in one 'mkdir' call
+    dir_list_str = ' '.join(directories_list)
+    #for hadoop 2 we need to specify -p to create directories recursively
+    parent_flag = '`rpm -q hadoop | grep -q "hadoop-1" || echo "-p"`'
+
+    Execute(format('hadoop --config {hdp_conf_dir} fs -mkdir {parent_flag} {dir_list_str} && {chmod_cmd} && {chown_cmd}',
+                   chmod_cmd=' && '.join(chmod_commands),
+                   chown_cmd=' && '.join(chown_commands)),
+            user=hdp_hdfs_user,
+            path=bin_dir,
+            not_if=as_user(format("hadoop --config {hdp_conf_dir} fs -ls {dir_list_str}"), hdp_hdfs_user)
+    )
+
+    directories_list[:] = []
+    chmod_map.clear()
+    chown_map.clear()

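HdfsDirectoryProvider above queues directories on action="create_delayed" and, on action="create", groups them by mode and by owner/group so that one batched `hadoop fs -mkdir` plus one chmod/chown per group covers everything queued. A hedged sketch of the calling pattern with illustrative paths and users; in the stack scripts the common arguments are bound once via functools.partial in params.py (see the FALCON and HBASE hunks below):

from resource_management import *

with Environment() as env:
  # Queue directories; nothing is executed yet.
  HdfsDirectory('/apps/example/data', action="create_delayed",
                owner='example_user', mode=0755)
  HdfsDirectory('/user/example_user', action="create_delayed",
                owner='example_user')
  # One batched mkdir/chmod/chown run for everything queued above.
  HdfsDirectory(None, action="create",
                conf_dir='/etc/hadoop/conf', hdfs_user='hdfs',
                bin_dir='/usr/bin')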
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
deleted file mode 100644
index c031cbc..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_resource.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-import json
-from resource_management import *
-
-JSON_PATH = '/var/lib/ambari-agent/data/hdfs_resources.json'
-JAR_PATH = '/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
-
-RESOURCE_TO_JSON_FIELDS = {
-  'target': 'target',
-  'type': 'type',
-  'action': 'action',
-  'source': 'source',
-  'owner': 'owner',
-  'group': 'group',
-  'mode': 'mode',
-  'recursive_chown': 'recursiveChown',
-  'recursive_chmod': 'recursiveChmod'
-}
-
-
-class HdfsResourceProvider(Provider):
-  def action_delayed(self, action_name):
-    resource = {}
-    env = Environment.get_instance()
-    if not 'hdfs_files' in env.config:
-      env.config['hdfs_files'] = []
-
-    # Check required parameters
-    if not self.resource.type or not self.resource.action:
-      raise Fail("Resource parameter type or action is not set.")
-
-    # Put values in dictionary-resource
-    for field_name, json_field_name in RESOURCE_TO_JSON_FIELDS.iteritems():
-      if field_name == 'action':
-        resource[json_field_name] = action_name
-      elif field_name == 'mode' and self.resource.mode:
-        resource[json_field_name] = oct(self.resource.mode)[1:]
-      elif getattr(self.resource, field_name):
-        resource[json_field_name] = getattr(self.resource, field_name)
-
-    # Add resource to create
-    env.config['hdfs_files'].append(resource)
-
-  def action_create_delayed(self):
-    self.action_delayed("create")
-
-  def action_delete_delayed(self):
-    self.action_delayed("delete")
-
-  def action_execute(self):
-    env = Environment.get_instance()
-
-    # Check required parameters
-    if not self.resource.hadoop_fs or not self.resource.user:
-      raise Fail("Resource parameter hadoop_fs or user is not set.")
-
-    if not 'hdfs_files' in env.config or not env.config['hdfs_files']:
-      raise Fail("No resources to create. Please perform create_delayed"
-                 " or delete_delayed before doing execute action.")
-
-    hadoop_fs = self.resource.hadoop_fs
-    hadoop_bin_dir = self.resource.hadoop_bin_dir
-    hadoop_conf_dir = self.resource.hadoop_conf_dir
-    user = self.resource.user
-    security_enabled = self.resource.security_enabled
-    keytab_file = self.resource.keytab
-    kinit_path = self.resource.kinit_path_local
-    logoutput = self.resource.logoutput
-    jar_path=JAR_PATH
-    json_path=JSON_PATH
-
-    if security_enabled:
-      Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
-              user=user
-      )
-
-    # Write json file to disk
-    with open(JSON_PATH, 'w') as f:
-      f.write(json.dumps(env.config['hdfs_files']))
-
-    # Execute jar to create/delete resources in hadoop
-    Execute(format("hadoop --config {hadoop_conf_dir} jar {jar_path} 
{json_path} {hadoop_fs}"),
-            user=user,
-            path=[hadoop_bin_dir],
-            logoutput=logoutput,
-    )
-
-    # Clean
-    env.config['hdfs_files'] = []

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
index 909d7b8..24b497c 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/__init__.py
@@ -26,4 +26,5 @@ from resource_management.libraries.resources.xml_config import *
 from resource_management.libraries.resources.properties_file import *
 from resource_management.libraries.resources.repository import *
 from resource_management.libraries.resources.monitor_webserver import *
-from resource_management.libraries.resources.hdfs_resource import *
+from resource_management.libraries.resources.hdfs_directory import *
+from resource_management.libraries.resources.copy_from_local import *
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
new file mode 100644
index 0000000..54d003e
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/copy_from_local.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+_all__ = ["CopyFromLocal"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class CopyFromLocal(Resource):
+  action = ForcedListArgument(default="run")
+
+  path = ResourceArgument(default=lambda obj: obj.name)
+  dest_dir = ResourceArgument(required=True)
+  dest_file = ResourceArgument()
+  owner = ResourceArgument(required=True)
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  kinnit_if_needed = ResourceArgument(default='')
+  hadoop_conf_dir = ResourceArgument(default='/etc/hadoop/conf')
+  hdfs_user = ResourceArgument(default='hdfs')
+  hadoop_bin_dir = ResourceArgument(default='/usr/bin')
+
+  actions = Resource.actions + ["run"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
new file mode 100644
index 0000000..7888cd8
--- /dev/null
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_directory.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+"""
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+Ambari Agent
+
+"""
+
+_all__ = ["HdfsDirectory"]
+from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
+
+class HdfsDirectory(Resource):
+  action = ForcedListArgument()
+
+  dir_name = ResourceArgument(default=lambda obj: obj.name)
+  owner = ResourceArgument()
+  group = ResourceArgument()
+  mode = ResourceArgument()
+  recursive_chown = BooleanArgument(default=False)
+  recursive_chmod = BooleanArgument(default=False)
+
+  conf_dir = ResourceArgument()
+  security_enabled = BooleanArgument(default=False)
+  keytab = ResourceArgument()
+  kinit_path_local = ResourceArgument()
+  hdfs_user = ResourceArgument()
+  bin_dir = ResourceArgument(default="")
+
+  #action 'create' immediately creates all pending directory in efficient manner
+  #action 'create_delayed' add directory to list of pending directories
+  actions = Resource.actions + ["create","create_delayed"]

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py b/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
deleted file mode 100644
index 21d63ae..0000000
--- a/ambari-common/src/main/python/resource_management/libraries/resources/hdfs_resource.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# !/usr/bin/env python
-"""
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-
-Ambari Agent
-
-"""
-
-_all__ = ["HdfsResource"]
-from resource_management.core.base import Resource, ForcedListArgument, ResourceArgument, BooleanArgument
-
-"""
-Calling a lot of hadoop commands takes too much time.
-The cause is that for every call new connection initialized, with datanodes, namenode.
-
-While this resource can gather the dicteroies/files to create/delete/copyFromLocal.
-And after just with one call create all that.
-
-action = create_delayed / delete_delayed. Are for gathering information  about what you want
-to create.
-
-After everything is gathered you should execute action = execute. To perform delayed actions
-
-The resource is a replacement for the following operations:
-  1) hadoop fs -rmr
-  2) hadoop fs -copyFromLocal
-  3) hadoop fs -put
-  4) hadoop fs -mkdir
-  5) hadoop fs -touchz
-  6) hadoop fs -chmod
-  7) hadoop fs -chown
-"""
-
-
-class HdfsResource(Resource):
-  # Required: {target, type, action}
-  # path to hadoop file/directory
-  target = ResourceArgument(default=lambda obj: obj.name)
-  # "directory" or "file"
-  type = ResourceArgument()
-  # "create_delayed" or "delete_delayed" or "execute"
-  action = ForcedListArgument()
-  # Required for action="execute", path to fs like "hdfs://c6401.ambari.apache.org:8020"
-  hadoop_fs = ResourceArgument()
-  # if present - copies file/directory from local path {source} to hadoop path - {target}
-  source = ResourceArgument()
-  owner = ResourceArgument()
-  group = ResourceArgument()
-  mode = ResourceArgument()
-  logoutput = ResourceArgument()
-  recursive_chown = BooleanArgument(default=False)
-  recursive_chmod = BooleanArgument(default=False)
-
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  user = ResourceArgument()
-  hadoop_bin_dir = ResourceArgument()
-  hadoop_conf_dir = ResourceArgument()
-
-  #action 'execute' immediately creates all pending files/directories in efficient manner
-  #action 'create_delayed/delete_delayed' adds file/directory to list of pending directories
-  actions = Resource.actions + ["create_delayed", "delete_delayed", "execute"]

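For contrast, the HdfsResource definition removed above (the AMBARI-8932 approach this commit reverts) collected create/delete requests into hdfs_resources.json and executed them in a single fast-hdfs-resource.jar run. A hedged sketch of how the stack scripts invoked it, matching the falcon.py call sites replaced below; the path, owner and mode are illustrative:

# params.HdfsResource is HdfsResource pre-bound with user, keytab, fs root, etc.
params.HdfsResource('/apps/example/data',
                    type="directory",
                    action="create_delayed",
                    owner='example_user',
                    mode=0755)
# Writes hdfs_resources.json and runs fast-hdfs-resource.jar once for all queued items.
params.HdfsResource(None, action="execute")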
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
index ce4b015..a903e5c 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/falcon.py
@@ -84,19 +84,17 @@ def falcon(type, action = None):
   if type == 'server':
     if action == 'config':
       if params.store_uri[0:4] == "hdfs":
-        params.HdfsResource(params.store_uri,
-                             type="file",
+        params.HdfsDirectory(params.store_uri,
                              action="create_delayed",
                              owner=params.falcon_user,
                              mode=0755
         )
-      params.HdfsResource(params.flacon_apps_dir,
-                           type="directory",
+      params.HdfsDirectory(params.flacon_apps_dir,
                            action="create_delayed",
                            owner=params.falcon_user,
                            mode=0777#TODO change to proper mode
       )
-      params.HdfsResource(None, action="execute")
+      params.HdfsDirectory(None, action="create")
       Directory(params.falcon_local_dir,
                 owner=params.falcon_user,
                 recursive=True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
index ac422c4..82cd470 100644
--- a/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/FALCON/0.5.0.2.1/package/scripts/params.py
@@ -90,17 +90,15 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )

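The params.py hunks in this commit all follow the same pattern: bind the cluster-wide arguments to HdfsDirectory once with functools.partial so the service scripts only pass per-directory arguments. A hedged standalone sketch of that pattern; the bound values here are illustrative placeholders, not the patch's own:

import functools
from resource_management import *

HdfsDirectory = functools.partial(HdfsDirectory,
                                  conf_dir='/etc/hadoop/conf',
                                  hdfs_user='hdfs',
                                  security_enabled=False,
                                  kinit_path_local='/usr/bin/kinit',
                                  bin_dir='/usr/bin')

with Environment() as env:
  # Call sites now only pass the directory-specific settings, as falcon.py does above.
  HdfsDirectory('/apps/falcon', action="create_delayed", owner='falcon', mode=0755)
  HdfsDirectory(None, action="create")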
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
index 42de66d..ea99288 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/hbase.py
@@ -135,19 +135,17 @@ def hbase(name=None # 'master' or 'regionserver' or 'client'
       group=params.user_group,
       owner=params.hbase_user
     )
-  if name == "master":
-    params.HdfsResource(params.hbase_hdfs_root_dir,
-                         type="directory",
+  if name in ["master","regionserver"]:
+    params.HdfsDirectory(params.hbase_hdfs_root_dir,
                          action="create_delayed",
                          owner=params.hbase_user
     )
-    params.HdfsResource(params.hbase_staging_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hbase_staging_dir,
                          action="create_delayed",
                          owner=params.hbase_user,
                          mode=0711
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
 def hbase_TemplateConfig(name, 
                          tag=None

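The create_delayed / create pair restored above batches directory creation: each create_delayed call only queues a path, and the final HdfsDirectory(None, action="create") flushes the queue. A standalone sketch of that queue-and-flush flow, with a fake class standing in for the real provider:

class FakeHdfsDirectory(object):
    _pending = []  # shared queue, mirroring how delayed directories accumulate

    def __init__(self, path, action, **attrs):
        if action == "create_delayed":
            self._pending.append((path, attrs))   # just remember the request
        elif action == "create":
            for queued_path, queued_attrs in self._pending:
                # the real provider runs hadoop fs -mkdir/-chown/-chmod here
                print("create %s with %s" % (queued_path, queued_attrs))
            del self._pending[:]                  # flush the queue

FakeHdfsDirectory("/apps/hbase/data", action="create_delayed", owner="hbase")
FakeHdfsDirectory("/apps/hbase/staging", action="create_delayed", owner="hbase", mode=0o711)
FakeHdfsDirectory(None, action="create")          # both directories handled in one pass
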
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
index 23c2f6c..1f970fc 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/params.py
@@ -137,19 +137,17 @@ hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
-fs_root = config['configurations']['core-site']['fs.defaultFS']
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >= 0:

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
index 96a93ad..15a306b 100644
--- a/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HBASE/0.96.0.2.0/package/scripts/service_check.py
@@ -26,8 +26,9 @@ class HbaseServiceCheck(Script):
   def service_check(self, env):
     import params
     env.set_params(params)
-
+    
     output_file = "/apps/hbase/data/ambarismoketest"
+    test_cmd = format("fs -test -e {output_file}")
     smokeuser_kinit_cmd = format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_test_user};") if params.security_enabled else ""
     hbase_servicecheck_file = format("{exec_tmp_dir}/hbase-smoke.sh")
   

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar
deleted file mode 100644
index 51b022a..0000000
Binary files a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/files/fast-hdfs-resource.jar and /dev/null differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index f4017c1..25c1067 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -78,11 +78,6 @@ def hdfs(name=None):
        owner=tc_owner,
        content=Template("slaves.j2")
   )
-
-  # for source-code of jar goto contrib/fast-hdfs-resource
-  File(format("{ambari_libs_dir}/fast-hdfs-resource.jar"),
-       content=StaticFile("fast-hdfs-resource.jar")
-  )
   
   if params.lzo_enabled:
     Package(params.lzo_packages_for_current_host)

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 86bcb4a..2029aac 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -135,21 +135,18 @@ def create_name_dirs(directories):
 def create_hdfs_directories(check):
   import params
 
-  params.HdfsResource("/tmp",
-                       type="directory",
+  params.HdfsDirectory("/tmp",
                        action="create_delayed",
                        owner=params.hdfs_user,
                        mode=0777
   )
-  params.HdfsResource(params.smoke_hdfs_user_dir,
-                       type="directory",
+  params.HdfsDirectory(params.smoke_hdfs_user_dir,
                        action="create_delayed",
                        owner=params.smoke_user,
                        mode=params.smoke_hdfs_user_mode
   )
-  params.HdfsResource(None, 
-                      action="execute",
-                      only_if=check #skip creation when HA not active
+  params.HdfsDirectory(None, action="create",
+                       only_if=check #skip creation when HA not active
   )
 
 def format_namenode(force=None):

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
index f6805a2..c650c4d 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_snamenode.py
@@ -19,6 +19,8 @@ limitations under the License.
 
 from resource_management import *
 from utils import service
+from utils import hdfs_directory
+
 
 def snamenode(action=None, format=False):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
index 3a9d074..1ac4446 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/params.py
@@ -81,8 +81,6 @@ else:
 hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_conf_empty_dir = "/etc/hadoop/conf.empty"
 limits_conf_dir = "/etc/security/limits.d"
-# Path to which fast-hdfs-resource.jar will be installed
-ambari_libs_dir = "/var/lib/ambari-agent/lib"
 
 execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 ulimit_cmd = "ulimit -c unlimited && "
@@ -226,22 +224,19 @@ if security_enabled:
   nn_kinit_cmd = format("{kinit_path_local} -kt {_nn_keytab} {_nn_principal_name};")
 else:
   dn_kinit_cmd = ""
-  nn_kinit_cmd = ""
-
-fs_root = config['configurations']['core-site']['fs.defaultFS']
+  nn_kinit_cmd = ""  
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user=hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir=hadoop_conf_dir,
+  hdfs_user=hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
 
 io_compression_codecs = config['configurations']['core-site']['io.compression.codecs']

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
index c009ffb..3dc3a1b 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/service_check.py
@@ -31,6 +31,15 @@ class HdfsServiceCheck(Script):
 
     safemode_command = "dfsadmin -safemode get | grep OFF"
 
+    create_dir_cmd = format("fs -mkdir {dir}")
+    chmod_command = format("fs -chmod 777 {dir}")
+    test_dir_exists = as_user(format("{hadoop_bin_dir}/hadoop --config {hadoop_conf_dir} fs -test -e {dir}"), params.smoke_user)
+    cleanup_cmd = format("fs -rm {tmp_file}")
+    #cleanup put below to handle retries; if retrying there wil be a stale file
+    #that needs cleanup; exit code is fn of second command
+    create_file_cmd = format(
+      "{cleanup_cmd}; hadoop --config {hadoop_conf_dir} fs -put /etc/passwd {tmp_file}")
+    test_cmd = format("fs -test -e {tmp_file}")
     if params.security_enabled:
      Execute(format("{kinit_path_local} -kt {smoke_user_keytab} {smoke_user}"),
         user=params.smoke_user
@@ -43,24 +52,39 @@ class HdfsServiceCheck(Script):
                   tries=20,
                   bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(dir,
-                        type="directory",
-                        action="create_delayed",
-                        mode=0777
+    ExecuteHadoop(create_dir_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  not_if=test_dir_exists,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-    #cleanup put below to handle retries; if retrying there wil be a stale file
-    #that needs cleanup; exit code is fn of second command
-    params.HdfsResource(tmp_file,
-                        type="directory",
-                        action="delete_delayed",
+    ExecuteHadoop(chmod_command,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(tmp_file,
-                        type="directory",
-                        source="/etc/passwd",
-                        action="create_delayed"
+    ExecuteHadoop(create_file_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
+    )
+    ExecuteHadoop(test_cmd,
+                  user=params.smoke_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  try_sleep=3,
+                  tries=5,
+                  bin_dir=params.hadoop_bin_dir
     )
-    params.HdfsResource(None, action="execute")
-
     if params.has_journalnode_hosts:
       journalnode_port = params.journalnode_port
       checkWebUIFileName = "checkWebUI.py"

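The service check above goes back to running each hadoop fs command through ExecuteHadoop with its own retry loop instead of batching through HdfsResource. A rough standalone approximation of that retry behaviour follows; the user, paths and the sudo wrapper are illustrative, not the provider's exact implementation.

import subprocess
import time

def execute_hadoop(fs_cmd, user="ambari-qa", conf_dir="/etc/hadoop/conf",
                   bin_dir="/usr/bin", tries=5, try_sleep=3):
    cmd = "sudo su %s -s /bin/bash - -c '%s/hadoop --config %s %s'" % (
        user, bin_dir, conf_dir, fs_cmd)
    for attempt in range(1, tries + 1):
        if subprocess.call(cmd, shell=True) == 0:
            return                      # success, stop retrying
        if attempt < tries:
            time.sleep(try_sleep)       # wait before the next attempt
    raise RuntimeError("failed after %d tries: %s" % (tries, cmd))

# Mirrors the sequence in service_check.py (requires a running HDFS):
# execute_hadoop("fs -mkdir /tmp/hdfs_smoke_dir")
# execute_hadoop("fs -chmod 777 /tmp/hdfs_smoke_dir")
# execute_hadoop("fs -put /etc/passwd /tmp/hdfs_smoke_dir/passwd")
# execute_hadoop("fs -test -e /tmp/hdfs_smoke_dir/passwd")
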
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
index e4afd43..22202ee 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/files/templetonSmoke.sh
@@ -20,23 +20,13 @@
 #
 #
 
-function getValueFromField {
-  xmllint $1 | grep "<name>$2</name>" -C 2 | grep '<value>' | cut -d ">" -f2 | cut -d "<" -f1
-  return $?
-}
-
 export ttonhost=$1
 export smoke_test_user=$2
 export smoke_user_keytab=$3
 export security_enabled=$4
 export kinit_path_local=$5
-export hadoop_conf_dir=$6
 export ttonurl="http://${ttonhost}:50111/templeton/v1";
 
-export NAMENODE=`getValueFromField ${hadoop_conf_dir}/core-site.xml fs.defaultFS`
-export JSON_PATH='/var/lib/ambari-agent/data/hdfs_resources.json'
-export JAR_PATH='/var/lib/ambari-agent/lib/fast-hdfs-resource.jar'
-
 if [[ $security_enabled == "true" ]]; then
   kinitcmd="${kinit_path_local}  -kt ${smoke_user_keytab} ${smoke_test_user}; "
 else
@@ -84,25 +74,11 @@ echo "A = load '$ttonTestInput' using PigStorage(':');"  > /tmp/$ttonTestScript
 echo "B = foreach A generate \$0 as id; " >> /tmp/$ttonTestScript
 echo "store B into '$ttonTestOutput';" >> /tmp/$ttonTestScript
 
-cat >$JSON_PATH<<EOF
-[{
-       "target":"/tmp/${ttonTestScript}",
-       "type":"directory",
-       "action":"create",
-       "source":"/tmp/${ttonTestScript}"
-},
-{
-       "target":"${ttonTestInput}",
-       "type":"directory",
-       "action":"create",
-       "source":"/etc/passwd"
-}]
-EOF
-
 #copy pig script to hdfs
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /tmp/$ttonTestScript /tmp/$ttonTestScript"
+
 #copy input file to hdfs
-echo "About to run: hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
-sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop --config ${hadoop_conf_dir} jar ${JAR_PATH} ${JSON_PATH} ${NAMENODE}"
+sudo su ${smoke_test_user} -s /bin/bash - -c "hadoop dfs -copyFromLocal /etc/passwd $ttonTestInput"
 
 #create, copy post args file
 echo -n "user.name=${smoke_test_user}&file=/tmp/$ttonTestScript" > /tmp/pig_post.txt

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index 6dd80a4..b9677e7 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -19,8 +19,6 @@ limitations under the License.
 """
 
 from resource_management import *
-from install_jars import install_tez_jars
-import glob
 import sys
 import os
 
@@ -29,96 +27,18 @@ def hive(name=None):
   import params
 
   if name == 'hiveserver2':
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0:
-      params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.mapreduce_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      params.HdfsResource(InlineTemplate(params.tez_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.tez_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-    else:
-      install_tez_jars()
-        
-    if params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
-      params.HdfsResource(params.webhcat_apps_dir,
-                           type="directory",
-                           action="create_delayed",
-                           owner=params.webhcat_user,
-                           mode=0755
-      )
-  
-    if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
-      params.HdfsResource(params.hcat_hdfs_user_dir,
-                           type="directory",
-                           action="create_delayed",
-                           owner=params.hcat_user,
-                           mode=params.hcat_hdfs_user_mode
-      )
-    params.HdfsResource(params.webhcat_hdfs_user_dir,
-                         type="directory",
-                         action="create_delayed",
-                         owner=params.webhcat_user,
-                         mode=params.webhcat_hdfs_user_mode
-    )
-  
-    for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_delayed",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    if (os.path.isfile(params.pig_tar_source)):
-      params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
-                          type="file",
-                          action="create_delayed",
-                          source=params.pig_tar_source,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-  
-    params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
-                        type="file",
-                        action="create_delayed",
-                        source=params.hive_tar_source,
-                        group=params.user_group,
-                        mode=params.tarballs_mode
-    )
 
-    for src_filepath in glob.glob(params.sqoop_tar_source):
-      src_filename = os.path.basename(src_filepath)
-      params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content() + '/' + src_filename,
-                          type="file",
-                          action="create_delayed",
-                          source=src_filepath,
-                          group=params.user_group,
-                          mode=params.tarballs_mode
-      )
-      
-    params.HdfsResource(params.hive_apps_whs_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hive_apps_whs_dir,
                          action="create_delayed",
                          owner=params.hive_user,
                          mode=0777
     )
-    params.HdfsResource(params.hive_hdfs_user_dir,
-                         type="directory",
+    params.HdfsDirectory(params.hive_hdfs_user_dir,
                          action="create_delayed",
                          owner=params.hive_user,
                          mode=params.hive_hdfs_user_mode
     )
-    params.HdfsResource(None, action="execute")
+    params.HdfsDirectory(None, action="create")
 
   Directory(params.hive_conf_dir_prefix,
             mode=0755

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
index 735d59d..12efae8 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive_server.py
@@ -22,8 +22,8 @@ import hive_server_upgrade
 from resource_management import *
 from hive import hive
 from hive_service import hive_service
+from resource_management.libraries.functions.dynamic_variable_interpretation import copy_tarballs_to_hdfs
 from install_jars import install_tez_jars
-from resource_management.libraries.functions.version import compare_versions
 
 class HiveServer(Script):
 
@@ -35,8 +35,7 @@ class HiveServer(Script):
   def configure(self, env):
     import params
     env.set_params(params)
-    
-    if not (params.hdp_stack_version_major != "" and compare_versions(params.hdp_stack_version_major, '2.2') >=0):
+    if not (params.hdp_stack_version != "" and compare_versions(params.hdp_stack_version, '2.2') >=0):
       install_tez_jars()
 
     hive(name='hiveserver2')
@@ -47,6 +46,10 @@ class HiveServer(Script):
     env.set_params(params)
     self.configure(env) # FOR SECURITY
 
+    # This function is needed in HDP 2.2, but it is safe to call in earlier versions.
+    copy_tarballs_to_hdfs('mapreduce', params.tez_user, params.hdfs_user, params.user_group)
+    copy_tarballs_to_hdfs('tez', params.tez_user, params.hdfs_user, params.user_group)
+
     hive_service( 'hiveserver2', action = 'start',
       rolling_restart=rolling_restart )
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
index 16c63a4..a18ca72 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/install_jars.py
@@ -20,7 +20,6 @@ limitations under the License.
 
 from resource_management import *
 import os
-import glob
 
 def install_tez_jars():
   import params
@@ -30,12 +29,25 @@ def install_tez_jars():
   # If tez libraries are to be stored in hdfs
   if destination_hdfs_dirs:
     for hdfs_dir in destination_hdfs_dirs:
-      params.HdfsResource(hdfs_dir,
-                           type="directory",
+      params.HdfsDirectory(hdfs_dir,
                            action="create_delayed",
                            owner=params.tez_user,
                            mode=0755
       )
+    pass
+    params.HdfsDirectory(None, action="create")
+
+    if params.security_enabled:
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
+    else:
+      kinit_if_needed = ""
+
+    if kinit_if_needed:
+      Execute(kinit_if_needed,
+              user=params.tez_user,
+              path='/bin'
+      )
+    pass
 
     app_dir_path = None
     lib_dir_path = None
@@ -49,25 +61,30 @@ def install_tez_jars():
         pass
       pass
     pass
-  
-    tez_jars = {}
+
     if app_dir_path:
-      tez_jars[params.tez_local_api_jars] = app_dir_path
-    if lib_dir_path:
-      tez_jars[params.tez_local_lib_jars] = lib_dir_path
-
-    for src_file_regex, dest_dir in tez_jars.iteritems():
-      for src_filepath in glob.glob(src_file_regex):
-        src_filename = os.path.basename(src_filepath)
-        params.HdfsResource(format("{dest_dir}/{src_filename}"),
-                            type="file",
-                            action="create_delayed",
-                            source=src_filepath,
-                            mode=0755,
-                            owner=params.tez_user
+      for scr_file, dest_file in params.app_dir_files.iteritems():
+        CopyFromLocal(scr_file,
+                      mode=0755,
+                      owner=params.tez_user,
+                      dest_dir=app_dir_path,
+                      dest_file=dest_file,
+                      kinnit_if_needed=kinit_if_needed,
+                      hdfs_user=params.hdfs_user,
+                      hadoop_bin_dir=params.hadoop_bin_dir,
+                      hadoop_conf_dir=params.hadoop_conf_dir
         )
 
-    params.HdfsResource(None, action="execute")
+    if lib_dir_path:
+      CopyFromLocal(params.tez_local_lib_jars,
+                    mode=0755,
+                    owner=params.tez_user,
+                    dest_dir=lib_dir_path,
+                    kinnit_if_needed=kinit_if_needed,
+                    hdfs_user=params.hdfs_user,
+                    hadoop_bin_dir=params.hadoop_bin_dir,
+                    hadoop_conf_dir=params.hadoop_conf_dir
+      )
     pass
 
 

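install_jars.py goes back to pushing each local Tez jar with CopyFromLocal, i.e. a copyFromLocal per file rather than one batched HdfsResource execution. A simplified standalone stand-in is sketched below; the command strings and the chown/chmod follow-up steps are assumptions for illustration, and the real provider also handles kinit and skip-if-exists checks.

import os
import subprocess

def copy_from_local(src, dest_dir, owner, mode="755", hdfs_user="hdfs",
                    conf_dir="/etc/hadoop/conf", bin_dir="/usr/bin"):
    hadoop_fs = "%s/hadoop --config %s fs" % (bin_dir, conf_dir)
    dest = "%s/%s" % (dest_dir, os.path.basename(src))
    for step in ("%s -copyFromLocal %s %s" % (hadoop_fs, src, dest_dir),
                 "%s -chown %s %s" % (hadoop_fs, owner, dest),
                 "%s -chmod %s %s" % (hadoop_fs, mode, dest)):
        subprocess.check_call("sudo su %s -s /bin/bash - -c '%s'" % (hdfs_user, step),
                              shell=True)

# Example (would require a running HDFS):
# copy_from_local("/usr/lib/tez/tez-api.jar", "/apps/tez", owner="tez")
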
http://git-wip-us.apache.org/repos/asf/ambari/blob/8cf0e915/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
index 0caae1b..41785c6 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/params.py
@@ -29,19 +29,15 @@ tmp_dir = Script.get_tmp_dir()
 
 # This is expected to be of the form #.#.#.#
 stack_version_unformatted = str(config['hostLevelParams']['stack_version'])
-hdp_stack_version_major = format_hdp_stack_version(stack_version_unformatted)
-stack_is_hdp21 = hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.1') >= 0 and compare_versions(hdp_stack_version_major, '2.2') < 0
+hdp_stack_version = format_hdp_stack_version(stack_version_unformatted)
+stack_is_hdp21 = hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.1') >= 0 and compare_versions(hdp_stack_version, '2.2') < 0
 
-# this is not avaliable on INSTALL action because hdp-select is not available
-hdp_stack_version = version.get_hdp_build_version(hdp_stack_version_major)
 # New Cluster Stack Version that is defined during the RESTART of a Rolling Upgrade
 version = default("/commandParams/version", None)
 
-webhcat_apps_dir = "/apps/webhcat"
-
 # Hadoop params
 # TODO, this logic should initialize these parameters in a file inside the HDP 2.2 stack.
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '2.2') >=0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, '2.2') >=0:
   # start out with client libraries
   hadoop_bin_dir = "/usr/hdp/current/hadoop-client/bin"
   hadoop_home = '/usr/hdp/current/hadoop-client'
@@ -64,25 +60,6 @@ if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, '
   webhcat_bin_dir = '/usr/hdp/current/hive-webhcat/sbin'
 
   hive_specific_configs_supported = True
-
-  # --- Tarballs ---
-
-  hive_tar_source = config['configurations']['cluster-env']['hive_tar_source']
-  pig_tar_source = config['configurations']['cluster-env']['pig_tar_source']
-  hadoop_streaming_tar_source = config['configurations']['cluster-env']['hadoop-streaming_tar_source']
-  sqoop_tar_source = config['configurations']['cluster-env']['sqoop_tar_source']
-  mapreduce_tar_source = config['configurations']['cluster-env']['mapreduce_tar_source']
-  tez_tar_source = config['configurations']['cluster-env']['tez_tar_source']
-
-  hive_tar_destination = config['configurations']['cluster-env']['hive_tar_destination_folder']  + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = config['configurations']['cluster-env']['pig_tar_destination_folder'] + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = config['configurations']['cluster-env']['hadoop-streaming_tar_destination_folder']
-  sqoop_tar_destination_dir = config['configurations']['cluster-env']['sqoop_tar_destination_folder'] + "/" + os.path.basename(sqoop_tar_source)
-  mapreduce_tar_destination = config['configurations']['cluster-env']['mapreduce_tar_destination_folder'] + "/" + os.path.basename(mapreduce_tar_source)
-  tez_tar_destination = config['configurations']['cluster-env']['tez_tar_destination_folder'] + "/" + os.path.basename(tez_tar_source)
-
-  tarballs_mode = 0444
-
 else:
   hadoop_bin_dir = "/usr/bin"
   hadoop_home = '/usr'
@@ -93,7 +70,7 @@ else:
   hive_tar_file = '/usr/share/HDP-webhcat/hive.tar.gz'
   sqoop_tar_file = '/usr/share/HDP-webhcat/sqoop*.tar.gz'
 
-  if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+  if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
     hcat_lib = '/usr/lib/hcatalog/share/hcatalog'
     webhcat_bin_dir = '/usr/lib/hcatalog/sbin'
   # for newer versions
@@ -103,27 +80,13 @@ else:
     
   hive_specific_configs_supported = False
 
-  # --- Tarballs ---
-  hive_tar_source = hive_tar_file
-  pig_tar_source = pig_tar_file
-  hadoop_streaming_tar_source = hadoop_streeming_jars
-  sqoop_tar_source = sqoop_tar_file
-
-  hive_tar_destination = webhcat_apps_dir + "/" + os.path.basename(hive_tar_source)
-  pig_tar_destination = webhcat_apps_dir + "/" + os.path.basename(pig_tar_source)
-  hadoop_streaming_tar_destination_dir = webhcat_apps_dir
-  sqoop_tar_destination_dir = webhcat_apps_dir
-
-  tarballs_mode = 0755
-
-
 hadoop_conf_dir = "/etc/hadoop/conf"
 hive_conf_dir_prefix = "/etc/hive"
 hive_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_client_conf_dir = format("{hive_conf_dir_prefix}/conf")
 hive_server_conf_dir = format("{hive_conf_dir_prefix}/conf.server")
 
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
   hcat_conf_dir = '/etc/hcatalog/conf'
   config_dir = '/etc/hcatalog/conf'
 # for newer versions
@@ -233,7 +196,7 @@ mysql_adduser_path = format("{tmp_dir}/addMysqlUser.sh")
 mysql_deluser_path = format("{tmp_dir}/removeMysqlUser.sh")
 
 ######## Metastore Schema
-if hdp_stack_version_major != "" and compare_versions(hdp_stack_version_major, "2.1.0.0") < 0:
+if hdp_stack_version != "" and compare_versions(hdp_stack_version, "2.1.0.0") < 0:
   init_metastore_schema = False
 else:
   init_metastore_schema = True
@@ -278,6 +241,7 @@ tez_user = config['configurations']['tez-env']['tez_user']
 # Tez jars
 tez_local_api_jars = '/usr/lib/tez/tez*.jar'
 tez_local_lib_jars = '/usr/lib/tez/lib/*.jar'
+app_dir_files = {tez_local_api_jars:None}
 
 # Tez libraries
 tez_lib_uris = default("/configurations/tez-site/tez.lib.uris", None)
@@ -321,6 +285,7 @@ templeton_jar = config['configurations']['webhcat-site']['templeton.jar']
 
 webhcat_server_host = config['clusterHostInfo']['webhcat_server_host']
 
+webhcat_apps_dir = "/apps/webhcat"
 
 hcat_hdfs_user_dir = format("/user/{hcat_user}")
 hcat_hdfs_user_mode = 0755
@@ -330,15 +295,14 @@ webhcat_hdfs_user_mode = 0755
 security_param = "true" if security_enabled else "false"
 
 import functools
-#create partial functions with common arguments for every HdfsResource call
-#to create hdfs directory we need to call params.HdfsResource in code
-HdfsResource = functools.partial(
-  HdfsResource,
-  user = hdfs_principal_name if security_enabled else hdfs_user,
+#create partial functions with common arguments for every HdfsDirectory call
+#to create hdfs directory we need to call params.HdfsDirectory in code
+HdfsDirectory = functools.partial(
+  HdfsDirectory,
+  conf_dir = hadoop_conf_dir,
+  hdfs_user = hdfs_principal_name if security_enabled else hdfs_user,
   security_enabled = security_enabled,
   keytab = hdfs_user_keytab,
   kinit_path_local = kinit_path_local,
-  hadoop_fs=fs_root,
-  hadoop_bin_dir = hadoop_bin_dir,
-  hadoop_conf_dir = hadoop_conf_dir
+  bin_dir = hadoop_bin_dir
 )
