Repository: ambari
Updated Branches:
  refs/heads/trunk 92da024a5 -> 6e8dce443


http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json b/ambari-server/src/test/python/stacks/2.2/configs/default.json
index c5a6ae7..8188928 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json
@@ -185,7 +185,18 @@
             "ignore_groupsusers_create": "false",
             "smokeuser": "ambari-qa",
             "kerberos_domain": "EXAMPLE.COM",
-            "user_group": "hadoop"
+            "user_group": "hadoop",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
+            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
+            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
+            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
         },
         "ranger-knox-plugin-properties": {
             "POLICY_MGR_URL": "{{policymgr_mgr_url}}", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
index 5bd8814..e224ebc 100644
--- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.2/configs/secured.json
@@ -173,7 +173,18 @@
             "user_group": "hadoop",
             "smokeuser_keytab": 
"/etc/security/keytabs/smokeuser.headless.keytab",
             "smokeuser_principal_name": "ambari...@example.com",
-            "kinit_path_local": "/usr/bin"
+            "kinit_path_local": "/usr/bin",
+            "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz",
+            "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+            "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz",
+            "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/",
+            "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/",
+            "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/",
+            "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz",
+            "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/",
+            "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz",
+            "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/",
+            "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz"
         },
         "webhcat-site": {
             "templeton.jar": 
"/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar",

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
index 0d943c4..665119f 100644
--- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py
@@ -33,35 +33,42 @@ class TestMahoutClient(RMFTestCase):
                        target = RMFTestCase.TARGET_COMMON_SERVICES
     )
 
-    self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput /user/ambari-qa/mahoutsmokeinput',
-                              security_enabled = False,
-                              keytab = UnknownConfigurationMock(),
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              try_sleep = 5,
-                              kinit_path_local = '/usr/bin/kinit',
-                              tries = 3,
-                              user = 'ambari-qa',
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              principal = UnknownConfigurationMock(),
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -mkdir /user/ambari-qa/mahoutsmokeinput',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              )
     self.assertResourceCalled('File', '/tmp/sample-mahout-test.txt',
-                              content = 'Test text which will be converted to sequence file.',
-                              mode = 0755,
-                              )
-    self.assertResourceCalled('ExecuteHadoop', 'fs -put /tmp/sample-mahout-test.txt /user/ambari-qa/mahoutsmokeinput/',
-                              try_sleep = 5,
-                              tries = 3,
-                              bin_dir = '/usr/hdp/current/hadoop-client/bin',
-                              user = 'ambari-qa',
-                              conf_dir = '/usr/hdp/current/hadoop-client/conf',
-                              )
+        content = 'Test text which will be converted to sequence file.',
+        mode = 0755,
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'directory',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', '/user/ambari-qa/mahoutsmokeinput/sample-mahout-test.txt',
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        source = '/tmp/sample-mahout-test.txt',
+        user = 'hdfs',
+        owner = 'ambari-qa',
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+        type = 'file',
+        action = ['create_on_execute'],
+    )
+    self.assertResourceCalled('HdfsResource', None,
+        security_enabled = False,
+        hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin',
+        keytab = UnknownConfigurationMock(),
+        kinit_path_local = '/usr/bin/kinit',
+        user = 'hdfs',
+        action = ['execute'],
+        hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf',
+    )
     self.assertResourceCalled('Execute', 'mahout seqdirectory --input /user/ambari-qa/mahoutsmokeinput/'
                                          'sample-mahout-test.txt --output /user/ambari-qa/mahoutsmokeoutput/ '
                                          '--charset utf-8',

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/dependency-reduced-pom.xml b/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
new file mode 100644
index 0000000..c252f2e
--- /dev/null
+++ b/contrib/fast-hdfs-resource/dependency-reduced-pom.xml
@@ -0,0 +1,42 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>fast-hdfs-resource</artifactId>
+  <name>fast-hdfs-resource</name>
+  <version>0.0.1-SNAPSHOT</version>
+  <url>http://maven.apache.org</url>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer>
+                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner</mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <repositories>
+    <repository>
+      <id>hdp.internal</id>
+      <url>http://repo1.maven.org/maven2</url>
+    </repository>
+  </repositories>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+</project>
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/pom.xml b/contrib/fast-hdfs-resource/pom.xml
new file mode 100644
index 0000000..6a92841
--- /dev/null
+++ b/contrib/fast-hdfs-resource/pom.xml
@@ -0,0 +1,86 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0";
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <modelVersion>4.0.0</modelVersion>
+
+  <groupId>org.apache.ambari</groupId>
+  <artifactId>fast-hdfs-resource</artifactId>
+  <version>0.0.1-SNAPSHOT</version>
+  <packaging>jar</packaging>
+
+  <name>fast-hdfs-resource</name>
+  <url>http://maven.apache.org</url>
+  <repositories>
+    <repository>
+      <id>hdp.internal</id>
+      <url>http://repo1.maven.org/maven2</url>
+    </repository>
+  </repositories>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-tools</artifactId>
+      <version>1.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-core</artifactId>
+      <version>1.2.1</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.code.gson</groupId>
+      <artifactId>gson</artifactId>
+      <version>2.2.2</version>
+    </dependency>
+  </dependencies>
+
+
+  <!-- Create executable jar with the application entry point -->
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>2.3</version>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+            <configuration>
+              <transformers>
+                <transformer
+                  implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                  <mainClass>org.apache.ambari.fast_hdfs_resource.Runner
+                  </mainClass>
+                </transformer>
+              </transformers>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/resources/example.json
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/resources/example.json b/contrib/fast-hdfs-resource/resources/example.json
new file mode 100644
index 0000000..605deb9
--- /dev/null
+++ b/contrib/fast-hdfs-resource/resources/example.json
@@ -0,0 +1,57 @@
+[
+{
+       "target":"/tmp/some999",
+       "type":"directory",
+       "action":"delete"
+},
+{
+       "target":"/tmp/some999/more/dirs/for/recursive/tests",
+       "type":"directory",
+       "action":"create"
+},
+{
+       "target":"/tmp/some999/more/dirs/for/recursive/tests/file_empty.txt",
+       "type":"file",
+       "action":"create"
+},
+{
+       "target":"/tmp/some999",
+       "type":"directory",
+       "action":"create",
+       "owner":"oozie"
+},
+{
+       "target":"/tmp/some999",
+       "type":"directory",
+       "action":"create",
+       "group":"hive"
+},
+{
+       "target":"/tmp/some999",
+       "type":"directory",
+       "action":"create",
+       "mode":"777"
+},
+{
+       "target":"/tmp/some999/more/dirs",
+       "type":"directory",
+       "action":"create",
+       "owner":"yarn",
+       "group":"mapred",
+       "recursiveChown":true,
+       "mode":"757",
+       "recursiveChmod":true
+},
+{
+       "source":"/tmp/my.txt",
+       "target":"/tmp/some999/my_file.txt",
+       "type":"file",
+       "action":"create"
+},
+{
+       "source":"/tmp/a",
+       "target":"/tmp/some999/a_dir",
+       "type":"directory",
+       "action":"create"
+}
+]
\ No newline at end of file
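
The entries above are the input format for the new fast-hdfs-resource tool: each object names an HDFS target, a type ("file" or "directory"), an action ("create" or "delete"), plus optional owner/group/mode and recursion flags. Runner.java further down in this commit loads such a file with Gson straight into Resource beans. Below is a minimal, self-contained sketch (not part of the patch) of that Gson mapping; ResourceSpec is a hypothetical stand-in for the patch's Resource class, and the sample JSON string mirrors one entry from example.json above.

// Sketch only: shows how Gson maps example.json entries onto a bean, the same
// way Runner.java does with the full Resource class. ResourceSpec is a
// hypothetical, trimmed-down stand-in used here for illustration.
import com.google.gson.Gson;

public class GsonMappingSketch {
  static class ResourceSpec {
    String source;          // optional local path to copy from
    String target;          // HDFS path to create or delete
    String type;            // "file" or "directory"
    String action;          // "create" or "delete"
    String owner;
    String group;
    String mode;
    boolean recursiveChown;
    boolean recursiveChmod;
  }

  public static void main(String[] args) {
    // One entry in the same shape as resources/example.json.
    String json = "[{\"target\":\"/tmp/some999/more/dirs\",\"type\":\"directory\","
        + "\"action\":\"create\",\"owner\":\"yarn\",\"group\":\"mapred\","
        + "\"recursiveChown\":true,\"mode\":\"757\",\"recursiveChmod\":true}]";

    ResourceSpec[] specs = new Gson().fromJson(json, ResourceSpec[].class);
    for (ResourceSpec spec : specs) {
      System.out.println(spec.action + " " + spec.type + " " + spec.target
          + " (mode=" + spec.mode + ", owner=" + spec.owner + ":" + spec.group + ")");
    }
  }
}

Because Gson binds JSON keys to field names by reflection, keys such as recursiveChown and recursiveChmod land on the boolean flags without extra configuration, which is what lets Runner keep its loading step to a single fromJson call.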

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/resources/test_perfomance.sh
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/resources/test_perfomance.sh b/contrib/fast-hdfs-resource/resources/test_perfomance.sh
new file mode 100644
index 0000000..40339e3
--- /dev/null
+++ b/contrib/fast-hdfs-resource/resources/test_perfomance.sh
@@ -0,0 +1,46 @@
+#!/bin/sh
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+# Delete 2 dirs
+sudo -u hdfs hadoop fs -rm -r /tmp/some999
+sudo -u hdfs hadoop fs -rm -r /tmp/some888
+# Create
+sudo -u hdfs hadoop fs -mkdir -p /tmp/some999/more/dirs/for/recursive/tests
+# Create + permissions + owner
+sudo -u hdfs hadoop fs -mkdir -p /tmp/some888/more/dirs/for/recursive/tests
+sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/more/dirs/for/recursive/tests
+sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/more/dirs/for/recursive/tests
+# Empty dirs with permissions/owners to last dir
+sudo -u hdfs hadoop fs -mkdir -p /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
+sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
+sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/and_more/and_dirs/_andfor/recursive/tests
+# Empty dirs with permissions/owners to last file
+sudo -u hdfs hadoop fs -touchz /tmp/some888/file.txt
+sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/file.txt
+sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/file.txt
+# Empty dirs with permissions/owners to last file
+sudo -u hdfs hadoop fs -touchz /tmp/some888/and_more/and_dirs/file2.txt
+sudo -u hdfs hadoop fs -chown hadoop:hadoop /tmp/some888/and_more/and_dirs/file2.txt
+sudo -u hdfs hadoop fs -chmod 777 /tmp/some888/and_more/and_dirs/file2.txt
+# Recursive permissions
+sudo -u hdfs hadoop fs -chmod -R 700 /tmp/some888
+sudo -u hdfs hadoop fs -chown -R hive:hive /tmp/some999
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
new file mode 100644
index 0000000..21750e1
--- /dev/null
+++ b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Resource.java
@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.fast_hdfs_resource;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.File;
+import java.lang.System;
+import java.util.ArrayList;
+import java.util.HashSet;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.fs.FileSystem;
+
+/**
+ * Used to: 1) copy files/directories from localFS to hadoopFs 2) create empty
+ * files/directories in hadoopFs
+ */
+public class Resource {
+  private String source;
+  private String target;
+  private String type;
+  private String action;
+  private String owner;
+  private String group;
+  private String mode;
+  private boolean recursiveChown;
+  private boolean recursiveChmod;
+  private boolean changePermissionforParents;
+
+  public String getSource() {
+    return source;
+  }
+
+  public void setSource(String source) {
+    this.source = source;
+  }
+
+  public String getTarget() {
+    return target;
+  }
+
+  public void setTarget(String target) {
+    this.target = target;
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public void setType(String type) {
+    this.type = type;
+  }
+
+  public String getAction() {
+    return action;
+  }
+
+  public void setAction(String action) {
+    this.action = action;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getGroup() {
+    return group;
+  }
+
+  public void setGroup(String group) {
+    this.group = group;
+  }
+
+  public String getMode() {
+    return mode;
+  }
+
+  public void setMode(String mode) {
+    this.mode = mode;
+  }
+
+  public boolean isRecursiveChown() {
+    return recursiveChown;
+  }
+
+  public void setRecursiveChown(boolean recursiveChown) {
+    this.recursiveChown = recursiveChown;
+  }
+
+  public boolean isRecursiveChmod() {
+    return recursiveChmod;
+  }
+
+  public void setRecursiveChmod(boolean recursiveChmod) {
+    this.recursiveChmod = recursiveChmod;
+  }
+  
+  public boolean isChangePermissionOnParents() {
+    return changePermissionforParents;
+  }
+
+  public void setChangePermissionOnParents(boolean changePermissionforParents) {
+    this.changePermissionforParents = changePermissionforParents;
+  }
+
+  
+  
+  
+  @Override
+  public String toString() {
+    return "Resource [source=" + source + ", target=" + target + ", type="
+        + type + ", action=" + action + ", owner=" + owner + ", group=" + group
+        + ", mode=" + mode + ", recursiveChown=" + recursiveChown
+        + ", recursiveChmod=" + recursiveChmod
+        + ", changePermissionforParents=" + changePermissionforParents + "]";
+  }
+
+  /*
+   * Check if parameters are correctly set
+   */
+  public static void checkResourceParameters(Resource resource,
+      FileSystem dfs) throws IllegalArgumentException, IOException {
+
+    ArrayList<String> actionsAvailable = new ArrayList<String>();
+    actionsAvailable.add("create");
+    actionsAvailable.add("delete");
+    ArrayList<String> typesAvailable = new ArrayList<String>();
+    typesAvailable.add("file");
+    typesAvailable.add("directory");
+
+    if (resource.getTarget() == null)
+      throw new IllegalArgumentException(
+          "Path to resource in HadoopFs must be filled.");
+
+    if (resource.getAction() == null
+        || !actionsAvailable.contains(resource.getAction()))
+      throw new IllegalArgumentException("Action is not supported.");
+
+    if (resource.getType() == null
+        || !typesAvailable.contains(resource.getType()))
+      throw new IllegalArgumentException("Type is not supported.");
+
+    // Check consistency for ("type":"file" == file in hadoop)
+    if (dfs.isFile(new Path(resource.getTarget()))
+        && !"file".equals(resource.getType()))
+      throw new IllegalArgumentException(
+          "Cannot create a directory " + resource.getTarget() +
+              " because file is present on the given path.");
+    // Check consistency for ("type":"directory" == directory in hadoop)
+    else if (dfs.isDirectory(new Path(resource.getTarget()))
+        && !"directory".equals(resource.getType()))
+      throw new IllegalArgumentException(
+          "Cannot create a file " + resource.getTarget() +
+              " because directory is present on the given path.");
+    
+    if(resource.getSource() != null) {
+      File source = new File(resource.getSource());
+      if(source.isFile()
+          && !"file".equals(resource.getType()))
+        throw new IllegalArgumentException(
+            "Cannot create a directory " + resource.getTarget() +
+                " because source " + resource.getSource() + " is a file");
+      else if(source.isDirectory()
+          && !"directory".equals(resource.getType()))
+        throw new IllegalArgumentException(
+            "Cannot create a file " + resource.getTarget() +
+                " because source " + resource.getSource() + " is a directory");
+    }
+  }
+
+  /*
+   * Create/copy resource - {type}
+   */
+  public static void createResource(Resource resource,
+      FileSystem dfs, Path pathHadoop) throws IOException {
+
+    boolean isCreate = (resource.getSource() == null) ? true : false;
+
+    if (isCreate && resource.getType().equals("directory")) {
+      dfs.mkdirs(pathHadoop); // empty dir(s)
+    } else if (isCreate && resource.getType().equals("file")) {
+      dfs.createNewFile(pathHadoop); // empty file
+    } else {
+      dfs.copyFromLocalFile(new Path(resource.getSource()), pathHadoop);// copy
+    }
+  }
+
+  /*
+   * Set permissions on resource - {mode}
+   */
+  public static void setMode(Resource resource,
+      FileSystem dfs, Path pathHadoop) throws IOException {
+
+    if (resource.getMode() != null) {
+      FsPermission permission = new FsPermission(resource.getMode());
+      dfs.setPermission(pathHadoop, permission);
+
+      // Recursive
+      
+        // Get the list of sub-directories and files
+        HashSet<String> resultSet = new HashSet<String>();
+        
+        if (resource.isRecursiveChmod())
+          resource.fillDirectoryList(dfs, resource.getTarget(), resultSet);
+        
+        if(resource.isChangePermissionOnParents())
+          resource.fillInParentDirectories(dfs, resource.getTarget(), resultSet);
+
+        for (String path : resultSet) {
+          dfs.setPermission(new Path(path), permission);
+        }
+
+    }
+  }
+
+  /*
+   * Set owner on resource - {owner}
+   */
+  public static void setOwner(Resource resource, FileSystem dfs,
+      Path pathHadoop) throws IOException {
+
+    if (!(resource.getOwner() == null && resource.getGroup() == null)) {
+      dfs.setOwner(pathHadoop, resource.getOwner(), resource.getGroup());
+
+      // Get the list of sub-directories and files
+      HashSet<String> resultSet = new HashSet<String>();
+      if (resource.isRecursiveChown())
+        resource.fillDirectoryList(dfs, resource.getTarget(), resultSet);
+      if(resource.isChangePermissionOnParents())
+        resource.fillInParentDirectories(dfs, resource.getTarget(), resultSet);
+
+      for (String path : resultSet) {
+        dfs.setOwner(new Path(path), resource.getOwner(), resource.getGroup());
+      }
+    }
+  }
+  
+  public void fillInParentDirectories(FileSystem dfs, String path, HashSet<String> resultSet) throws IOException {
+    Path filePath = new Path(path);
+      
+    while(true) {
+      filePath = filePath.getParent();
+      
+      // if(filePath.isRoot()) {
+      if(filePath.getParent() == null) {
+        break;
+      }
+      resultSet.add(filePath.toString());
+    }
+  }
+
+  /*
+   * List all files and sub-directories recursively
+   */
+  public void fillDirectoryList(FileSystem dfs, String path,
+      HashSet<String> resultSet) throws IOException {
+
+    FileStatus[] fileStatus = dfs.listStatus(new Path(path));
+    if (fileStatus != null) {
+      // Go through all resources in directory
+      for (FileStatus fs : fileStatus) {
+        String pathToResource = path + "/" + fs.getPath().getName();
+
+        resultSet.add(pathToResource);
+
+        if (fs.isDir()) {
+          // recursive
+          fillDirectoryList(dfs, pathToResource, resultSet);
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
----------------------------------------------------------------------
diff --git a/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
new file mode 100644
index 0000000..291a2d9
--- /dev/null
+++ b/contrib/fast-hdfs-resource/src/main/java/org/apache/ambari/fast_hdfs_resource/Runner.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.fast_hdfs_resource;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+
+import com.google.gson.Gson;
+
+public class Runner {
+  public static void main(String[] args)
+      throws IOException, URISyntaxException {
+    // 1 - Check arguments
+    if (args.length != 1) {
+      System.err.println("Incorrect number of arguments. Please provide:\n"
+          + "1) Path to json file\n"
+          + "Exiting...");
+      System.exit(1);
+    }
+
+    // 2 - Check if json-file exists
+    final String jsonFilePath = args[0];
+    File file = new File(jsonFilePath);
+
+    if (!file.isFile()) {
+      System.err
+          .println("File " + jsonFilePath + " doesn't exist.\nExiting...");
+      System.exit(1);
+    }
+
+    Gson gson = new Gson();
+    Resource[] resources = null;
+    FileSystem dfs = null;
+
+    try {
+      Configuration conf = new Configuration();
+      dfs = FileSystem.get(conf);
+
+      // 3 - Load data from JSON
+      resources = (Resource[]) gson.fromJson(new FileReader(jsonFilePath),
+          Resource[].class);
+
+      // 4 - Connect to HDFS
+      System.out.println("Using filesystem uri: " + 
FileSystem.getDefaultUri(conf).toString());
+      dfs.initialize(FileSystem.getDefaultUri(conf), conf);
+      
+      for (Resource resource : resources) {
+        System.out.println("Creating: " + resource);
+
+        Resource.checkResourceParameters(resource, dfs);
+
+        Path pathHadoop = new Path(resource.getTarget());
+        if (resource.getAction().equals("create")) {
+          // 5 - Create
+          Resource.createResource(resource, dfs, pathHadoop);
+          Resource.setMode(resource, dfs, pathHadoop);
+          Resource.setOwner(resource, dfs, pathHadoop);
+        } else if (resource.getAction().equals("delete")) {
+          // 6 - Delete
+          dfs.delete(pathHadoop, true);
+        }
+      }
+
+    } finally {
+      dfs.close();
+    }
+
+    System.out.println("All resources created.");
+  }
+
+}
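
The shade plugin configuration in both POMs above sets Runner as the jar's Main-Class, so the tool takes exactly one argument: the path to a JSON file in the example.json format. A hypothetical smoke check (not part of the patch) is sketched below; it assumes the shaded fast-hdfs-resource jar and its Hadoop dependencies are on the classpath and that fs.defaultFS points at a reachable HDFS. On a cluster the equivalent command line would be along the lines of "hadoop jar fast-hdfs-resource-0.0.1-SNAPSHOT.jar /tmp/check.json", with the jar name taken from the POM's artifactId and version.

// Hypothetical smoke check for the Runner entry point (not part of the patch).
// Assumes the shaded jar and its Hadoop dependencies are on the classpath and
// that fs.defaultFS points at a reachable HDFS instance.
import java.io.FileWriter;
import java.io.IOException;
import java.net.URISyntaxException;

public class RunnerSmokeCheck {
  public static void main(String[] args) throws IOException, URISyntaxException {
    // One resource descriptor in the same shape as resources/example.json.
    String json = "[{\"target\":\"/tmp/fast_hdfs_resource_check\","
        + "\"type\":\"directory\",\"action\":\"create\",\"mode\":\"755\"}]";
    FileWriter out = new FileWriter("/tmp/fast_hdfs_resource_check.json");
    try {
      out.write(json);
    } finally {
      out.close();
    }
    // Hand the file to the tool exactly as the command line would.
    org.apache.ambari.fast_hdfs_resource.Runner.main(
        new String[] { "/tmp/fast_hdfs_resource_check.json" });
  }
}

Batching many HDFS operations into one JSON file and a single JVM launch is the point of the tool; the test_perfomance.sh script above appears to exist to compare that approach against issuing one hadoop fs command per operation.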
