Author: tucu
Date: Fri Feb  1 19:46:14 2013
New Revision: 1441606

URL: http://svn.apache.org/viewvc?rev=1441606&view=rev
Log:
HDFS-4456. Add concat to HttpFS and WebHDFS REST API docs. (plamenj2003 via tucu)

Modified:
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java
    hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Fri Feb  1 19:46:14 2013
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.fs.http.client;
 
+import java.util.ArrayList;
+import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ContentSummary;
@@ -86,6 +88,7 @@ public class HttpFSFileSystem extends Fi
   public static final String PERMISSION_PARAM = "permission";
   public static final String DESTINATION_PARAM = "destination";
   public static final String RECURSIVE_PARAM = "recursive";
+  public static final String SOURCES_PARAM = "sources";
   public static final String OWNER_PARAM = "owner";
   public static final String GROUP_PARAM = "group";
   public static final String MODIFICATION_TIME_PARAM = "modificationtime";
@@ -167,7 +170,7 @@ public class HttpFSFileSystem extends Fi
     GETHOMEDIRECTORY(HTTP_GET), GETCONTENTSUMMARY(HTTP_GET),
     GETFILECHECKSUM(HTTP_GET),  GETFILEBLOCKLOCATIONS(HTTP_GET),
     INSTRUMENTATION(HTTP_GET),
-    APPEND(HTTP_POST),
+    APPEND(HTTP_POST), CONCAT(HTTP_POST),
     CREATE(HTTP_PUT), MKDIRS(HTTP_PUT), RENAME(HTTP_PUT), SETOWNER(HTTP_PUT),
     SETPERMISSION(HTTP_PUT), SETREPLICATION(HTTP_PUT), SETTIMES(HTTP_PUT),
     DELETE(HTTP_DELETE);
@@ -529,6 +532,29 @@ public class HttpFSFileSystem extends Fi
   }
 
   /**
+   * Concat existing files together.
+   * @param f the path to the target destination.
+   * @param psrcs the paths to the sources to use for the concatenation.
+   *
+   * @throws IOException
+   */
+  @Override
+  public void concat(Path f, Path[] psrcs) throws IOException {
+    List<String> strPaths = new ArrayList<String>(psrcs.length);
+    for(Path psrc : psrcs) {
+      strPaths.add(psrc.toUri().getPath());
+    }
+    String srcs = StringUtils.join(",", strPaths);
+
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, Operation.CONCAT.toString());
+    params.put(SOURCES_PARAM, srcs);
+    HttpURLConnection conn = getConnection(Operation.CONCAT.getMethod(),
+        params, f, true);
+    HttpFSUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+  }
+
+  /**
    * Renames Path src to Path dst.  Can take place on local fs
    * or remote DFS.
    */

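With this change in place, a client can drive concat through HttpFS using the
ordinary FileSystem API; the comma-joining of source paths happens inside the
concat() override above. A minimal usage sketch, assuming a hypothetical HttpFS
endpoint and hypothetical test paths (the webhdfs:// scheme must resolve to an
HttpFS/WebHDFS FileSystem implementation in the client configuration):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ConcatClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical HttpFS endpoint; 14000 is the default HttpFS port.
        FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), conf);
        // Sources must be absolute paths; concat() joins them with "," and
        // sends them as the "sources" parameter of a POST ?op=CONCAT request.
        fs.concat(new Path("/test/foo.txt"),
            new Path[] { new Path("/test/bar.txt"), new Path("/test/derp.txt") });
        fs.close();
      }
    }
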
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/FSOperations.java Fri Feb  1 19:46:14 2013
@@ -199,6 +199,47 @@ public class FSOperations {
   }
 
   /**
+   * Executor that performs a concat FileSystemAccess files system operation.
+   */
+  @InterfaceAudience.Private
+  public static class FSConcat implements FileSystemAccess.FileSystemExecutor<Void> {
+    private Path path;
+    private Path[] sources;
+
+    /**
+     * Creates a Concat executor.
+     *
+     * @param path target path to concat to.
+     * @param sources comma separated absolute paths to use as sources.
+     */
+    public FSConcat(String path, String[] sources) {
+      this.sources = new Path[sources.length];
+
+      for(int i = 0; i < sources.length; i++) {
+        this.sources[i] = new Path(sources[i]);
+      }
+
+      this.path = new Path(path);
+    }
+
+    /**
+     * Executes the filesystem operation.
+     *
+     * @param fs filesystem instance to use.
+     *
+     * @return void.
+     *
+     * @throws IOException thrown if an IO error occurred.
+     */
+    @Override
+    public Void execute(FileSystem fs) throws IOException {
+      fs.concat(path, sources);
+      return null;
+    }
+
+  }
+
+  /**
   * Executor that performs a content-summary FileSystemAccess files system operation.
    */
   @InterfaceAudience.Private

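FSConcat follows the same executor pattern as the other FSOperations classes:
the server builds the command from request parameters and hands it a FileSystem
to run against. A minimal sketch of driving the executor directly, with
hypothetical paths; in HttpFSServer the command is run through fsExecute()
instead:

    import java.io.IOException;

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.http.server.FSOperations;

    public class FSConcatSketch {
      // Runs the concat executor against an already-open FileSystem; the
      // server obtains the FileSystem for the authenticated user itself.
      static void concatDirectly(FileSystem fs) throws IOException {
        FSOperations.FSConcat command = new FSOperations.FSConcat(
            "/test/foo.txt",                                     // target (hypothetical)
            new String[] { "/test/bar.txt", "/test/derp.txt" }); // sources (hypothetical)
        command.execute(fs); // delegates to fs.concat(target, sources)
      }
    }
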
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSParametersProvider.java Fri Feb  1 19:46:14 2013
@@ -58,6 +58,7 @@ public class HttpFSParametersProvider ex
     PARAMS_DEF.put(Operation.INSTRUMENTATION, new Class[]{DoAsParam.class});
     PARAMS_DEF.put(Operation.APPEND,
       new Class[]{DoAsParam.class, DataParam.class});
+    PARAMS_DEF.put(Operation.CONCAT, new Class[]{SourcesParam.class});
     PARAMS_DEF.put(Operation.CREATE,
       new Class[]{DoAsParam.class, PermissionParam.class, OverwriteParam.class,
                   ReplicationParam.class, BlockSizeParam.class, DataParam.class});
@@ -389,6 +390,25 @@ public class HttpFSParametersProvider ex
   }
 
   /**
+   * Class for concat sources parameter.
+   */
+  @InterfaceAudience.Private
+  public static class SourcesParam extends StringParam {
+
+    /**
+     * Parameter name.
+     */
+    public static final String NAME = HttpFSFileSystem.SOURCES_PARAM;
+
+    /**
+     * Constructor.
+     */
+    public SourcesParam() {
+      super(NAME, null);
+    }
+  }
+
+  /**
    * Class for to-path parameter.
    */
   @InterfaceAudience.Private

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServer.java Fri Feb  1 19:46:14 2013
@@ -22,22 +22,23 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.http.client.HttpFSFileSystem;
-import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OperationParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.AccessTimeParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.BlockSizeParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DataParam;
-import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.RecursiveParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DestinationParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DoAsParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.FilterParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.GroupParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.LenParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ModifiedTimeParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OffsetParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OperationParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OverwriteParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.OwnerParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.PermissionParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.RecursiveParam;
 import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.ReplicationParam;
-import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.DestinationParam;
+import org.apache.hadoop.fs.http.server.HttpFSParametersProvider.SourcesParam;
 import org.apache.hadoop.lib.service.FileSystemAccess;
 import org.apache.hadoop.lib.service.FileSystemAccessException;
 import org.apache.hadoop.lib.service.Groups;
@@ -403,9 +404,9 @@ public class HttpFSServer {
     Response response;
     path = makeAbsolute(path);
     MDC.put(HttpFSFileSystem.OP_PARAM, op.value().name());
-    String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
     switch (op.value()) {
       case APPEND: {
+        String doAs = params.get(DoAsParam.NAME, DoAsParam.class);
         Boolean hasData = params.get(DataParam.NAME, DataParam.class);
         if (!hasData) {
           response = Response.temporaryRedirect(
@@ -420,6 +421,16 @@
         }
         break;
       }
+      case CONCAT: {
+        String sources = params.get(SourcesParam.NAME, SourcesParam.class);
+
+        FSOperations.FSConcat command =
+            new FSOperations.FSConcat(path, sources.split(","));
+        fsExecute(user, null, command);
+        AUDIT_LOG.info("[{}]", path);
+        response = Response.ok().build();
+        break;
+      }
       default: {
         throw new IOException(
           MessageFormat.format("Invalid HTTP POST operation [{0}]",

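On the wire the operation is a plain HTTP POST: the target path travels in the
URL and the comma-joined sources in the query string, exactly as
HttpFSFileSystem.concat assembles them. A raw-request sketch against a
hypothetical HttpFS host; the user.name parameter assumes pseudo
authentication:

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ConcatRequestSketch {
      public static void main(String[] args) throws Exception {
        // Hypothetical host and paths; HttpFS exposes the same REST API as WebHDFS.
        URL url = new URL("http://httpfs-host:14000/webhdfs/v1/test/foo.txt"
            + "?op=CONCAT&sources=/test/bar.txt,/test/derp.txt&user.name=hdfs");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("POST");
        // A successful concat answers 200 OK with zero content length.
        System.out.println(conn.getResponseCode());
        conn.disconnect();
      }
    }
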
Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/BaseTestHttpFSWith.java Fri Feb  1 19:46:14 2013
@@ -28,6 +28,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.http.server.HttpFSServerWebApp;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.HFSTestCase;
 import org.apache.hadoop.test.HadoopUsersConfTestHelper;
@@ -206,6 +208,30 @@ public abstract class BaseTestHttpFSWith
     }
   }
 
+  private void testConcat() throws Exception {
+    Configuration config = getProxiedFSConf();
+    config.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, 1024);
+    if (!isLocalFS()) {
+      FileSystem fs = FileSystem.get(config);
+      fs.mkdirs(getProxiedFSTestDir());
+      Path path1 = new Path("/test/foo.txt");
+      Path path2 = new Path("/test/bar.txt");
+      Path path3 = new Path("/test/derp.txt");
+      DFSTestUtil.createFile(fs, path1, 1024, (short) 3, 0);
+      DFSTestUtil.createFile(fs, path2, 1024, (short) 3, 0);
+      DFSTestUtil.createFile(fs, path3, 1024, (short) 3, 0);
+      fs.close();
+      fs = getHttpFSFileSystem();
+      fs.concat(path1, new Path[]{path2, path3});
+      fs.close();
+      fs = FileSystem.get(config);
+      Assert.assertTrue(fs.exists(path1));
+      Assert.assertFalse(fs.exists(path2));
+      Assert.assertFalse(fs.exists(path3));
+      fs.close();
+    }
+  }
+
   private void testRename() throws Exception {
     FileSystem fs = FileSystem.get(getProxiedFSConf());
     Path path = new Path(getProxiedFSTestDir(), "foo");
@@ -450,7 +476,7 @@ public abstract class BaseTestHttpFSWith
   }
 
   protected enum Operation {
-    GET, OPEN, CREATE, APPEND, RENAME, DELETE, LIST_STATUS, WORKING_DIRECTORY, MKDIRS,
+    GET, OPEN, CREATE, APPEND, CONCAT, RENAME, DELETE, LIST_STATUS, WORKING_DIRECTORY, MKDIRS,
     SET_TIMES, SET_PERMISSION, SET_OWNER, SET_REPLICATION, CHECKSUM, CONTENT_SUMMARY
   }
 
@@ -468,6 +494,9 @@ public abstract class BaseTestHttpFSWith
       case APPEND:
         testAppend();
         break;
+      case CONCAT:
+        testConcat();
+        break;
       case RENAME:
         testRename();
         break;

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Fri Feb  1 19:46:14 2013
@@ -216,6 +216,8 @@ Release 2.0.3-alpha - Unreleased
 
     HDFS-3598. WebHDFS support for file concat. (Plamen Jeliazkov via shv)
 
+    HDFS-4456. Add concat to HttpFS and WebHDFS REST API docs. (plamenj2003 via tucu)
+
   OPTIMIZATIONS
 
     HDFS-3429. DataNode reads checksums even if client does not need them (todd)

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ConcatSourcesParam.java Fri Feb  1 19:46:14 2013
@@ -21,7 +21,7 @@ package org.apache.hadoop.hdfs.web.resou
 /** The concat source paths parameter. */
 public class ConcatSourcesParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = "srcs";
+  public static final String NAME = "sources";
 
   public static final String DEFAULT = NULL;
 

Modified: hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm?rev=1441606&r1=1441605&r2=1441606&view=diff
==============================================================================
--- hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm (original)
+++ hadoop/common/branches/branch-2/hadoop-hdfs-project/hadoop-hdfs/src/site/apt/WebHDFS.apt.vm Fri Feb  1 19:46:14 2013
@@ -109,6 +109,9 @@ WebHDFS REST API
     * {{{Append to a File}<<<APPEND>>>}}
         (see  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.append)
 
+    * {{{Concat File(s)}<<<CONCAT>>>}}
+        (see  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.concat)
+
   * HTTP DELETE
 
     * {{{Delete a File/Directory}<<<DELETE>>>}}
@@ -299,6 +302,32 @@ Content-Length: 0
    {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.append
 
 
+** {Concat File(s)}
+
+  * Submit a HTTP POST request.
+
++---------------------------------
+curl -i -X POST "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CONCAT&sources=<SOURCES>"
++---------------------------------
+
+  The client receives a response with zero content length:
+
++---------------------------------
+HTTP/1.1 200 OK
+Content-Length: 0
++---------------------------------
+
+  []
+
+  This REST API call is available as of Hadoop version 2.0.3.
+  Please note that <SOURCES> is a comma separated list of absolute paths.
+  (Example: sources=/test/file1,/test/file2,/test/file3)
+
+  See also:
+  {{{Sources}<<<sources>>>}},
+  {{{../../api/org/apache/hadoop/fs/FileSystem.html}FileSystem}}.concat
+
+
 ** {Open and Read a File}
 
   * Submit a HTTP GET request with automatically following redirects.
@@ -1727,6 +1756,29 @@ var tokenProperties =
   {{{Set Replication Factor}<<<SETREPLICATION>>>}}
 
 
+** {Sources}
+
+*----------------+-------------------------------------------------------------------+
+|| Name          | <<<sources>>> |
+*----------------+-------------------------------------------------------------------+
+|| Description   | The comma separated absolute paths used for concatenation. |
+*----------------+-------------------------------------------------------------------+
+|| Type          | String |
+*----------------+-------------------------------------------------------------------+
+|| Default Value | \<empty\> |
+*----------------+-------------------------------------------------------------------+
+|| Valid Values  | A list of comma separated absolute FileSystem paths without scheme and authority. |
+*----------------+-------------------------------------------------------------------+
+|| Syntax        | See the note in {{Delegation}}. |
+*----------------+-------------------------------------------------------------------+
+
+  <<Note>> that sources are absolute FileSystem paths.
+
+
+  See also:
+  {{{Concat File(s)}<<<CONCAT>>>}}
+
+
 ** {Token}
 
 
*----------------+-------------------------------------------------------------------+

