[hadoop] branch trunk updated: HDFS-15066. HttpFS: Implement setErasureCodingPolicy, unsetErasureCodingPolicy, getErasureCodingPolicy. Contributed by hemanthboyina.

2020-01-06 Thread tasanuma
This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 59aac00  HDFS-15066. HttpFS: Implement setErasureCodingPolicy,
unsetErasureCodingPolicy, getErasureCodingPolicy. Contributed by hemanthboyina.
59aac00 is described below

commit 59aac002834aaeb6475faad4c894b8c764957f68
Author: Takanobu Asanuma 
AuthorDate: Tue Jan 7 11:10:32 2020 +0900

    HDFS-15066. HttpFS: Implement setErasureCodingPolicy,
    unsetErasureCodingPolicy, getErasureCodingPolicy. Contributed by hemanthboyina.
---
 .../org/apache/hadoop/hdfs/web/JsonUtilClient.java |  3 +
 .../hadoop/fs/http/client/HttpFSFileSystem.java| 34 -
 .../apache/hadoop/fs/http/server/FSOperations.java | 85 ++
 .../fs/http/server/HttpFSParametersProvider.java   | 21 ++
 .../apache/hadoop/fs/http/server/HttpFSServer.java | 26 +++
 .../hadoop/fs/http/client/BaseTestHttpFSWith.java  | 45 +++-
 .../hadoop/fs/http/server/TestHttpFSServer.java| 46 
 7 files changed, 258 insertions(+), 2 deletions(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index 6f2f3b1..d45669f 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -716,6 +716,9 @@ public class JsonUtilClient {
   }
 
   public static ErasureCodingPolicy toECPolicy(Map<?, ?> m) {
+    if (m == null) {
+      return null;
+    }
     byte id = ((Number) m.get("id")).byteValue();
     String name = (String) m.get("name");
     String codec = (String) m.get("codecName");
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
index a168479..d92e9fc 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
@@ -48,6 +48,7 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
@@ -134,6 +135,7 @@ public class HttpFSFileSystem extends FileSystem
   public static final String SNAPSHOT_NAME_PARAM = "snapshotname";
   public static final String OLD_SNAPSHOT_NAME_PARAM = "oldsnapshotname";
   public static final String FSACTION_MODE_PARAM = "fsaction";
+  public static final String EC_POLICY_NAME_PARAM = "ecpolicy";
 
   public static final Short DEFAULT_PERMISSION = 0755;
   public static final String ACLSPEC_DEFAULT = "";
@@ -260,7 +262,8 @@ public class HttpFSFileSystem extends FileSystem
     CREATESNAPSHOT(HTTP_PUT), DELETESNAPSHOT(HTTP_DELETE),
     RENAMESNAPSHOT(HTTP_PUT), GETSNAPSHOTDIFF(HTTP_GET),
     GETSNAPSHOTTABLEDIRECTORYLIST(HTTP_GET), GETSERVERDEFAULTS(HTTP_GET),
-    CHECKACCESS(HTTP_GET);
+    CHECKACCESS(HTTP_GET), SETECPOLICY(HTTP_PUT), GETECPOLICY(
+        HTTP_GET), UNSETECPOLICY(HTTP_POST);
 
 private String httpMethod;
 
@@ -1624,4 +1627,33 @@ public class HttpFSFileSystem extends FileSystem
         getConnection(Operation.CHECKACCESS.getMethod(), params, path, true);
     HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
   }
+
+  public void setErasureCodingPolicy(final Path path, String policyName)
+      throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, Operation.SETECPOLICY.toString());
+    params.put(EC_POLICY_NAME_PARAM, policyName);
+    HttpURLConnection conn =
+        getConnection(Operation.SETECPOLICY.getMethod(), params, path, true);
+    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+  }
+
+  public ErasureCodingPolicy getErasureCodingPolicy(final Path path)
+      throws IOException {
+    Map<String, String> params = new HashMap<String, String>();
+    params.put(OP_PARAM, Operation.GETECPOLICY.toString());
+    HttpURLConnection conn =
+        getConnection(Operation.GETECPOLICY.getMethod(), params, path, true);
+    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
+    JSONObject json = (JSONObject) HttpFSUtils.jsonParse(conn);
+    return JsonUtilClient.toECPolicy(json);
+  }
+
+  p
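
The archived message is cut off above, mid-signature at "p". Judging from the two methods just shown and the UNSETECPOLICY(HTTP_POST) value added to the Operation enum, the truncated remainder is presumably the matching unset call, roughly along these lines (an assumed reconstruction, not the verbatim committed code):

  public void unsetErasureCodingPolicy(final Path path) throws IOException {
    // assumed reconstruction following the SETECPOLICY/GETECPOLICY pattern above
    Map<String, String> params = new HashMap<String, String>();
    params.put(OP_PARAM, Operation.UNSETECPOLICY.toString());
    HttpURLConnection conn =
        getConnection(Operation.UNSETECPOLICY.getMethod(), params, path, true);
    HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  }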

[hadoop] branch trunk updated: HDFS-14788. Use dynamic regex filter to ignore copy of source files in Distcp.

2020-01-06 Thread stevel
This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 819159f  HDFS-14788. Use dynamic regex filter to ignore copy of source 
files in Distcp.
819159f is described below

commit 819159fa060897bcf7c9ae09bf4b2fc97292f92b
Author: Mukund Thakur 
AuthorDate: Mon Jan 6 19:09:07 2020 +

HDFS-14788. Use dynamic regex filter to ignore copy of source files in 
Distcp.

Contributed by Mukund Thakur.

Change-Id: I781387ddce95ee300c12a160dc9a0f7d602403c3
---
 .../java/org/apache/hadoop/tools/CopyFilter.java   | 31 +++
 .../org/apache/hadoop/tools/DistCpConstants.java   | 14 
 .../hadoop/tools/RegexpInConfigurationFilter.java  | 72 
 .../hadoop-distcp/src/site/markdown/DistCp.md.vm   | 17 
 .../org/apache/hadoop/tools/TestCopyFilter.java| 97 ++
 .../tools/TestRegexpInConfigurationFilter.java | 55 
 6 files changed, 286 insertions(+)

diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyFilter.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyFilter.java
index 4b348a5..f5f00f1 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyFilter.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyFilter.java
@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.tools;
 
+import java.lang.reflect.Constructor;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
@@ -26,6 +31,8 @@ import org.apache.hadoop.fs.Path;
  */
 public abstract class CopyFilter {
 
+  private static final Logger LOG = LoggerFactory.getLogger(CopyFilter.class);
+
   /**
    * Default initialize method does nothing.
    */
@@ -47,6 +54,30 @@ public abstract class CopyFilter {
    * @return An instance of the appropriate CopyFilter
    */
   public static CopyFilter getCopyFilter(Configuration conf) {
+    String filtersClassName = conf
+        .get(DistCpConstants.CONF_LABEL_FILTERS_CLASS);
+    if (filtersClassName != null) {
+      try {
+        Class<? extends CopyFilter> filtersClass = conf
+            .getClassByName(filtersClassName)
+            .asSubclass(CopyFilter.class);
+        filtersClassName = filtersClass.getName();
+        Constructor<? extends CopyFilter> constructor = filtersClass
+            .getDeclaredConstructor(Configuration.class);
+        return constructor.newInstance(conf);
+      } catch (Exception e) {
+        LOG.error(DistCpConstants.CLASS_INSTANTIATION_ERROR_MSG +
+            filtersClassName, e);
+        throw new RuntimeException(
+            DistCpConstants.CLASS_INSTANTIATION_ERROR_MSG +
+                filtersClassName, e);
+      }
+    } else {
+      return getDefaultCopyFilter(conf);
+    }
+  }
+
+  private static CopyFilter getDefaultCopyFilter(Configuration conf) {
     String filtersFilename = conf.get(DistCpConstants.CONF_LABEL_FILTERS_FILE);
 
     if (filtersFilename == null) {
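
With this hook, any subclass of CopyFilter that exposes a Configuration-arg constructor can be plugged in through the new distcp.filters.class key; getCopyFilter() instantiates it reflectively and otherwise falls back to the existing file-based filter. A minimal hypothetical example (class name and matching rule invented for illustration):

  // Plugged in with -Ddistcp.filters.class=com.example.TempFileCopyFilter
  public class TempFileCopyFilter extends CopyFilter {

    // required: getCopyFilter() looks up a Configuration-arg constructor
    public TempFileCopyFilter(Configuration conf) {
    }

    @Override
    public boolean shouldCopy(Path path) {
      // skip in-flight temporary output such as /data/_tmp/part-0000
      return !path.toString().contains("/_tmp/");
    }
  }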
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
index f0adc78..2581568 100644
--- 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpConstants.java
@@ -120,6 +120,17 @@ public final class DistCpConstants {
   /* DistCp CopyListing class override param */
   public static final String CONF_LABEL_COPY_LISTING_CLASS = "distcp.copy.listing.class";
 
+  /**
+   *  DistCp Filter class override param.
+   */
+  public static final String CONF_LABEL_FILTERS_CLASS = "distcp.filters.class";
+
+  /**
+   *  Distcp exclude file regex override param.
+   */
+  public static final String DISTCP_EXCLUDE_FILE_REGEX =
+      "distcp.exclude-file-regex";
+
   /* DistCp Copy Buffer Size */
   public static final String CONF_LABEL_COPY_BUFFER_SIZE =
       "distcp.copy.buffer.size";
@@ -177,4 +188,7 @@ public final class DistCpConstants {
 
   public static final String CHECKSUM_MISMATCH_ERROR_MSG =
       "Checksum mismatch between ";
+
+  public static final String CLASS_INSTANTIATION_ERROR_MSG =
+      "Unable to instantiate ";
 }
diff --git 
a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexpInConfigurationFilter.java
 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexpInConfigurationFilter.java
new file mode 100644
index 000..4bf62e2
--- /dev/null
+++ 
b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/RegexpInConfigurationFilter.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contribut
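
The new filter's body is truncated at its license header above. Going by its name, the distcp.exclude-file-regex key added to DistCpConstants, and the accompanying TestRegexpInConfigurationFilter, it presumably compiles a regex from the job configuration and rejects matching paths. A sketch of that shape (a reconstruction under those assumptions, not the committed source):

  public class RegexpInConfigurationFilter extends CopyFilter {

    private Pattern excludePattern; // java.util.regex.Pattern

    public RegexpInConfigurationFilter(Configuration conf) {
      String regex = conf.get(DistCpConstants.DISTCP_EXCLUDE_FILE_REGEX);
      if (regex != null) {
        excludePattern = Pattern.compile(regex);
      }
    }

    @Override
    public boolean shouldCopy(Path path) {
      // copy everything unless the path matches the configured exclude regex
      return excludePattern == null
          || !excludePattern.matcher(path.toString()).matches();
    }
  }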

[hadoop] branch trunk updated: YARN-9956. Improved connection error message for YARN ApiServerClient. Contributed by Prabhu Joseph

2020-01-06 Thread eyang
This is an automated email from the ASF dual-hosted git repository.

eyang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new d81d45f  YARN-9956. Improved connection error message for YARN
ApiServerClient. Contributed by Prabhu Joseph
d81d45f is described below

commit d81d45ff2fc9a1c424222e021f9306bf64c916b2
Author: Eric Yang 
AuthorDate: Mon Jan 6 13:24:16 2020 -0500

YARN-9956. Improved connection error message for YARN ApiServerClient.
   Contributed by Prabhu Joseph
---
 .../yarn/service/client/ApiServiceClient.java  | 78 --
 .../yarn/service/client/TestApiServiceClient.java  | 22 ++
 .../service/client/TestSecureApiServiceClient.java |  2 +
 .../hadoop/yarn/client/util/YarnClientUtils.java   |  2 +-
 4 files changed, 68 insertions(+), 36 deletions(-)

diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
index 834bb03..3c2c3c4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-api/src/main/java/org/apache/hadoop/yarn/service/client/ApiServiceClient.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.client.api.AppAdminClient;
 import org.apache.hadoop.yarn.client.api.YarnClient;
 import org.apache.hadoop.yarn.client.util.YarnClientUtils;
+import org.apache.hadoop.yarn.conf.HAUtil;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.service.api.records.Component;
@@ -94,7 +95,7 @@ public class ApiServiceClient extends AppAdminClient {
   /**
    * Calculate Resource Manager address based on working REST API.
    */
-  String getRMWebAddress() {
+  String getRMWebAddress() throws IOException {
     Configuration conf = getConfig();
     String scheme = "http://";
     String path = "/app/v1/services/version";
@@ -105,43 +106,50 @@ public class ApiServiceClient extends AppAdminClient {
       rmAddress = conf
           .get("yarn.resourcemanager.webapp.https.address");
     }
-    boolean useKerberos = UserGroupInformation.isSecurityEnabled();
-    List<String> rmServers = getRMHAWebAddresses(conf);
-    for (String host : rmServers) {
-      try {
-        Client client = Client.create();
-        client.setFollowRedirects(false);
-        StringBuilder sb = new StringBuilder();
-        sb.append(scheme)
-            .append(host)
-            .append(path);
-        if (!useKerberos) {
-          try {
-            String username = UserGroupInformation.getCurrentUser().getShortUserName();
-            sb.append("?user.name=")
-                .append(username);
-          } catch (IOException e) {
-            LOG.debug("Fail to resolve username: {}", e);
+
+    if (HAUtil.isHAEnabled(conf)) {
+      boolean useKerberos = UserGroupInformation.isSecurityEnabled();
+      List<String> rmServers = getRMHAWebAddresses(conf);
+      StringBuilder diagnosticsMsg = new StringBuilder();
+      for (String host : rmServers) {
+        try {
+          Client client = Client.create();
+          client.setFollowRedirects(false);
+          StringBuilder sb = new StringBuilder();
+          sb.append(scheme)
+              .append(host)
+              .append(path);
+          if (!useKerberos) {
+            try {
+              String username = UserGroupInformation.getCurrentUser()
+                  .getShortUserName();
+              sb.append("?user.name=")
+                  .append(username);
+            } catch (IOException e) {
+              LOG.debug("Fail to resolve username: {}", e);
+            }
           }
+          Builder builder = client
+              .resource(sb.toString()).type(MediaType.APPLICATION_JSON);
+          if (useKerberos) {
+            String[] server = host.split(":");
+            String challenge = YarnClientUtils.generateToken(server[0]);
+            builder.header(HttpHeaders.AUTHORIZATION, "Negotiate " +
+                challenge);
+            LOG.debug("Authorization: Negotiate {}", challenge);
+          }
+          ClientResponse test = builder.get(ClientResponse.class);
+          if (test.getStatus() == 200) {
+            return scheme + host;
+          }
+        } catch (Exception e) {
+          LOG.info("Fail to connect to: " + host);
+          LOG.debug("Root cause: ", e);
+
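
The hunk is truncated inside the catch block, but the intent of the change is visible: under RM HA the client now probes every configured web address and accumulates per-host diagnostics (diagnosticsMsg) instead of surfacing only the last failure. The pattern, reduced to a standalone sketch (probeHost() is a hypothetical stand-in for the Jersey GET above):

  import java.io.IOException;
  import java.util.List;

  public final class FirstReachableHost {
    static String pick(List<String> hosts) throws IOException {
      StringBuilder diagnostics = new StringBuilder();
      for (String host : hosts) {
        try {
          if (probeHost(host)) { // e.g. HTTP 200 from /app/v1/services/version
            return host;
          }
        } catch (Exception e) {
          // keep trying the other hosts, but remember why this one failed
          diagnostics.append(host).append(": ").append(e).append('\n');
        }
      }
      // nothing answered: report every collected failure at once
      throw new IOException("No reachable ResourceManager:\n" + diagnostics);
    }

    private static boolean probeHost(String host) throws Exception {
      return false; // placeholder for a real HTTP probe
    }
  }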

[hadoop] branch trunk updated: YARN-10026. Pull out common code pieces from ATS v1.5 and v2. Contributed by Adam Antal

2020-01-06 Thread snemeth
This is an automated email from the ASF dual-hosted git repository.

snemeth pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new dd2607e  YARN-10026. Pull out common code pieces from ATS v1.5 and v2. 
Contributed by Adam Antal
dd2607e is described below

commit dd2607e3ec3c349130e4143b0f67b23e11da420a
Author: Szilard Nemeth 
AuthorDate: Mon Jan 6 17:16:11 2020 +0100

YARN-10026. Pull out common code pieces from ATS v1.5 and v2. Contributed 
by Adam Antal
---
 .../webapp/AHSWebServices.java | 204 ++--
 .../webapp/TestAHSWebServices.java |  25 +-
 .../hadoop/yarn/server/webapp/AppInfoProvider.java |  54 +
 .../hadoop/yarn/server/webapp/BasicAppInfo.java|  47 
 .../hadoop/yarn/server/webapp/LogServlet.java  | 260 +
 .../hadoop/yarn/server/webapp/LogWebService.java   | 243 +++
 .../hadoop/yarn/server/webapp/WebServices.java |  33 ++-
 .../hadoop/yarn/server/webapp/package-info.java|  18 ++
 .../yarn/server/webapp/TestLogWebService.java  |  23 +-
 9 files changed, 481 insertions(+), 426 deletions(-)

diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
index d9918d3..5e77718 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AHSWebServices.java
@@ -32,25 +32,18 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import javax.ws.rs.core.Response.ResponseBuilder;
-import javax.ws.rs.core.Response.Status;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.sun.jersey.api.client.ClientHandlerException;
-import com.sun.jersey.api.client.UniformInterfaceException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceAudience.Public;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.ApplicationBaseProtocol;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
-import org.apache.hadoop.yarn.logaggregation.filecontroller.LogAggregationFileControllerFactory;
-import org.apache.hadoop.yarn.server.webapp.LogWebServiceUtils;
+import org.apache.hadoop.yarn.server.webapp.LogServlet;
 import org.apache.hadoop.yarn.server.webapp.WebServices;
 import org.apache.hadoop.yarn.server.webapp.YarnWebServiceParams;
 import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
@@ -61,33 +54,20 @@ import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
 import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
 import org.apache.hadoop.yarn.webapp.BadRequestException;
-import org.apache.hadoop.yarn.webapp.NotFoundException;
-import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
-import org.codehaus.jettison.json.JSONException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 @Singleton
 @Path("/ws/v1/applicationhistory")
 public class AHSWebServices extends WebServices {
 
-  private static final Logger LOG = LoggerFactory
-      .getLogger(AHSWebServices.class);
-  private static final String NM_DOWNLOAD_URI_STR =
-      "/ws/v1/node/containers";
-  private static final Joiner JOINER = Joiner.on("");
-  private static final Joiner DOT_JOINER = Joiner.on(". ");
-  private final Configuration conf;
-  private final LogAggregationFileControllerFactory factory;
+  private LogServlet logServlet;
 
   @Inject
   public AHSWebServices(ApplicationBaseProtocol appBaseProt,
       Configuration conf) {
     super(appBaseProt);
-    this.conf = conf;
-    this.factory = new LogAggregationFileControllerFactory(conf);
+    this.logServlet = new LogServlet(conf, this);
   }
 
   @GET
@@ -244,87 +224,9 @@ public class AHSWebServices extends WebServices {
   @Query
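
The rest of the hunk is truncated, but the shape of the refactoring follows from the constructor above: roughly eighty lines of log-serving logic leave AHSWebServices, and the JAX-RS resource methods shrink to delegation into the shared LogServlet. Schematically (method and parameter names below are hypothetical; the real signatures are cut off here):

  @GET
  @Path("/containerlogs/{containerid}/{filename}")
  @Produces({ MediaType.TEXT_PLAIN + "; " + JettyUtils.UTF_8 })
  public Response getContainerLogFile(@Context HttpServletRequest req,
      @PathParam("containerid") String containerIdStr,
      @PathParam("filename") String filename) {
    // validation, file-controller lookup and streaming now live in LogServlet
    return logServlet.getLogFile(req, containerIdStr, filename);
  }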

[hadoop] branch trunk updated: YARN-10035. Add ability to filter the Cluster Applications API request by name. Contributed by Adam Antal

2020-01-06 Thread snemeth
This is an automated email from the ASF dual-hosted git repository.

snemeth pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 768ee22  YARN-10035. Add ability to filter the Cluster Applications 
API request by name. Contributed by Adam Antal
768ee22 is described below

commit 768ee22e9e73543d2fb193d9b6ec34a247cb0411
Author: Szilard Nemeth 
AuthorDate: Mon Jan 6 16:26:33 2020 +0100

YARN-10035. Add ability to filter the Cluster Applications API request by 
name. Contributed by Adam Antal
---
 .../protocolrecords/GetApplicationsRequest.java| 18 
 .../src/main/proto/yarn_service_protos.proto   |  1 +
 .../impl/pb/GetApplicationsRequestPBImpl.java  | 25 ++
 .../webapp/AHSWebServices.java |  6 --
 .../hadoop/yarn/server/webapp/WebServices.java |  7 +-
 .../server/resourcemanager/ClientRMService.java|  5 +
 .../webapp/ApplicationsRequestBuilder.java |  9 
 .../server/resourcemanager/webapp/RMWSConsts.java  |  1 +
 .../webapp/RMWebServiceProtocol.java   |  3 ++-
 .../resourcemanager/webapp/RMWebServices.java  |  2 ++
 .../resourcemanager/webapp/TestRMWebServices.java  |  9 +---
 .../webapp/DefaultRequestInterceptorREST.java  |  2 +-
 .../router/webapp/FederationInterceptorREST.java   |  4 ++--
 .../server/router/webapp/RouterWebServices.java|  3 ++-
 .../router/webapp/BaseRouterWebServicesTest.java   |  3 ++-
 .../webapp/MockDefaultRequestInterceptorREST.java  |  2 +-
 .../router/webapp/MockRESTRequestInterceptor.java  |  2 +-
 .../webapp/PassThroughRESTRequestInterceptor.java  |  4 ++--
 .../webapp/TestFederationInterceptorREST.java  |  2 +-
 .../webapp/TestFederationInterceptorRESTRetry.java |  6 +++---
 .../src/site/markdown/ResourceManagerRest.md   |  1 +
 21 files changed, 95 insertions(+), 20 deletions(-)

diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetApplicationsRequest.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetApplicationsRequest.java
index a52b405..81d98b5 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetApplicationsRequest.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/GetApplicationsRequest.java
@@ -393,4 +393,22 @@ public abstract class GetApplicationsRequest {
   @Private
   @Unstable
   public abstract void setScope(ApplicationsRequestScope scope);
+
+  /**
+   * Get the name to filter applications.
+   *
+   * @return the name
+   */
+  @Private
+  @Unstable
+  public abstract String getName();
+
+  /**
+   * Set the name to filter applications.
+   *
+   * @param name of the application
+   */
+  @Private
+  @Unstable
+  public abstract void setName(String name);
 }
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
index d562cdb..8a0273d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
@@ -206,6 +206,7 @@ message GetApplicationsRequestProto {
   optional int64 finish_end = 9;
   repeated string applicationTags = 10;
   optional ApplicationsRequestScopeProto scope = 11 [default = ALL];
+  optional string name = 12;
 }
 
 message GetApplicationsResponseProto {
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
index 4c5fee0..9c3045e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/GetApplicationsRequestPBImpl.java
@@ -55,6 +55,7 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
   Range finish = null;
   private Set<String> applicationTags;
   private ApplicationsRequestScope scope;
+  private String name;
 
   public GetApplicationsRequestPBImpl() {
     builder = GetApplicationsRequestProto.newBuilder();
@@ -121,6 +122,9 @@ public class GetApplicationsRequestPBImpl extends GetApplicationsRequest {
       builder.clearQueues();
       builder.addAllQueues(queues);
     }
+    if (name != null) {
+      builder.setName(name);
+    }
   }
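
End-to-end, the new field flows from the REST layer (a name query parameter registered in RMWSConsts) through GetApplicationsRequest into ClientRMService's filtering. A client-side sketch of the Java surface added above (application name invented; note that get/setName are marked @Private/@Unstable):

  // RPC path: GetApplicationsRequest.newInstance() is pre-existing API,
  // setName(...) is the accessor added by this patch.
  GetApplicationsRequest request = GetApplicationsRequest.newInstance();
  request.setName("my-application"); // only apps with this name
  // ...then submit via ApplicationClientProtocol#getApplications(request)

Over REST the same filter is the new query parameter, e.g. GET /ws/v1/cluster/apps?name=my-application.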
 
 

[hadoop] branch branch-3.2 updated: HADOOP-16772. Extract version numbers to head of pom.xml (addendum) (#1774). Contributed by Tamas Penzes.

2020-01-06 Thread gabota
This is an automated email from the ASF dual-hosted git repository.

gabota pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
 new 250cd9f  HADOOP-16772. Extract version numbers to head of pom.xml 
(addendum) (#1774). Contributed by Tamas Penzes.
250cd9f is described below

commit 250cd9f5750984027c2fe6a8414b7ef63d5045e9
Author: Tamás Pénzes 
AuthorDate: Mon Jan 6 14:12:28 2020 +0100

HADOOP-16772. Extract version numbers to head of pom.xml (addendum) 
(#1774). Contributed by Tamas Penzes.

Follow up task of HADOOP-16729, extract even more version numbers for 
branch-3.2.

Change-Id: I0546fc8ec8f68baa4bcfa9db0df4d8293b078913
---
 hadoop-project/pom.xml | 40 ++--
 1 file changed, 26 insertions(+), 14 deletions(-)

diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index cd13ac3..69d091d 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -107,10 +107,17 @@
 
     <commons-beanutils.version>1.9.4</commons-beanutils.version>
     <commons-cli.version>1.2</commons-cli.version>
+    <commons-codec.version>1.11</commons-codec.version>
     <commons-collections.version>3.2.2</commons-collections.version>
+    <commons-compress.version>1.19</commons-compress.version>
+    <commons-csv.version>1.0</commons-csv.version>
     <commons-io.version>2.5</commons-io.version>
     <commons-lang3.version>3.7</commons-lang3.version>
+    <commons-logging.version>1.1.3</commons-logging.version>
+    <commons-logging-api.version>1.1</commons-logging-api.version>
     <commons-math3.version>3.1.1</commons-math3.version>
+    <commons-net.version>3.6</commons-net.version>
+    <commons-text.version>1.4</commons-text.version>
 
 
 0.3.0-eca3531-SNAPSHOT
@@ -119,7 +126,12 @@
 2.4.12
 6.2.1.jre7
 2.7.5
+1.1
+2.2.21
 2.2.4
+3.1.0-incubating
+4.1.0-incubating
+3.2.4
 3.10.6.Final
 4.0.52.Final
 
@@ -637,12 +649,12 @@
       <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-compress</artifactId>
-        <version>1.19</version>
+        <version>${commons-compress.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-csv</artifactId>
-        <version>1.0</version>
+        <version>${commons-csv.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.httpcomponents</groupId>
@@ -657,12 +669,12 @@
       <dependency>
         <groupId>commons-codec</groupId>
         <artifactId>commons-codec</artifactId>
-        <version>1.11</version>
+        <version>${commons-codec.version}</version>
       </dependency>
       <dependency>
         <groupId>commons-net</groupId>
         <artifactId>commons-net</artifactId>
-        <version>3.6</version>
+        <version>${commons-net.version}</version>
       </dependency>
       <dependency>
         <groupId>javax.servlet</groupId>
@@ -869,7 +881,7 @@
       <dependency>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging</artifactId>
-        <version>1.1.3</version>
+        <version>${commons-logging.version}</version>
         <exclusions>
           <exclusion>
             <groupId>avalon-framework</groupId>
@@ -888,7 +900,7 @@
       <dependency>
         <groupId>commons-logging</groupId>
         <artifactId>commons-logging-api</artifactId>
-        <version>1.1</version>
+        <version>${commons-logging-api.version}</version>
       </dependency>
       <dependency>
         <groupId>log4j</groupId>
@@ -987,7 +999,7 @@
       <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-text</artifactId>
-        <version>1.4</version>
+        <version>${commons-text.version}</version>
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
@@ -1123,17 +1135,17 @@
       <dependency>
         <groupId>org.apache.htrace</groupId>
         <artifactId>htrace-core</artifactId>
-        <version>3.1.0-incubating</version>
+        <version>${htrace3.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.htrace</groupId>
         <artifactId>htrace-core4</artifactId>
-        <version>4.1.0-incubating</version>
+        <version>${htrace4.version}</version>
       </dependency>
       <dependency>
         <groupId>org.jdom</groupId>
         <artifactId>jdom</artifactId>
-        <version>1.1</version>
+        <version>${jdom.version}</version>
       </dependency>
       <dependency>
         <groupId>com.googlecode.json-simple</groupId>
@@ -1193,7 +1205,7 @@
       <dependency>
         <groupId>io.dropwizard.metrics</groupId>
         <artifactId>metrics-core</artifactId>
-        <version>3.2.4</version>
+        <version>${metrics.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -1235,17 +1247,17 @@
       <dependency>
         <groupId>org.glassfish.grizzly</groupId>
         <artifactId>grizzly-http-servlet</artifactId>
-        <version>2.2.21</version>
+        <version>${grizzly.version}</version>
       </dependency>
       <dependency>
         <groupId>org.glassfish.grizzly</groupId>
         <artifactId>grizzly-http</artifactId>
-        <version>2.2.21</version>
+        <version>${grizzly.version}</version>
       </dependency>
       <dependency>
         <groupId>org.glassfish.grizzly</groupId>
         <artifactId>grizzly-http-server</artifactId>
-        <version>2.2.21</version>
+        <version>${grizzly.version}</version>
       </dependency>
 
       <dependency>





[hadoop] branch trunk updated: HDFS-15090. RBF: MountPoint Listing Should Return Flag Values Of Destination. Contributed by Ayush Saxena.

2020-01-06 Thread tasanuma
This is an automated email from the ASF dual-hosted git repository.

tasanuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 4a76ab7  HDFS-15090. RBF: MountPoint Listing Should Return Flag Values 
Of Destination. Contributed by Ayush Saxena.
4a76ab7 is described below

commit 4a76ab777fdd2b72c438c73d45ffbe2f6bb8bb0d
Author: Takanobu Asanuma 
AuthorDate: Mon Jan 6 18:09:59 2020 +0900

HDFS-15090. RBF: MountPoint Listing Should Return Flag Values Of 
Destination. Contributed by Ayush Saxena.
---
 .../federation/router/RouterClientProtocol.java   |  6 ++
 .../federation/router/TestRouterMountTable.java   | 19 ++-
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClientProtocol.java
 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClientProtocol.java
index b7c2b03..5798380 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClientProtocol.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClientProtocol.java
@@ -1921,6 +1921,8 @@ public class RouterClientProtocol implements ClientProtocol {
     FsPermission permission = FsPermission.getDirDefault();
     String owner = this.superUser;
     String group = this.superGroup;
+    EnumSet<HdfsFileStatus.Flags> flags =
+        EnumSet.noneOf(HdfsFileStatus.Flags.class);
     if (subclusterResolver instanceof MountTableResolver) {
       try {
         String mName = name.startsWith("/") ? name : "/" + name;
@@ -1940,6 +1942,9 @@ public class RouterClientProtocol implements ClientProtocol {
           owner = fInfo.getOwner();
           group = fInfo.getGroup();
           childrenNum = fInfo.getChildrenNum();
+          flags = DFSUtil
+              .getFlags(fInfo.isEncrypted(), fInfo.isErasureCoded(),
+                  fInfo.isSnapshotEnabled(), fInfo.hasAcl());
         }
       }
     } catch (IOException e) {
@@ -1971,6 +1976,7 @@ public class RouterClientProtocol implements ClientProtocol {
         .path(DFSUtil.string2Bytes(name))
         .fileId(inodeId)
         .children(childrenNum)
+        .flags(flags)
         .build();
   }
 
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMountTable.java
 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMountTable.java
index 572b33d..77ec47a 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMountTable.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/router/TestRouterMountTable.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -663,4 +664,20 @@ public class TestRouterMountTable {
       nnFs0.delete(new Path("/testrename2"), true);
     }
   }
-}
\ No newline at end of file
+
+  @Test
+  public void testListStatusMountPoint() throws Exception {
+    try {
+      MountTable addEntry = MountTable.newInstance("/mount/testLsMountEntry",
+          Collections.singletonMap("ns0", "/testLsMountEntryDest"));
+      assertTrue(addMountTable(addEntry));
+      nnFs0.mkdirs(new Path("/testLsMountEntryDest"));
+      DistributedFileSystem routerDfs = (DistributedFileSystem) routerFs;
+      Path mountPath = new Path("/mount/testLsMountEntry");
+      routerDfs.setErasureCodingPolicy(mountPath, "RS-6-3-1024k");
+      assertTrue(routerDfs.listStatus(new Path("/mount"))[0].isErasureCoded());
+    } finally {
+      nnFs0.delete(new Path("/testLsMountEntryDest"), true);
+    }
+  }
+}
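
The practical effect: attribute flags on a mount point's FileStatus now reflect the destination directory rather than defaulting to false. A small client-side sketch using standard FileStatus accessors (paths assumed; routerFs is the router-side FileSystem as in the test above):

  FileStatus[] entries = routerFs.listStatus(new Path("/mount"));
  for (FileStatus st : entries) {
    System.out.println(st.getPath()
        + " erasureCoded=" + st.isErasureCoded()
        + " encrypted=" + st.isEncrypted()
        + " snapshotEnabled=" + st.isSnapshotEnabled()
        + " acl=" + st.hasAcl());
  }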

