[26/50] [abbrv] hadoop git commit: YARN-6130. [ATSv2 Security] Generate a delegation token for AM when app collector is created and pass it to AM via NM and RM. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
YARN-6130. [ATSv2 Security] Generate a delegation token for AM when app 
collector is created and pass it to AM via NM and RM. Contributed by Varun 
Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7594d1de
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7594d1de
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7594d1de

Branch: refs/heads/trunk
Commit: 7594d1de7bbc34cd2e64202095a5e1757154d7d0
Parents: 9f65405
Author: Rohith Sharma K S 
Authored: Mon Jul 31 17:26:34 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../v2/app/rm/RMContainerAllocator.java |   9 +-
 .../api/protocolrecords/AllocateResponse.java   |  32 ++--
 .../hadoop/yarn/api/records/CollectorInfo.java  |  55 +++
 .../src/main/proto/yarn_protos.proto|   5 +
 .../src/main/proto/yarn_service_protos.proto|   2 +-
 .../api/async/impl/AMRMClientAsyncImpl.java |   6 +-
 .../ApplicationMasterServiceProtoTestBase.java  |  72 +
 .../hadoop/yarn/client/ProtocolHATestBase.java  |  20 ++-
 ...ationMasterServiceProtocolForTimelineV2.java |  71 +
 ...estApplicationMasterServiceProtocolOnHA.java |  46 +-
 .../api/async/impl/TestAMRMClientAsync.java |   2 +-
 .../impl/pb/AllocateResponsePBImpl.java |  37 -
 .../records/impl/pb/CollectorInfoPBImpl.java| 148 +++
 .../hadoop/yarn/api/TestPBImplRecords.java  |   2 +
 .../ReportNewCollectorInfoRequest.java  |   5 +-
 .../impl/pb/NodeHeartbeatRequestPBImpl.java |  25 +++-
 .../impl/pb/NodeHeartbeatResponsePBImpl.java|  21 ++-
 .../pb/ReportNewCollectorInfoRequestPBImpl.java |   4 +-
 .../server/api/records/AppCollectorData.java|  27 +++-
 .../records/impl/pb/AppCollectorDataPBImpl.java |  29 +++-
 .../yarn_server_common_service_protos.proto |   2 +
 .../java/org/apache/hadoop/yarn/TestRPC.java|  30 +++-
 .../hadoop/yarn/TestYarnServerApiClasses.java   |   4 +-
 .../nodemanager/NodeStatusUpdaterImpl.java  |   1 -
 .../application/ApplicationImpl.java|   1 -
 .../ApplicationMasterService.java   |   2 -
 .../resourcemanager/DefaultAMSProcessor.java|   8 +-
 .../server/resourcemanager/rmapp/RMApp.java |  15 +-
 .../server/resourcemanager/rmapp/RMAppImpl.java |  10 +-
 .../applicationsmanager/MockAsm.java|   6 +
 .../server/resourcemanager/rmapp/MockRMApp.java |   6 +
 .../TestTimelineServiceClientIntegration.java   |   2 +-
 .../security/TestTimelineAuthFilterForV2.java   | 121 +++
 .../collector/AppLevelTimelineCollector.java|  24 +++
 .../AppLevelTimelineCollectorWithAgg.java   |   4 +-
 .../collector/NodeTimelineCollectorManager.java |  83 +--
 .../PerNodeTimelineCollectorsAuxService.java|   7 +-
 ...neV2DelegationTokenSecretManagerService.java |  31 
 .../TestNMTimelineCollectorManager.java |   4 +-
 39 files changed, 829 insertions(+), 150 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7594d1de/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
index 0952797..969ec4c 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
@@ -878,13 +878,16 @@ public class RMContainerAllocator extends 
RMContainerRequestor
 handleUpdatedNodes(response);
 handleJobPriorityChange(response);
 // handle receiving the timeline collector address for this app
-String collectorAddr = response.getCollectorAddr();
+String collectorAddr = null;
+if (response.getCollectorInfo() != null) {
+  collectorAddr = response.getCollectorInfo().getCollectorAddr();
+}
+
 MRAppMaster.RunningAppContext appContext =
 (MRAppMaster.RunningAppContext)this.getContext();
 if (collectorAddr != null && !collectorAddr.isEmpty()
 && appContext.getTimelineV2Client() != null) {
-  appContext.getTimelineV2Client().setTimelineServiceAddress(
-  response.getCollectorAddr());
+  appContext.getTimelineV2Client().setTimelineServiceAddress(col
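
A minimal sketch, not part of the patch, of how AM-side code can consume the new CollectorInfo record, mirroring the RMContainerAllocator change above. The class and method names here are illustrative; only AllocateResponse.getCollectorInfo(), CollectorInfo.getCollectorAddr() and TimelineV2Client.setTimelineServiceAddress() come from this change.

import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.CollectorInfo;
import org.apache.hadoop.yarn.client.api.TimelineV2Client;

final class CollectorInfoHandler {
  private final TimelineV2Client timelineClient;

  CollectorInfoHandler(TimelineV2Client timelineClient) {
    this.timelineClient = timelineClient;
  }

  /** Picks up the per-app collector address once the RM starts reporting it. */
  void onAllocateResponse(AllocateResponse response) {
    CollectorInfo info = response.getCollectorInfo();
    if (info == null) {
      return; // collector has not registered with the RM yet
    }
    String addr = info.getCollectorAddr();
    if (addr != null && !addr.isEmpty()) {
      timelineClient.setTimelineServiceAddress(addr);
    }
  }
}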

[20/50] [abbrv] hadoop git commit: YARN-5928. Move ATSv2 HBase backend code into a new module that is only dependent at runtime by yarn servers. Contributed by Haibo Chen.

2017-08-29 Thread varunsaxena
YARN-5928. Move ATSv2 HBase backend code into a new module that is only 
dependent at runtime by yarn servers. Contributed by Haibo Chen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ccb38c19
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ccb38c19
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ccb38c19

Branch: refs/heads/trunk
Commit: ccb38c19919d197ac751852717d4cac76518c211
Parents: 580d884
Author: Sangjin Lee 
Authored: Thu Jan 19 21:21:48 2017 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../server/resourcemanager/TestRMHATimelineCollectors.java | 6 ++
 .../yarn/server/timelineservice/storage/package-info.java  | 6 +++---
 .../hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md| 2 +-
 3 files changed, 10 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccb38c19/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
index a54ff34..fa0d318 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMHATimelineCollectors.java
@@ -24,6 +24,8 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.api.records.AppCollectorData;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineWriterImpl;
+import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineWriter;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -47,8 +49,12 @@ public class TestRMHATimelineCollectors extends RMHATestBase 
{
 super.setup();
 confForRM1.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
 confForRM2.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
+confForRM1.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
+FileSystemTimelineWriterImpl.class, TimelineWriter.class);
 confForRM1.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
 confForRM2.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
+confForRM2.setClass(YarnConfiguration.TIMELINE_SERVICE_WRITER_CLASS,
+FileSystemTimelineWriterImpl.class, TimelineWriter.class);
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ccb38c19/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
index d0bc366..e78db2a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/package-info.java
@@ -17,12 +17,12 @@
  */
 
 /**
- * Package org.apache.hadoop.server.timelineservice contains classes to be used
- * across timeline reader and collector.
+ * Package org.apache.hadoop.yarn.server.timelineservice.storage contains
+ * classes which define and implement reading and writing to backend storage.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 package org.apache.hadoop.yarn.server.timelineservice.storage;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
\ No newline at end of file
+impor

[15/50] [abbrv] hadoop git commit: YARN-6256. Add FROM_ID info key for timeline entities in reader response (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6256. Add FROM_ID info key for timeline entities in reader response 
(Rohith Sharma K S via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/c3bd8d6a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/c3bd8d6a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/c3bd8d6a

Branch: refs/heads/trunk
Commit: c3bd8d6ad3e30c08865cc1a5f374d1d2a485f844
Parents: 8bb2646
Author: Varun Saxena 
Authored: Tue Mar 7 23:54:38 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 ...stTimelineReaderWebServicesHBaseStorage.java |  33 +--
 .../storage/TestHBaseTimelineStorageApps.java   |  19 +-
 .../TestHBaseTimelineStorageEntities.java   |  21 +-
 .../storage/application/ApplicationRowKey.java  |  49 +++-
 .../storage/entity/EntityRowKey.java|  56 -
 .../storage/flow/FlowRunRowKey.java |  47 +++-
 .../storage/reader/ApplicationEntityReader.java |  28 ++-
 .../storage/reader/FlowRunEntityReader.java |  32 +--
 .../storage/reader/GenericEntityReader.java |  25 +-
 .../storage/common/TestRowKeys.java |  21 --
 .../storage/common/TestRowKeysAsString.java | 115 ++
 .../reader/TimelineEntityFilters.java   |  29 +--
 .../reader/TimelineReaderWebServices.java   | 227 +++
 .../reader/TimelineReaderWebServicesUtils.java  |   4 +-
 14 files changed, 445 insertions(+), 261 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/c3bd8d6a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index d0f674f..b2fe267 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -825,7 +825,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   assertEquals(2, entities1.size());
   for (TimelineEntity entity : entities1) {
 assertNotNull(entity.getInfo());
-assertEquals(1, entity.getInfo().size());
+assertEquals(2, entity.getInfo().size());
 String uid =
 (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
 assertNotNull(uid);
@@ -853,7 +853,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   assertEquals(2, entities2.size());
   for (TimelineEntity entity : entities2) {
 assertNotNull(entity.getInfo());
-assertEquals(1, entity.getInfo().size());
+assertEquals(2, entity.getInfo().size());
 String uid =
 (String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
 assertNotNull(uid);
@@ -1417,8 +1417,9 @@ public class TestTimelineReaderWebServicesHBaseStorage
 infoCnt += entity.getInfo().size();
 assertEquals("entity2", entity.getId());
   }
-  // Includes UID in info field even if fields not specified as INFO.
-  assertEquals(1, infoCnt);
+  // Includes UID and FROM_ID in info field even if fields not specified as
+  // INFO.
+  assertEquals(2, infoCnt);
 
   // infofilters=(info1 eq cluster1 AND info4 eq 35000) OR
   // (info1 eq cluster2 AND info2 eq 2.0)
@@ -1436,8 +1437,8 @@ public class TestTimelineReaderWebServicesHBaseStorage
 infoCnt += entity.getInfo().size();
 assertEquals("entity2", entity.getId());
   }
-  // Includes UID in info field.
-  assertEquals(4, infoCnt);
+  // Includes UID and FROM_ID in info field.
+  assertEquals(5, infoCnt);
 
   // Test for behavior when compare op is ne(not equals) vs ene
   // (exists and not equals). info3 does not exist for entity2. For ne,
@@ -2171,8 +2172,8 @@ public class TestTimelineReaderWebServicesHBaseStorage
   // verify for entity-10 to entity-7 in descending order.
   TimelineEntity entity = verifyPaginatedEntites(entities, limit, 10)
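
A hedged sketch of the client-side pagination pattern this enables. Only the FROM_ID info key and the inclusive fromid query parameter come from this change; the helper below is hypothetical.

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.List;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

final class FromIdPaginationSketch {

  /** Builds the query string for the next page from the last entity returned. */
  static String nextPageQuery(List<TimelineEntity> previousPage, int limit)
      throws UnsupportedEncodingException {
    TimelineEntity last = previousPage.get(previousPage.size() - 1);
    // The reader now puts FROM_ID into every returned entity's info map.
    String fromId = (String) last.getInfo().get("FROM_ID");
    // fromid is inclusive of the given entity, so the next request resumes
    // exactly where this page ended; encode it since it may contain '!'.
    return "?limit=" + limit + "&fromid=" + URLEncoder.encode(fromId, "UTF-8");
  }
}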

[49/50] [abbrv] hadoop git commit: YARN-6323. Rolling upgrade/config change is broken on timeline v2. (Vrushali C via Haibo Chen)

2017-08-29 Thread varunsaxena
YARN-6323. Rolling upgrade/config change is broken on timeline v2. (Vrushali C 
via Haibo Chen)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9b08f365
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9b08f365
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9b08f365

Branch: refs/heads/trunk
Commit: 9b08f365d749185d7ed8e34dc379b2e415a29e99
Parents: b2efebd
Author: Haibo Chen 
Authored: Mon Aug 21 10:45:10 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../org/apache/hadoop/yarn/conf/YarnConfiguration.java|  2 ++
 .../containermanager/ContainerManagerImpl.java| 10 ++
 2 files changed, 12 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b08f365/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 8a513ac..be45ddf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2221,6 +2221,8 @@ public class YarnConfiguration extends Configuration {
 
   public static final int DEFAULT_NUMBER_OF_ASYNC_ENTITIES_TO_MERGE = 10;
 
+  /** default version for any flow. */
+  public static final String DEFAULT_FLOW_VERSION = "1";
 
   /**
* The time period for which timeline v2 client will wait for draining

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9b08f365/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
index c7880d5..e497f62 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
@@ -402,6 +402,16 @@ public class ContainerManagerImpl extends CompositeService 
implements
 LOG.debug(
 "Recovering Flow context: " + fc + " for an application " + appId);
   }
+} else {
+  // in upgrade situations, where there is no prior existing flow context,
+  // default would be used.
+  fc = new FlowContext(TimelineUtils.generateDefaultFlowName(null, appId),
+  YarnConfiguration.DEFAULT_FLOW_VERSION, appId.getClusterTimestamp());
+  if (LOG.isDebugEnabled()) {
+LOG.debug(
+"No prior existing flow context found. Using default Flow context: 
"
++ fc + " for an application " + appId);
+  }
 }
 
 LOG.info("Recovering application " + appId);





[36/50] [abbrv] hadoop git commit: YARN-6801. NPE in RM while setting collectors map in NodeHeartbeatResponse. Contributed by Vrushali C.

2017-08-29 Thread varunsaxena
YARN-6801. NPE in RM while setting collectors map in NodeHeartbeatResponse. 
Contributed by Vrushali C.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/66041316
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/66041316
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/66041316

Branch: refs/heads/trunk
Commit: 660413165aa25815bbba66ac2195b0ae17184844
Parents: ac7f52d
Author: Rohith Sharma K S 
Authored: Tue Jul 11 17:59:47 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../resourcemanager/ResourceTrackerService.java   | 18 +++---
 1 file changed, 11 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/66041316/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
index 112c2dd..cc47e02 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceTrackerService.java
@@ -620,13 +620,17 @@ public class ResourceTrackerService extends 
AbstractService implements
 Map<ApplicationId, RMApp> rmApps = rmContext.getRMApps();
 // Set collectors for all running apps on this node.
 for (ApplicationId appId : runningApps) {
-  AppCollectorData appCollectorData = rmApps.get(appId).getCollectorData();
-  if (appCollectorData != null) {
-liveAppCollectorsMap.put(appId, appCollectorData);
-  } else {
-if (LOG.isDebugEnabled()) {
-  LOG.debug("Collector for applicaton: " + appId +
-  " hasn't registered yet!");
+  RMApp app = rmApps.get(appId);
+  if (app != null) {
+AppCollectorData appCollectorData = rmApps.get(appId)
+.getCollectorData();
+if (appCollectorData != null) {
+  liveAppCollectorsMap.put(appId, appCollectorData);
+} else {
+  if (LOG.isDebugEnabled()) {
+LOG.debug("Collector for applicaton: " + appId +
+" hasn't registered yet!");
+  }
 }
   }
 }





[03/50] [abbrv] hadoop git commit: MAPREDUCE-6818. Remove direct reference to TimelineClientImpl. Contributed by Li Lu.

2017-08-29 Thread varunsaxena
MAPREDUCE-6818. Remove direct reference to TimelineClientImpl. Contributed by 
Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/84992997
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/84992997
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/84992997

Branch: refs/heads/trunk
Commit: 84992997406ef88ab17864594d445db40472fd7e
Parents: ecbc8d4
Author: Sangjin Lee 
Authored: Thu Dec 8 18:14:09 2016 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:51 2017 +0530

--
 .../org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java | 3 +--
 .../java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java| 3 +--
 2 files changed, 2 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/84992997/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java
index 447ea4e..d553596 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/JobHistoryFileReplayMapperV1.java
@@ -37,7 +37,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
-import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 
 
@@ -54,7 +53,7 @@ class JobHistoryFileReplayMapperV1 extends
 
   public void map(IntWritable key, IntWritable val, Context context) throws 
IOException {
 // collect the apps it needs to process
-TimelineClient tlc = new TimelineClientImpl();
+TimelineClient tlc = TimelineClient.createTimelineClient();
 TimelineEntityConverterV1 converter = new TimelineEntityConverterV1();
 JobHistoryFileReplayHelper helper = new 
JobHistoryFileReplayHelper(context);
 int replayMode = helper.getReplayMode();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/84992997/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
index 16d14a1..6d6151f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/SimpleEntityWriterV1.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
 import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
 import org.apache.hadoop.yarn.client.api.TimelineClient;
-import org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl;
 
 /**
* Adds simple entities with random string payload, events, metrics, and
@@ -46,7 +45,7 @@ class SimpleEntityWriterV1
 
   public void map(IntWritable key, IntWritable val, Context context)
   throws IOException {
-TimelineClient tlc = new TimelineClientImpl();
+TimelineClient tlc = TimelineClient.createTimelineClient();
 Configuration conf = context.getConfiguration();
 
 final int kbs = conf.getInt(KBS_SENT, KBS_SENT_DEFAULT);
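
A self-contained sketch of the factory pattern the diff switches to: obtaining a TimelineClient through createTimelineClient() instead of instantiating TimelineClientImpl directly. The entity type and id below are placeholders; the lifecycle calls follow the standard YARN service pattern.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
import org.apache.hadoop.yarn.client.api.TimelineClient;

public final class TimelineClientFactorySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    TimelineClient tlc = TimelineClient.createTimelineClient();
    tlc.init(conf);   // TimelineClient is a YARN service: init, then start
    tlc.start();
    try {
      TimelineEntity entity = new TimelineEntity();
      entity.setEntityType("EXAMPLE_TYPE");  // placeholder type and id
      entity.setEntityId("example_entity_1");
      tlc.putEntities(entity);               // synchronous v1 publish
    } finally {
      tlc.stop();
    }
  }
}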





[16/50] [abbrv] hadoop git commit: YARN-6027. Support fromid(offset) filter for /flows API (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/8bb26465/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
index d7eff32..6cdf937 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
@@ -185,7 +185,7 @@ public class TimelineReaderServer extends CompositeService {
   }
 
   @VisibleForTesting
-  int getWebServerPort() {
+  public int getWebServerPort() {
 return readerWebServer.getConnectorAddress(0).getPort();
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8bb26465/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderUtils.java
index c93c631..8f92433 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderUtils.java
@@ -24,14 +24,30 @@ import java.util.List;
 
 import org.apache.commons.lang.StringUtils;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * Set of utility methods to be used across timeline reader.
  */
-final class TimelineReaderUtils {
+public final class TimelineReaderUtils {
   private TimelineReaderUtils() {
   }
 
   /**
+   * Default delimiter for joining strings.
+   */
+  @VisibleForTesting
+  public static final char DEFAULT_DELIMITER_CHAR = '!';
+
+  /**
+   * Default escape character used for joining strings.
+   */
+  @VisibleForTesting
+  public static final char DEFAULT_ESCAPE_CHAR = '*';
+
+  public static final String FROMID_KEY = "FROM_ID";
+
+  /**
* Split the passed string along the passed delimiter character while looking
* for escape char to interpret the splitted parts correctly. For delimiter 
or
* escape character to be interpreted as part of the string, they have to be
@@ -168,4 +184,14 @@ final class TimelineReaderUtils {
 // Join the strings after they have been escaped.
 return StringUtils.join(strs, delimiterChar);
   }
+
+  public static List<String> split(final String str)
+  throws IllegalArgumentException {
+return split(str, DEFAULT_DELIMITER_CHAR, DEFAULT_ESCAPE_CHAR);
+  }
+
+  public static String joinAndEscapeStrings(final String[] strs) {
+return joinAndEscapeStrings(strs, DEFAULT_DELIMITER_CHAR,
+DEFAULT_ESCAPE_CHAR);
+  }
 }
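
A small illustrative round trip through the two convenience methods added above, assuming the default '!' delimiter and '*' escape character; the values are made up.

import java.util.List;
import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderUtils;

public final class EscapedJoinSketch {
  public static void main(String[] args) {
    String[] parts = {"cluster1", "user1", "flow!name", "1002345678919"};
    // Any '!' inside a part (here in "flow!name") is escaped with '*' on join.
    String key = TimelineReaderUtils.joinAndEscapeStrings(parts);
    List<String> decoded = TimelineReaderUtils.split(key);
    System.out.println(key);
    System.out.println(decoded);  // back to the original four parts
  }
}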

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8bb26465/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index 7133528..290c255 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -1334,6 +1334,10 @@ public 

[25/50] [abbrv] hadoop git commit: YARN-6130. [ATSv2 Security] Generate a delegation token for AM when app collector is created and pass it to AM via NM and RM. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/7594d1de/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
index 07058f6..eb4381d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/TestTimelineServiceClientIntegration.java
@@ -80,7 +80,7 @@ public class TestTimelineServiceClientIntegration {
   auxService =
   PerNodeTimelineCollectorsAuxService.launchServer(new String[0],
   collectorManager, conf);
-  auxService.addApplication(ApplicationId.newInstance(0, 1));
+  auxService.addApplication(ApplicationId.newInstance(0, 1), "user");
 } catch (ExitUtil.ExitException e) {
   fail();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7594d1de/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
index 608ef67..0ddf287 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
@@ -23,7 +23,10 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
 import java.io.BufferedReader;
@@ -40,6 +43,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -51,10 +55,12 @@ import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.client.api.TimelineV2Client;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
+import 
org.apache.hadoop.yarn.security.client.TimelineDelegationTokenIdentifier;
 import org.apache.hadoop.yarn.server.api.CollectorNodemanagerProtocol;
 import 
org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextRequest;
 import 
org.apache.hadoop.yarn.server.api.protocolrecords.GetTimelineCollectorContextResponse;
 import 
org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
+import 
org.apache.hadoop.yarn.server.timelineservice.collector.AppLevelTimelineCollector;
 import 
org.apache.hadoop.yarn.server.timelineservice.collector.NodeTimelineCollectorManager;
 import 
org.apache.hadoop.yarn.server.timelineservice.collector.PerNodeTimelineCollectorsAuxService;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl;
@@ -76,7 +82,6 @@ public class TestTimelineAuthFilterForV2 {
 
   private static final String FOO_USER = "foo";
   private static final String HTTP_USER = "HTTP";
-
   private static final File TEST_ROOT_DIR = new File(
   System.getProperty("test.build.dir", "target" + File.separator +
   "test-dir"), UUID.randomUUID().toString());
@@ -88,21 +93,35 @@ public class TestTimelineAuthFilterForV2 {
   private static String httpSpnegoPrincipal = KerberosTestUtils.
   getServerPrincipal();
 
+  //

[32/50] [abbrv] hadoop git commit: YARN-7006. [ATSv2 Security] Changes for authentication for CollectorNodemanagerProtocol. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
YARN-7006. [ATSv2 Security] Changes for authentication for 
CollectorNodemanagerProtocol. Contributed by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b6645695
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b6645695
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b6645695

Branch: refs/heads/trunk
Commit: b664569586db39647f15340ce82ccc0f0869897e
Parents: d5ff965
Author: Jian He 
Authored: Wed Aug 16 11:01:06 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../collectormanager/NMCollectorService.java|  7 +-
 .../containermanager/AuxServices.java   |  3 +-
 .../timelineservice/NMTimelinePublisher.java| 29 ++--
 .../CollectorNodemanagerSecurityInfo.java   | 69 
 .../org.apache.hadoop.security.SecurityInfo | 14 
 5 files changed, 112 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6645695/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
index a5ffc74..7db6d70 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/collectormanager/NMCollectorService.java
@@ -73,13 +73,13 @@ public class NMCollectorService extends CompositeService 
implements
 
 Configuration serverConf = new Configuration(conf);
 
-// TODO Security settings.
 YarnRPC rpc = YarnRPC.create(conf);
 
+// Kerberos based authentication to be used for CollectorNodemanager
+// protocol if security is enabled.
 server =
 rpc.getServer(CollectorNodemanagerProtocol.class, this,
-collectorServerAddress, serverConf,
-this.context.getNMTokenSecretManager(),
+collectorServerAddress, serverConf, null,
 conf.getInt(YarnConfiguration.NM_COLLECTOR_SERVICE_THREAD_COUNT,
 YarnConfiguration.DEFAULT_NM_COLLECTOR_SERVICE_THREAD_COUNT));
 
@@ -94,7 +94,6 @@ public class NMCollectorService extends CompositeService 
implements
 LOG.info("NMCollectorService started at " + collectorServerAddress);
   }
 
-
   @Override
   public void serviceStop() throws Exception {
 if (server != null) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6645695/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
index 2efc932..5e0f293 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/AuxServices.java
@@ -244,7 +244,8 @@ public class AuxServices extends AbstractService
 for (AuxiliaryService serv : serviceMap.values()) {
   try {
 serv.initializeContainer(new ContainerInitializationContext(
-event.getUser(), event.getContainer().getContainerId(),
+event.getContainer().getUser(),
+event.getContainer().getContainerId(),
 event.getContainer().getResource(), event.getContainer()
 .getContainerTokenIdentifier().getContainerType()));
   } catch (Throwable th) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b6645695/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/y

[05/50] [abbrv] hadoop git commit: YARN-5585. [Atsv2] Reader side changes for entity prefix and support for pagination via additional filters (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-5585. [Atsv2] Reader side changes for entity prefix and support for 
pagination via additional filters (Rohith Sharma K S via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/02a9710a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/02a9710a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/02a9710a

Branch: refs/heads/trunk
Commit: 02a9710a099fc9572122d87dd3e90c78522f5836
Parents: 2556c01
Author: Varun Saxena 
Authored: Sat Jan 7 01:38:36 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:51 2017 +0530

--
 .../records/timelineservice/TimelineEntity.java |  16 +-
 ...stTimelineReaderWebServicesHBaseStorage.java | 102 +++-
 .../reader/filter/TimelineFilterUtils.java  |  17 ++
 .../storage/entity/EntityRowKey.java|  26 ++-
 .../storage/entity/EntityRowKeyPrefix.java  |  13 +-
 .../storage/reader/ApplicationEntityReader.java |   2 +-
 .../reader/FlowActivityEntityReader.java|   2 +-
 .../storage/reader/FlowRunEntityReader.java |   2 +-
 .../storage/reader/GenericEntityReader.java |  99 ++--
 .../storage/reader/TimelineEntityReader.java|  29 +--
 .../reader/TimelineEntityReaderFactory.java |   2 +-
 .../storage/common/TestRowKeys.java |   8 +-
 .../reader/TimelineEntityFilters.java   |  53 -
 .../reader/TimelineReaderContext.java   |  20 +-
 .../reader/TimelineReaderManager.java   |   1 +
 .../reader/TimelineReaderWebServices.java   | 230 +++
 .../reader/TimelineReaderWebServicesUtils.java  |  13 +-
 .../reader/TimelineUIDConverter.java|  19 +-
 .../timelineservice/storage/TimelineReader.java |  10 +-
 .../reader/TestTimelineUIDConverter.java|   8 +-
 20 files changed, 512 insertions(+), 160 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/02a9710a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
index e6fcbd2..0af5ea4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/timelineservice/TimelineEntity.java
@@ -550,20 +550,10 @@ public class TimelineEntity implements 
Comparable<TimelineEntity> {
   public int compareTo(TimelineEntity other) {
 int comparison = getType().compareTo(other.getType());
 if (comparison == 0) {
-  if (getCreatedTime() == null) {
-if (other.getCreatedTime() == null) {
-  return getId().compareTo(other.getId());
-} else {
-  return 1;
-}
-  }
-  if (other.getCreatedTime() == null) {
-return -1;
-  }
-  if (getCreatedTime() > other.getCreatedTime()) {
-// Order by created time desc
+  if (getIdPrefix() > other.getIdPrefix()) {
+// Descending order by entity id prefix
 return -1;
-  } else if (getCreatedTime() < other.getCreatedTime()) {
+  } else if (getIdPrefix() < other.getIdPrefix()) {
 return 1;
   } else {
 return getId().compareTo(other.getId());
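
A short sketch, not from the patch, illustrating the new ordering: entities of the same type now sort by descending idPrefix and then by id, instead of by created time. It assumes the idPrefix setter that accompanies the getIdPrefix() used above.

import java.util.TreeSet;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;

public final class IdPrefixOrderingSketch {
  public static void main(String[] args) {
    TreeSet<TimelineEntity> ordered = new TreeSet<>();
    for (long prefix : new long[] {1L, 3L, 2L}) {
      TimelineEntity e = new TimelineEntity();
      e.setType("YARN_CONTAINER");   // same type for all three entities
      e.setId("container_" + prefix);
      e.setIdPrefix(prefix);         // e.g. an inverted start time in practice
      ordered.add(e);
    }
    // Iteration order is prefix 3, then 2, then 1 (descending).
    ordered.forEach(e -> System.out.println(e.getIdPrefix() + " " + e.getId()));
  }
}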

http://git-wip-us.apache.org/repos/asf/hadoop/blob/02a9710a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 3f8978c..7d9d46a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimeli

[33/50] [abbrv] hadoop git commit: YARN-4455. Support fetching metrics by time range. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/70078e91/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
index 360ac20..d67de71 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServices.java
@@ -265,6 +265,11 @@ public class TimelineReaderWebServices {
* or has a value less than 1, and metrics have to be retrieved, then
* metricsLimit will be considered as 1 i.e. latest single value of
* metric(s) will be returned. (Optional query param).
+   * @param metricsTimeStart If specified, returned metrics for the entities
+   * would not contain metric values before this timestamp(Optional query
+   * param).
+   * @param metricsTimeEnd If specified, returned metrics for the entities 
would
+   * not contain metric values after this timestamp(Optional query param).
* @param fromId If specified, retrieve the next set of entities from the
* given fromId. The set of entities retrieved is inclusive of specified
* fromId. fromId should be taken from the value associated with FROM_ID
@@ -300,6 +305,8 @@ public class TimelineReaderWebServices {
   @QueryParam("metricstoretrieve") String metricsToRetrieve,
   @QueryParam("fields") String fields,
   @QueryParam("metricslimit") String metricsLimit,
+  @QueryParam("metricstimestart") String metricsTimeStart,
+  @QueryParam("metricstimeend") String metricsTimeEnd,
   @QueryParam("fromid") String fromId) {
 String url = req.getRequestURI() +
 (req.getQueryString() == null ? "" :
@@ -326,7 +333,8 @@ public class TimelineReaderWebServices {
   infofilters, conffilters, metricfilters, eventfilters,
   fromId),
   TimelineReaderWebServicesUtils.createTimelineDataToRetrieve(
-  confsToRetrieve, metricsToRetrieve, fields, metricsLimit));
+  confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
+  metricsTimeStart, metricsTimeEnd));
 } catch (Exception e) {
   handleException(e, url, startTime,
   "createdTime start/end or limit or flowrunid");
@@ -407,6 +415,11 @@ public class TimelineReaderWebServices {
* or has a value less than 1, and metrics have to be retrieved, then
* metricsLimit will be considered as 1 i.e. latest single value of
* metric(s) will be returned. (Optional query param).
+   * @param metricsTimeStart If specified, returned metrics for the entities
+   * would not contain metric values before this timestamp(Optional query
+   * param).
+   * @param metricsTimeEnd If specified, returned metrics for the entities 
would
+   * not contain metric values after this timestamp(Optional query param).
* @param fromId If specified, retrieve the next set of entities from the
* given fromId. The set of entities retrieved is inclusive of specified
* fromId. fromId should be taken from the value associated with FROM_ID
@@ -447,12 +460,14 @@ public class TimelineReaderWebServices {
   @QueryParam("metricstoretrieve") String metricsToRetrieve,
   @QueryParam("fields") String fields,
   @QueryParam("metricslimit") String metricsLimit,
+  @QueryParam("metricstimestart") String metricsTimeStart,
+  @QueryParam("metricstimeend") String metricsTimeEnd,
   @QueryParam("fromid") String fromId) {
 return getEntities(req, res, null, appId, entityType, userId, flowName,
 flowRunId, limit, createdTimeStart, createdTimeEnd, relatesTo,
 isRelatedTo, infofilters, conffilters, metricfilters, eventfilters,
 confsToRetrieve, metricsToRetrieve, fields, metricsLimit,
-fromId);
+metricsTimeStart, metricsTimeEnd, fromId);
   }
 
   /**
@@ -523,6 +538,11 @@ public class TimelineReaderWebServices {
* or has a value less than 1, and metrics have to be retrieved, then
* metricsLimit will be considered as 1 i.e. latest single value of
* metric(s) will be returned. (Optional query param).
+   * @param metricsTimeStart If specified, returned metrics for the entities
+   * would not contain
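
A hedged sketch of a REST query using the new parameters. Only metricstimestart, metricstimeend and the existing fields parameter come from this change; the host, port, application id and path below are placeholders for a running timeline reader.

import java.net.HttpURLConnection;
import java.net.URL;

public final class MetricsTimeRangeQuerySketch {
  public static void main(String[] args) throws Exception {
    long end = System.currentTimeMillis();
    long start = end - 3_600_000L;  // metrics from the last hour only
    String url = "http://timeline-reader.example.com:8188"
        + "/ws/v2/timeline/apps/application_1503990000000_0001"
        + "/entities/YARN_CONTAINER"
        + "?fields=METRICS"
        + "&metricstimestart=" + start
        + "&metricstimeend=" + end;
    HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
    System.out.println("HTTP " + conn.getResponseCode());
  }
}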

[31/50] [abbrv] hadoop git commit: YARN-6638. [ATSv2 Security] Timeline reader side changes for loading auth filters and principals. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
YARN-6638. [ATSv2 Security] Timeline reader side changes for loading auth 
filters and principals. Contributed by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d3f11e3f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d3f11e3f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d3f11e3f

Branch: refs/heads/trunk
Commit: d3f11e3f13ed5efc7f0b7f19567d142e554c35ed
Parents: 879de51
Author: Jian He 
Authored: Fri Jun 9 13:42:38 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 ...TimelineAuthenticationFilterInitializer.java | 69 ++-
 .../AbstractTimelineReaderHBaseTestBase.java| 11 +--
 .../reader/TimelineReaderServer.java| 70 +++-
 ...neReaderAuthenticationFilterInitializer.java | 53 +++
 .../reader/security/package-info.java   | 25 +++
 5 files changed, 160 insertions(+), 68 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d3f11e3f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java
index 4e7c29a..06f9868e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/timeline/security/TimelineAuthenticationFilterInitializer.java
@@ -51,30 +51,18 @@ import java.util.Map;
 public class TimelineAuthenticationFilterInitializer extends FilterInitializer 
{
 
   /**
-   * The configuration prefix of timeline HTTP authentication
+   * The configuration prefix of timeline HTTP authentication.
*/
   public static final String PREFIX = 
"yarn.timeline-service.http-authentication.";
 
   @VisibleForTesting
  Map<String, String> filterConfig;
 
-  /**
-   * Initializes {@link TimelineAuthenticationFilter}
-   * <p>
-   * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
-   * configuration properties prefixed with {@value #PREFIX}
-   *
-   * @param container
-   *  The filter container
-   * @param conf
-   *  Configuration for run-time parameters
-   */
-  @Override
-  public void initFilter(FilterContainer container, Configuration conf) {
+  protected void setAuthFilterConfig(Configuration conf) {
filterConfig = new HashMap<String, String>();
 
 // setting the cookie path to root '/' so it is used for all resources.
-filterConfig.put(TimelineAuthenticationFilter.COOKIE_PATH, "/");
+filterConfig.put(AuthenticationFilter.COOKIE_PATH, "/");
 
for (Map.Entry<String, String> entry : conf) {
   String name = entry.getKey();
@@ -95,6 +83,41 @@ public class TimelineAuthenticationFilterInitializer extends 
FilterInitializer {
   }
 }
 
+// Resolve _HOST into bind address
+String bindAddress = conf.get(HttpServer2.BIND_ADDRESS);
+String principal =
+filterConfig.get(KerberosAuthenticationHandler.PRINCIPAL);
+if (principal != null) {
+  try {
+principal = SecurityUtil.getServerPrincipal(principal, bindAddress);
+  } catch (IOException ex) {
+throw new RuntimeException("Could not resolve Kerberos principal " +
+"name: " + ex.toString(), ex);
+  }
+  filterConfig.put(KerberosAuthenticationHandler.PRINCIPAL,
+  principal);
+}
+  }
+
+  protected Map<String, String> getFilterConfig() {
+return filterConfig;
+  }
+
+  /**
+   * Initializes {@link TimelineAuthenticationFilter}
+   * <p>
+   * Propagates to {@link TimelineAuthenticationFilter} configuration all YARN
+   * configuration properties prefixed with {@value #PREFIX}
+   *
+   * @param container
+   *  The filter container
+   * @param conf
+   *  Configuration for run-time parameters
+   */
+  @Override
+  public void initFilter(FilterContainer container, Configuration conf) {
+setAuthFilterConfig(conf);
+
 String authType = filterConfig.get(AuthenticationFilter.AUTH_TYPE);
 if (authType.equals(PseudoAuthenticationHandler.TYPE)) {
   filterConfig.put(AuthenticationFilter.AUTH_TYPE,
@@ -102,23 +125,7 @@ public class TimelineAuthenticationFilterInitializer 
exte
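
For reference, a sketch of the kind of configuration the initializer reads, using the standard timeline HTTP authentication property names under the PREFIX shown above. The principal and keytab values are placeholders, not values from the patch.

import org.apache.hadoop.conf.Configuration;

public final class TimelineAuthConfigSketch {
  // Same prefix as TimelineAuthenticationFilterInitializer.PREFIX above.
  private static final String PREFIX =
      "yarn.timeline-service.http-authentication.";

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // "simple" or "kerberos"; the initializer reads this as the auth type.
    conf.set(PREFIX + "type", "kerberos");
    // _HOST is resolved against the web server bind address, as done in
    // setAuthFilterConfig() above.
    conf.set(PREFIX + "kerberos.principal", "HTTP/_HOST@EXAMPLE.COM");
    conf.set(PREFIX + "kerberos.keytab",
        "/etc/security/keytabs/spnego.service.keytab");
    System.out.println(conf.get(PREFIX + "type"));
  }
}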

[10/50] [abbrv] hadoop git commit: YARN-6146. Add Builder methods for TimelineEntityFilters (Haibo Chen via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6146. Add Builder methods for TimelineEntityFilters (Haibo Chen via Varun 
Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b87b72b4
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b87b72b4
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b87b72b4

Branch: refs/heads/trunk
Commit: b87b72b40a3cd4e124d6c941276481747133895f
Parents: 44999aa
Author: Varun Saxena 
Authored: Thu Mar 23 14:35:37 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../storage/TestHBaseTimelineStorageApps.java   | 190 -
 .../TestHBaseTimelineStorageEntities.java   | 202 +--
 .../flow/TestHBaseStorageFlowActivity.java  |   9 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  48 +++--
 .../storage/reader/TimelineEntityReader.java|   2 +-
 .../reader/TimelineEntityFilters.java   | 160 ---
 .../reader/TimelineReaderWebServices.java   |   6 +-
 .../reader/TimelineReaderWebServicesUtils.java  |  44 +++-
 .../TestFileSystemTimelineReaderImpl.java   | 100 +
 9 files changed, 381 insertions(+), 380 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87b72b4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
index 4b1147d..7eb9ad1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
@@ -660,7 +660,7 @@ public class TestHBaseTimelineStorageApps {
 new TimelineReaderContext("cluster1", "user1", "some_flow_name",
 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
 null),
-new TimelineEntityFilters(),
+new TimelineEntityFilters.Builder().build(),
 new TimelineDataToRetrieve(null, null, EnumSet.of(Field.ALL), null));
 assertEquals(3, entities.size());
 int cfgCnt = 0;
@@ -697,8 +697,8 @@ public class TestHBaseTimelineStorageApps {
 new TimelineReaderContext("cluster1", "user1", "some_flow_name",
 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
 null),
-new TimelineEntityFilters(null, 1425016502000L, 1425016502040L, null,
-null, null, null, null, null),
+new TimelineEntityFilters.Builder().createdTimeBegin(1425016502000L)
+.createTimeEnd(1425016502040L).build(),
 new TimelineDataToRetrieve());
 assertEquals(3, entities.size());
 for (TimelineEntity entity : entities) {
@@ -714,8 +714,8 @@ public class TestHBaseTimelineStorageApps {
 new TimelineReaderContext("cluster1", "user1", "some_flow_name",
 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
 null),
-new TimelineEntityFilters(null, 1425016502015L, null, null, null, null,
-null, null, null),
+new TimelineEntityFilters.Builder().createdTimeBegin(1425016502015L)
+.build(),
 new TimelineDataToRetrieve());
 assertEquals(2, entities.size());
 for (TimelineEntity entity : entities) {
@@ -729,8 +729,8 @@ public class TestHBaseTimelineStorageApps {
 new TimelineReaderContext("cluster1", "user1", "some_flow_name",
 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
 null),
-new TimelineEntityFilters(null, null, 1425016502015L, null, null, null,
-null, null, null),
+new TimelineEntityFilters.Builder().createTimeEnd(1425016502015L)
+.build(),
 new TimelineDataToRetrieve());
 assertEquals(1, entities.size());
 for (TimelineEntity entity : entities) {
@@ -757,7 +757,7 @@ public class TestHBaseTimelineStorageApps {
 new TimelineReaderContext("cluster1", "user1", "some_flow_name",
 1002345678919L, null, TimelineEntityType.YARN_APPLICATION.toString(),
 null),
-new Tim
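
The hunks above replace TimelineEntityFilters' long positional constructor with a fluent builder. A minimal, self-contained sketch of the same builder pattern, mirroring the createdTimeBegin()/createTimeEnd() method names exactly as they appear in the diff; this is illustrative only, not the actual YARN class.

/** Minimal builder-pattern sketch mirroring the style used above;
 *  illustrative, not the actual TimelineEntityFilters class. */
public class EntityFiltersSketch {
  private final Long createdTimeBegin;
  private final Long createdTimeEnd;

  private EntityFiltersSketch(Builder b) {
    this.createdTimeBegin = b.createdTimeBegin;
    this.createdTimeEnd = b.createdTimeEnd;
  }

  public Long getCreatedTimeBegin() { return createdTimeBegin; }
  public Long getCreatedTimeEnd() { return createdTimeEnd; }

  public static class Builder {
    private Long createdTimeBegin;
    private Long createdTimeEnd;

    public Builder createdTimeBegin(long ts) { this.createdTimeBegin = ts; return this; }
    // Name mirrors the asymmetric method name shown in the diff above.
    public Builder createTimeEnd(long ts) { this.createdTimeEnd = ts; return this; }
    public EntityFiltersSketch build() { return new EntityFiltersSketch(this); }
  }

  public static void main(String[] args) {
    EntityFiltersSketch f = new Builder()
        .createdTimeBegin(1425016502000L)
        .createTimeEnd(1425016502040L)
        .build();
    System.out.println(f.getCreatedTimeBegin() + " - " + f.getCreatedTimeEnd());
  }
}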

[24/50] [abbrv] hadoop git commit: YARN-6888. Refactor AppLevelTimelineCollector such that RM does not have aggregator threads created. Contributed by Vrushali C.

2017-08-29 Thread varunsaxena
YARN-6888. Refactor AppLevelTimelineCollector such that RM does not have 
aggregator threads created. Contributed by Vrushali C.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3fb71b13
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3fb71b13
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3fb71b13

Branch: refs/heads/trunk
Commit: 3fb71b1393018e1001da55b794dda7d26491cf35
Parents: a990ff7
Author: Rohith Sharma K S 
Authored: Fri Jul 28 11:47:16 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../collector/AppLevelTimelineCollector.java|  87 +--
 .../AppLevelTimelineCollectorWithAgg.java   | 150 +++
 .../PerNodeTimelineCollectorsAuxService.java|   2 +-
 .../TestNMTimelineCollectorManager.java |   4 +-
 4 files changed, 155 insertions(+), 88 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fb71b13/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
index 56f7b2b..10d68bb 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/collector/AppLevelTimelineCollector.java
@@ -18,27 +18,17 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.collector;
 
-import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntities;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
-import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 
 import com.google.common.base.Preconditions;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-
 /**
  * Service that handles writes to the timeline service and writes them to the
  * backing storage for a given YARN application.
@@ -51,15 +41,8 @@ public class AppLevelTimelineCollector extends 
TimelineCollector {
   private static final Logger LOG =
   LoggerFactory.getLogger(TimelineCollector.class);
 
-  private final static int AGGREGATION_EXECUTOR_NUM_THREADS = 1;
-  private final static int AGGREGATION_EXECUTOR_EXEC_INTERVAL_SECS = 15;
-  private static Set<String> entityTypesSkipAggregation
-  = initializeSkipSet();
-
   private final ApplicationId appId;
   private final TimelineCollectorContext context;
-  private ScheduledThreadPoolExecutor appAggregationExecutor;
-  private AppLevelAggregator appAggregator;
   private UserGroupInformation currentUser;
 
   public AppLevelTimelineCollector(ApplicationId appId) {
@@ -69,12 +52,8 @@ public class AppLevelTimelineCollector extends 
TimelineCollector {
 context = new TimelineCollectorContext();
   }
 
-  private static Set<String> initializeSkipSet() {
-Set<String> result = new HashSet<>();
-result.add(TimelineEntityType.YARN_APPLICATION.toString());
-result.add(TimelineEntityType.YARN_FLOW_RUN.toString());
-result.add(TimelineEntityType.YARN_FLOW_ACTIVITY.toString());
-return result;
+  public UserGroupInformation getCurrentUser() {
+return currentUser;
   }
 
   @Override
@@ -92,29 +71,11 @@ public class AppLevelTimelineCollector extends 
TimelineCollector {
 
   @Override
   protected void serviceStart() throws Exception {
-// Launch the aggregation thread
-appAggregationExecutor = new ScheduledThreadPoolExecutor(
-AppLevelTimelineCollector.AGGREGATION_EXECUTOR_NUM_THREADS,
-new ThreadFactoryBuilder()
-.setNameFormat("TimelineCollect

[41/50] [abbrv] hadoop git commit: YARN-6134. [ATSv2 Security] Regenerate delegation token for app just before token expires if app collector is active. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
YARN-6134. [ATSv2 Security] Regenerate delegation token for app just before 
token expires if app collector is active. Contributed by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/7fd6ae24
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/7fd6ae24
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/7fd6ae24

Branch: refs/heads/trunk
Commit: 7fd6ae24798cd3fdd77dbb00089a922407026e02
Parents: e276c75
Author: Jian He 
Authored: Fri Aug 18 23:20:44 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../security/TestTimelineAuthFilterForV2.java   | 104 ++--
 .../collector/AppLevelTimelineCollector.java|  35 --
 .../collector/NodeTimelineCollectorManager.java | 118 ++-
 ...neV2DelegationTokenSecretManagerService.java |  11 ++
 4 files changed, 222 insertions(+), 46 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/7fd6ae24/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
index f1d5185..bc1594c 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.yarn.server.timelineservice.security;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -27,6 +28,7 @@ import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
@@ -51,6 +53,8 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.KerberosTestUtils;
 import 
org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.security.token.Token;
+import 
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.client.api.TimelineV2Client;
@@ -190,6 +194,10 @@ public class TestTimelineAuthFilterForV2 {
   // renewed automatically if app is still alive.
   conf.setLong(
   YarnConfiguration.TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL, 100);
+  // Set token max lifetime to 4 seconds to test if timeline delegation
+  // token for the app is regenerated automatically if app is still alive.
+  conf.setLong(
+  YarnConfiguration.TIMELINE_DELEGATION_TOKEN_MAX_LIFETIME, 4000);
 }
 UserGroupInformation.setConfiguration(conf);
 collectorManager = new DummyNodeTimelineCollectorManager();
@@ -205,9 +213,8 @@ public class TestTimelineAuthFilterForV2 {
 if (!withKerberosLogin) {
   AppLevelTimelineCollector collector =
   (AppLevelTimelineCollector)collectorManager.get(appId);
-  org.apache.hadoop.security.token.Token<TimelineDelegationTokenIdentifier>
-   token =
-  collector.getDelegationTokenForApp();
+  Token<TimelineDelegationTokenIdentifier> token =
+  collector.getDelegationTokenForApp();
   token.setService(new Text("localhost" + token.getService().toString().
   substring(token.getService().toString().indexOf(":"))));
   UserGroupInformation.getCurrentUser().addToken(token);
@@ -304,6 +311,20 @@ public class TestTimelineAuthFilterForV2 {
 }
   }
 
+  private boolean publishWithRetries(ApplicationId appId, File entityTypeDir,
+  String entityType, int numEntities) throws Exception {
+for (int i = 0; i < 10; i++) {
+  try {
+publishAndVerifyEntity(appId, entityTypeDir, entityType, numEntities);
+  } catch (YarnException e) {
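
The test above retries entity publishing because the collector can briefly reject writes while its delegation token is being regenerated. A self-contained sketch of that retry idiom; PublishFailedException and doPublish() are placeholders, not Hadoop APIs.

/** Self-contained sketch of the retry idiom used by publishWithRetries above;
 *  the exception type and publish call are placeholders, not Hadoop APIs. */
public class PublishRetrySketch {

  static class PublishFailedException extends Exception {
    PublishFailedException(String msg) { super(msg); }
  }

  private int attemptsBeforeSuccess = 3; // simulate transient failures

  private void doPublish() throws PublishFailedException {
    if (attemptsBeforeSuccess-- > 0) {
      throw new PublishFailedException("collector not ready yet");
    }
  }

  public boolean publishWithRetries(int maxAttempts, long sleepMillis)
      throws InterruptedException {
    for (int i = 0; i < maxAttempts; i++) {
      try {
        doPublish();
        return true;               // published and verified
      } catch (PublishFailedException e) {
        Thread.sleep(sleepMillis); // back off, then retry
      }
    }
    return false;
  }

  public static void main(String[] args) throws InterruptedException {
    System.out.println(new PublishRetrySketch().publishWithRetries(10, 100));
  }
}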

[08/50] [abbrv] hadoop git commit: YARN-6237. Move UID constant to TimelineReaderUtils (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6237. Move UID constant to TimelineReaderUtils (Rohith Sharma K S via 
Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/18b3a80d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/18b3a80d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/18b3a80d

Branch: refs/heads/trunk
Commit: 18b3a80df7e028dfa0d1fa2d48b9f9ac1401ec54
Parents: c3bd8d6
Author: Varun Saxena 
Authored: Thu Mar 9 01:06:54 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../TestTimelineReaderWebServicesHBaseStorage.java  | 12 ++--
 .../timelineservice/reader/TimelineReaderManager.java   | 12 
 .../timelineservice/reader/TimelineReaderUtils.java |  3 +++
 .../reader/TestTimelineReaderWebServices.java   |  4 ++--
 4 files changed, 15 insertions(+), 16 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/18b3a80d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index b2fe267..6b0f95e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -657,7 +657,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   List listFlowUIDs = new ArrayList();
   for (FlowActivityEntity entity : flowEntities) {
 String flowUID =
-(String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+(String) entity.getInfo().get(TimelineReaderUtils.UID_KEY);
 listFlowUIDs.add(flowUID);
 assertEquals(TimelineUIDConverter.FLOW_UID.encodeUID(
 new TimelineReaderContext(entity.getCluster(), entity.getUser(),
@@ -681,7 +681,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 assertNotNull(frEntities);
 for (FlowRunEntity entity : frEntities) {
   String flowRunUID =
-  (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+  (String) entity.getInfo().get(TimelineReaderUtils.UID_KEY);
   listFlowRunUIDs.add(flowRunUID);
   assertEquals(TimelineUIDConverter.FLOWRUN_UID.encodeUID(
   new TimelineReaderContext("cluster1", entity.getUser(),
@@ -713,7 +713,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 assertNotNull(appEntities);
 for (TimelineEntity entity : appEntities) {
   String appUID =
-  (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+  (String) entity.getInfo().get(TimelineReaderUtils.UID_KEY);
   listAppUIDs.add(appUID);
   assertEquals(TimelineUIDConverter.APPLICATION_UID.encodeUID(
   new TimelineReaderContext(context.getClusterId(),
@@ -746,7 +746,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 assertNotNull(entities);
 for (TimelineEntity entity : entities) {
   String entityUID =
-  (String)entity.getInfo().get(TimelineReaderManager.UID_KEY);
+  (String) entity.getInfo().get(TimelineReaderUtils.UID_KEY);
   listEntityUIDs.add(entityUID);
   assertEquals(TimelineUIDConverter.GENERIC_ENTITY_UID.encodeUID(
   new TimelineReaderContext(context.getClusterId(),
@@ -827,7 +827,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 assertNotNull(entity.getInfo());
 assertEquals(2, entity.getInfo().size());
 String uid =
-(String) entity.getInfo().get(TimelineReaderManager.UID_KEY);
+(String) entity.getInfo().get(TimelineReaderUtils.UID_KEY);
 assertNotNull(uid);
 assertTrue(uid.equals(appUIDWithFlowInfo + "!type1!0!entity1")
 || uid.equals(appUIDWithFlowInfo + "!type1!0!entity2"));
@@ -855,7 +855,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 assertNo

[45/50] [abbrv] hadoop git commit: Made fixes for whitespace errors and checkstyle warnings before merge.

2017-08-29 Thread varunsaxena
Made fixes for whitespace errors and checkstyle warnings before merge.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3d00c8f3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3d00c8f3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3d00c8f3

Branch: refs/heads/trunk
Commit: 3d00c8f3942da931150de79f42cd4913bf751123
Parents: 512068a
Author: Varun Saxena 
Authored: Wed Aug 30 01:17:40 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../v2/app/rm/TestRMContainerAllocator.java | 13 ++--
 .../TaskAttemptUnsuccessfulCompletionEvent.java |  6 +++---
 .../mapred/TestMRTimelineEventHandling.java |  2 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |  2 +-
 .../distributedshell/TestDistributedShell.java  |  2 +-
 .../yarn/client/api/TimelineV2Client.java   |  1 -
 .../src/main/resources/yarn-default.xml |  2 +-
 ...TimelineAuthenticationFilterInitializer.java | 11 +-
 ...TimelineAuthenticationFilterInitializer.java |  4 +++-
 .../security/authorize/NMPolicyProvider.java| 22 ++--
 .../apptoflow/AppToFlowColumnPrefix.java|  2 +-
 .../storage/reader/EntityTypeReader.java|  5 ++---
 .../timelineservice/storage/TimelineReader.java |  4 ++--
 13 files changed, 38 insertions(+), 38 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3d00c8f3/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
index 6c74a7a..e4a8a1a 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
@@ -142,7 +142,6 @@ import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaS
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
 import 
org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
 import 
org.apache.hadoop.yarn.server.resourcemanager.security.AMRMTokenSecretManager;
-import 
org.apache.hadoop.yarn.server.timelineservice.security.TimelineV2DelegationTokenSecretManagerService.TimelineV2DelegationTokenSecretManager;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.apache.hadoop.yarn.util.Clock;
 import org.apache.hadoop.yarn.util.ControlledClock;
@@ -777,7 +776,7 @@ public class TestRMContainerAllocator {
 new Text("renewer"), null);
 ident.setSequenceNumber(1);
 Token<TimelineDelegationTokenIdentifier> collectorToken =
-new Token<TimelineDelegationTokenIdentifier> (ident.getBytes(),
+new Token<TimelineDelegationTokenIdentifier>(ident.getBytes(),
 new byte[0], TimelineDelegationTokenIdentifier.KIND_NAME,
 new Text(localAddr));
 org.apache.hadoop.yarn.api.records.Token token =
@@ -825,7 +824,7 @@ public class TestRMContainerAllocator {
 // new token.
 ident.setSequenceNumber(100);
 Token<TimelineDelegationTokenIdentifier> collectorToken1 =
-new Token<TimelineDelegationTokenIdentifier> (ident.getBytes(),
+new Token<TimelineDelegationTokenIdentifier>(ident.getBytes(),
 new byte[0], TimelineDelegationTokenIdentifier.KIND_NAME,
 new Text(localAddr));
 token = org.apache.hadoop.yarn.api.records.Token.newInstance(
@@ -3585,15 +3584,15 @@ public class TestRMContainerAllocator {
 }
   }
 
-  private static class MockSchedulerForTimelineCollector
+  private final static class MockSchedulerForTimelineCollector
   implements ApplicationMasterProtocol {
-CollectorInfo collectorInfo;
+private CollectorInfo collectorInfo;
 
-public MockSchedulerForTimelineCollector(CollectorInfo info) {
+private MockSchedulerForTimelineCollector(CollectorInfo info) {
   this.collectorInfo = info;
 }
 
-void updateCollectorInfo(CollectorInfo info) {
+private void updateCollectorInfo(CollectorInfo info) {
   collectorInfo = info;
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3d00c8f3/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptUnsuccessfulCompletionEvent.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapre

[17/50] [abbrv] hadoop git commit: YARN-6027. Support fromid(offset) filter for /flows API (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6027. Support fromid(offset) filter for /flows API (Rohith Sharma K S via 
Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/8bb26465
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/8bb26465
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/8bb26465

Branch: refs/heads/trunk
Commit: 8bb26465956a37d7398818bc0919772e12953725
Parents: cf30b3b
Author: Varun Saxena 
Authored: Thu Mar 2 01:49:34 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../AbstractTimelineReaderHBaseTestBase.java| 176 
 ...stTimelineReaderWebServicesHBaseStorage.java | 424 ---
 .../storage/common/KeyConverterToString.java|  38 ++
 .../storage/flow/FlowActivityRowKey.java|  59 ++-
 .../reader/FlowActivityEntityReader.java|  28 +-
 .../storage/common/TestRowKeys.java |  21 +
 .../reader/TimelineReaderServer.java|   2 +-
 .../reader/TimelineReaderUtils.java |  28 +-
 .../reader/TimelineReaderWebServices.java   |  18 +-
 .../reader/TimelineUIDConverter.java|  30 +-
 10 files changed, 549 insertions(+), 275 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/8bb26465/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/AbstractTimelineReaderHBaseTestBase.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/AbstractTimelineReaderHBaseTestBase.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/AbstractTimelineReaderHBaseTestBase.java
new file mode 100644
index 000..7853c94
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/AbstractTimelineReaderHBaseTestBase.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.timelineservice.reader;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.net.HttpURLConnection;
+import java.net.URI;
+import java.net.URL;
+import java.util.List;
+
+import javax.ws.rs.core.MediaType;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTest;
+import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
+import org.junit.Assert;
+
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.GenericType;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
+import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+
+/**
+ * Test Base for TimelineReaderServer HBase tests.
+ */
+public abstract class AbstractTimelineReaderHBaseTestBase {
+  private static int serverPort;
+  private static TimelineReaderServer server;
+  private static HBaseTestingUtility util;
+
+  public static void setup() throws Exception {
+util = new HBaseTestingUtility();
+Configuration conf = util.getConfiguration();
+conf.setInt("hfile.format.version", 3)

[06/50] [abbrv] hadoop git commit: YARN-6555. Store application flow context in NM state store for work-preserving restart. (Rohith Sharma K S via Haibo Chen)

2017-08-29 Thread varunsaxena
YARN-6555. Store application flow context in NM state store for work-preserving 
restart. (Rohith Sharma K S via Haibo Chen)

(cherry picked from commit 47474fffac085e0e5ea46336bf80ccd0677017a3)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a8f082a1
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a8f082a1
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a8f082a1

Branch: refs/heads/trunk
Commit: a8f082a180cfc14e7fd347b2c03ab0f31e1dd33c
Parents: b87b72b
Author: Haibo Chen 
Authored: Thu May 25 21:15:27 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../nodemanager/containermanager/application/ApplicationImpl.java   | 1 +
 1 file changed, 1 insertion(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a8f082a1/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
index 39be7a7..0097cd2 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
@@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.proto.YarnProtos;
 import 
org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto;
 import 
org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto;
 import org.apache.hadoop.yarn.server.api.records.AppCollectorData;
+import 
org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
 import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;





[42/50] [abbrv] hadoop git commit: MAPREDUCE-6838. [ATSv2 Security] Add timeline delegation token received in allocate response to UGI. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
MAPREDUCE-6838. [ATSv2 Security] Add timeline delegation token received in 
allocate response to UGI. Contributed by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/08f40bcc
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/08f40bcc
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/08f40bcc

Branch: refs/heads/trunk
Commit: 08f40bcc7f4174857bb1fc7c8eb1108d5caaafb3
Parents: bea3e4d
Author: Jian He 
Authored: Mon Aug 21 22:08:07 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../v2/app/rm/RMContainerAllocator.java |  17 +--
 .../v2/app/rm/TestRMContainerAllocator.java | 137 +++
 .../hadoop/yarn/api/records/CollectorInfo.java  |   4 +
 .../api/async/impl/AMRMClientAsyncImpl.java |  13 +-
 .../yarn/client/api/TimelineV2Client.java   |  11 +-
 .../client/api/impl/TimelineV2ClientImpl.java   |  80 ++-
 .../api/impl/TestTimelineClientV2Impl.java  |  56 +++-
 .../timelineservice/NMTimelinePublisher.java|   3 +-
 .../TestTimelineServiceClientIntegration.java   |  13 +-
 .../security/TestTimelineAuthFilterForV2.java   |   3 +-
 10 files changed, 301 insertions(+), 36 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/08f40bcc/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
index 969ec4c..0dc7642 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
@@ -848,7 +848,8 @@ public class RMContainerAllocator extends 
RMContainerRequestor
   updateAMRMToken(response.getAMRMToken());
 }
 
-List<ContainerStatus> finishedContainers = 
response.getCompletedContainersStatuses();
+List<ContainerStatus> finishedContainers =
+response.getCompletedContainersStatuses();
 
 // propagate preemption requests
 final PreemptionMessage preemptReq = response.getPreemptionMessage();
@@ -877,19 +878,13 @@ public class RMContainerAllocator extends 
RMContainerRequestor
 
 handleUpdatedNodes(response);
 handleJobPriorityChange(response);
-// handle receiving the timeline collector address for this app
-String collectorAddr = null;
-if (response.getCollectorInfo() != null) {
-  collectorAddr = response.getCollectorInfo().getCollectorAddr();
-}
-
+// Handle receiving the timeline collector address and token for this app.
 MRAppMaster.RunningAppContext appContext =
 (MRAppMaster.RunningAppContext)this.getContext();
-if (collectorAddr != null && !collectorAddr.isEmpty()
-&& appContext.getTimelineV2Client() != null) {
-  
appContext.getTimelineV2Client().setTimelineServiceAddress(collectorAddr);
+if (appContext.getTimelineV2Client() != null) {
+  appContext.getTimelineV2Client().
+  setTimelineCollectorInfo(response.getCollectorInfo());
 }
-
 for (ContainerStatus cont : finishedContainers) {
   processFinishedContainer(cont);
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/08f40bcc/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
index 6c51626..6c74a7a 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/rm/TestRMContainerAllocator.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.mapreduce.v2.app.rm;
 
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Matchers.any;
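
The allocate-handling change above forwards the whole CollectorInfo (address plus delegation token) to the timeline client instead of only an address string, guarded by a null check on the client. A small sketch of that handoff with stand-in types; none of these are the actual YARN classes.

/** Stand-in types sketching the handoff in the allocate loop above: the whole
 *  collector info (address plus token) is forwarded to the timeline client,
 *  not just an address string. Illustrative only, not the YARN classes. */
public class CollectorHandoffSketch {

  static final class CollectorInfoStub {
    final String address;
    final String token;                  // null when security is off
    CollectorInfoStub(String address, String token) {
      this.address = address;
      this.token = token;
    }
  }

  static final class TimelineClientStub {
    void setTimelineCollectorInfo(CollectorInfoStub info) {
      if (info == null) {
        return;                          // no collector registered yet
      }
      System.out.println("collector at " + info.address
          + (info.token != null ? " (with delegation token)" : " (no token)"));
    }
  }

  public static void main(String[] args) {
    // The client may be absent when the timeline service is disabled.
    TimelineClientStub client = args.length > 0 ? null : new TimelineClientStub();
    if (client != null) {
      client.setTimelineCollectorInfo(
          new CollectorInfoStub("node1:8048", "token-bytes"));
    }
  }
}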

[01/50] [abbrv] hadoop git commit: YARN-5922. Remove direct references of HBaseTimelineWriter/Reader in core ATS classes. Contributed by Haibo Chen.

2017-08-29 Thread varunsaxena
Repository: hadoop
Updated Branches:
  refs/heads/trunk 4cae120c6 -> 32cba6c30


YARN-5922. Remove direct references of HBaseTimelineWriter/Reader in core ATS 
classes. Contributed by Haibo Chen.

(cherry picked from commit a5a55a54ab1568e941062ea3dabdd237f71f15c4)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ecbc8d48
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ecbc8d48
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ecbc8d48

Branch: refs/heads/trunk
Commit: ecbc8d481659ded00d23de1ab366722932792e95
Parents: 4481561
Author: Sangjin Lee 
Authored: Thu Dec 8 12:31:12 2016 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:51 2017 +0530

--
 .../main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ecbc8d48/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 457873a..1004cd1 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2112,8 +2112,8 @@ public class YarnConfiguration extends Configuration {
   TIMELINE_SERVICE_PREFIX + "reader.class";
 
   public static final String DEFAULT_TIMELINE_SERVICE_READER_CLASS =
-  "org.apache.hadoop.yarn.server.timelineservice" +
-  ".storage.HBaseTimelineReaderImpl";
+  "org.apache.hadoop.yarn.server.timelineservice.storage" + 
+  ".HBaseTimelineReaderImpl";
 
   /**
* default schema prefix for hbase tables.





[35/50] [abbrv] hadoop git commit: YARN-5648. [ATSv2 Security] Client side changes for authentication. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
YARN-5648. [ATSv2 Security] Client side changes for authentication. Contributed 
by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ac7f52df
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ac7f52df
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ac7f52df

Branch: refs/heads/trunk
Commit: ac7f52df83d2b4758e7debe9416be7db0ec69d2b
Parents: d3f11e3
Author: Jian He 
Authored: Fri Jun 23 10:44:12 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../client/api/impl/TimelineV2ClientImpl.java   |  38 ++-
 .../hadoop-yarn-server-tests/pom.xml|  11 +
 .../hadoop/yarn/server/TestRMNMSecretKeys.java  |  34 +-
 .../security/TestTimelineAuthFilterForV2.java   | 309 +++
 .../src/test/resources/krb5.conf|  28 --
 5 files changed, 381 insertions(+), 39 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac7f52df/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
index 5d88f70..128ae7a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
@@ -19,7 +19,10 @@
 package org.apache.hadoop.yarn.client.api.impl;
 
 import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.lang.reflect.UndeclaredThrowableException;
 import java.net.URI;
+import java.security.PrivilegedExceptionAction;
 import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.Callable;
 import java.util.concurrent.ExecutionException;
@@ -69,6 +72,8 @@ public class TimelineV2ClientImpl extends TimelineV2Client {
 
   private ApplicationId contextAppId;
 
+  private UserGroupInformation authUgi;
+
   public TimelineV2ClientImpl(ApplicationId appId) {
 super(TimelineV2ClientImpl.class.getName());
 this.contextAppId = appId;
@@ -88,7 +93,6 @@ public class TimelineV2ClientImpl extends TimelineV2Client {
 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
 UserGroupInformation realUgi = ugi.getRealUser();
 String doAsUser = null;
-UserGroupInformation authUgi = null;
 if (realUgi != null) {
   authUgi = realUgi;
   doAsUser = ugi.getShortUserName();
@@ -192,19 +196,33 @@ public class TimelineV2ClientImpl extends 
TimelineV2Client {
 }
   }
 
+  private ClientResponse doPutObjects(URI base, String path,
+  MultivaluedMap<String, String> params, Object obj) {
+return connector.getClient().resource(base).path(path).queryParams(params)
+.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON)
+.put(ClientResponse.class, obj);
+  }
+
   protected void putObjects(URI base, String path,
  MultivaluedMap<String, String> params, Object obj)
   throws IOException, YarnException {
-ClientResponse resp;
+ClientResponse resp = null;
 try {
-  resp = 
connector.getClient().resource(base).path(path).queryParams(params)
-  .accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON)
-  .put(ClientResponse.class, obj);
-} catch (RuntimeException re) {
-  // runtime exception is expected if the client cannot connect the server
-  String msg = "Failed to get the response from the timeline server.";
-  LOG.error(msg, re);
-  throw new IOException(re);
+  resp = authUgi.doAs(new PrivilegedExceptionAction<ClientResponse>() {
+@Override
+public ClientResponse run() throws Exception {
+  return doPutObjects(base, path, params, obj);
+}
+  });
+} catch (UndeclaredThrowableException ue) {
+  Throwable cause = ue.getCause();
+  if (cause instanceof IOException) {
+throw (IOException)cause;
+  } else {
+throw new IOException(cause);
+  }
+} catch (InterruptedException ie) {
+  throw (IOException) new InterruptedIOException().initCause(ie);
 }
 if (resp == null || resp.getStatusInfo()
 .getStatusCode() != ClientResponse.Status.OK.getStatusCode()) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ac7f52df/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
--
diff --git 
a/hadoop-ya
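
putObjects() above runs the Jersey PUT inside UserGroupInformation.doAs() and then unwraps the UndeclaredThrowableException that doAs produces for checked exceptions. A self-contained sketch of just that unwrapping idiom, with a plain Callable standing in for the privileged HTTP call; it is not the Hadoop client itself.

import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.util.concurrent.Callable;

/** Sketch of the exception-unwrapping idiom used by putObjects() above; the
 *  Callable stands in for the privileged HTTP PUT and is not a Hadoop API. */
public final class DoAsUnwrapSketch {

  public static <T> T runUnwrapped(Callable<T> privilegedCall) throws IOException {
    try {
      return privilegedCall.call();
    } catch (UndeclaredThrowableException ue) {
      // doAs wraps checked exceptions thrown by the privileged action.
      Throwable cause = ue.getCause();
      if (cause instanceof IOException) {
        throw (IOException) cause;
      }
      throw new IOException(cause);
    } catch (InterruptedException ie) {
      Thread.currentThread().interrupt();
      throw (IOException) new InterruptedIOException().initCause(ie);
    } catch (Exception e) {
      throw new IOException(e);
    }
  }

  public static void main(String[] args) throws IOException {
    System.out.println(runUnwrapped(() -> "HTTP 200 OK"));
  }
}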

[38/50] [abbrv] hadoop git commit: YARN-6734. Ensure sub-application user is extracted & sent to timeline service (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6734. Ensure sub-application user is extracted & sent to timeline service 
(Rohith Sharma K S via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9f654053
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9f654053
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9f654053

Branch: refs/heads/trunk
Commit: 9f6540535d9148abbea836d54a9e94d25319c5d5
Parents: 3fb71b1
Author: Varun Saxena 
Authored: Fri Jul 28 22:02:19 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 ...stTimelineReaderWebServicesHBaseStorage.java |  26 ++-
 .../storage/DataGeneratorForTest.java   |  49 ++--
 .../storage/TestHBaseTimelineStorageApps.java   |  22 +-
 .../TestHBaseTimelineStorageEntities.java   | 128 +-
 .../flow/TestHBaseStorageFlowActivity.java  |  33 ++-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  84 +--
 .../flow/TestHBaseStorageFlowRunCompaction.java |  14 +-
 .../storage/HBaseTimelineWriterImpl.java| 232 +++
 .../SubApplicationRowKeyPrefix.java |  20 --
 .../collector/TimelineCollector.java|  11 +-
 .../storage/FileSystemTimelineWriterImpl.java   |  15 +-
 .../timelineservice/storage/TimelineWriter.java |  28 +--
 .../collector/TestTimelineCollector.java|  12 +-
 .../TestFileSystemTimelineWriterImpl.java   |   8 +-
 14 files changed, 465 insertions(+), 217 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9f654053/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 302f8e0..b589206 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -36,6 +36,7 @@ import java.util.Set;
 import javax.ws.rs.core.MediaType;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.timelineservice.ApplicationEntity;
 import org.apache.hadoop.yarn.api.records.timelineservice.FlowActivityEntity;
@@ -47,6 +48,7 @@ import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric.Type;
 import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
+import 
org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@@ -334,16 +336,21 @@ public class TestTimelineReaderWebServicesHBaseStorage
 
 HBaseTimelineWriterImpl hbi = null;
 Configuration c1 = getHBaseTestingUtility().getConfiguration();
+UserGroupInformation remoteUser =
+UserGroupInformation.createRemoteUser(user);
 try {
   hbi = new HBaseTimelineWriterImpl();
   hbi.init(c1);
-  hbi.write(cluster, user, flow, flowVersion, runid, entity.getId(), te);
-  hbi.write(cluster, user, flow, flowVersion, runid, entity1.getId(), te1);
-  hbi.write(cluster, user, flow, flowVersion, runid1, entity4.getId(), 
te4);
-  hbi.write(cluster, user, flow2,
-  flowVersion2, runid2, entity3.getId(), te3);
-  hbi.write(cluster, user, flow, flowVersion, runid,
-  "application_11_", userEntities);
+  hbi.write(new TimelineCollectorContext(cluster, user, flow, flowVersion,
+  runid, entity.getId()), te, remoteUser);
+  hbi.write(new TimelineCollectorContext(cluster, user, flow, flowVersion,
+  runid, ent
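
The writer API change above collapses the cluster/user/flow/flowVersion/runId/appId argument list into a single TimelineCollectorContext passed alongside the caller's UGI. A minimal parameter-object sketch in that spirit; this is not the real TimelineCollectorContext, and the values in main() are placeholders.

/** Minimal parameter-object sketch in the spirit of the write(...) change
 *  above; not the real TimelineCollectorContext class. */
public class CollectorContextSketch {
  private final String clusterId;
  private final String userId;
  private final String flowName;
  private final String flowVersion;
  private final long flowRunId;
  private final String appId;

  public CollectorContextSketch(String clusterId, String userId, String flowName,
      String flowVersion, long flowRunId, String appId) {
    this.clusterId = clusterId;
    this.userId = userId;
    this.flowName = flowName;
    this.flowVersion = flowVersion;
    this.flowRunId = flowRunId;
    this.appId = appId;
  }

  @Override
  public String toString() {
    // A writer signature then shrinks to write(context, entities, callerUgi).
    return clusterId + "/" + userId + "/" + flowName + "@" + flowVersion
        + "/" + flowRunId + "/" + appId;
  }

  public static void main(String[] args) {
    System.out.println(new CollectorContextSketch(
        "cluster1", "user1", "some_flow_name", "1", 1002345678919L,
        "application_123_0001"));
  }
}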

[12/50] [abbrv] hadoop git commit: YARN-6658. Remove columnFor() methods of Columns in HBaseTimeline backend (Haibo Chen via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6658. Remove columnFor() methods of Columns in HBaseTimeline backend 
(Haibo Chen via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f6a51dce
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f6a51dce
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f6a51dce

Branch: refs/heads/trunk
Commit: f6a51dce186dacbcda0a444eac7df465577f2a8b
Parents: a8f082a
Author: Varun Saxena 
Authored: Wed May 31 10:14:35 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../storage/application/ApplicationColumn.java  | 48 -
 .../application/ApplicationColumnPrefix.java| 52 --
 .../storage/apptoflow/AppToFlowColumn.java  | 47 
 .../storage/entity/EntityColumn.java| 48 -
 .../storage/entity/EntityColumnPrefix.java  | 51 --
 .../storage/flow/FlowActivityColumnPrefix.java  | 56 
 .../storage/flow/FlowRunColumn.java | 51 --
 .../storage/flow/FlowRunColumnPrefix.java   | 51 --
 8 files changed, 404 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f6a51dce/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
index dde3911..00eaa7e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumn.java
@@ -105,52 +105,4 @@ public enum ApplicationColumn implements 
Column {
 return column.getValueConverter();
   }
 
-  /**
-   * Retrieve an {@link ApplicationColumn} given a name, or null if there is no
-   * match. The following holds true: {@code columnFor(x) == columnFor(y)} if
-   * and only if {@code x.equals(y)} or {@code (x == y == null)}.
-   *
-   * @param columnQualifier Name of the column to retrieve
-   * @return the corresponding {@link ApplicationColumn} or null
-   */
-  public static final ApplicationColumn columnFor(String columnQualifier) {
-
-// Match column based on value, assume column family matches.
-for (ApplicationColumn ac : ApplicationColumn.values()) {
-  // Find a match based only on name.
-  if (ac.getColumnQualifier().equals(columnQualifier)) {
-return ac;
-  }
-}
-
-// Default to null
-return null;
-  }
-
-  /**
-   * Retrieve an {@link ApplicationColumn} given a name, or null if there is no
-   * match. The following holds true: {@code columnFor(a,x) == columnFor(b,y)}
-   * if and only if {@code a.equals(b) & x.equals(y)} or
-   * {@code (x == y == null)}
-   *
-   * @param columnFamily The columnFamily for which to retrieve the column.
-   * @param name Name of the column to retrieve
-   * @return the corresponding {@link ApplicationColumn} or null if both
-   * arguments don't match.
-   */
-  public static final ApplicationColumn columnFor(
-  ApplicationColumnFamily columnFamily, String name) {
-
-for (ApplicationColumn ac : ApplicationColumn.values()) {
-  // Find a match based column family and on name.
-  if (ac.columnFamily.equals(columnFamily)
-  && ac.getColumnQualifier().equals(name)) {
-return ac;
-  }
-}
-
-// Default to null
-return null;
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f6a51dce/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/application/ApplicationColumnPrefix.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/

[44/50] [abbrv] hadoop git commit: YARN-7041. Nodemanager NPE running jobs with security off. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
YARN-7041. Nodemanager NPE running jobs with security off. Contributed by Varun 
Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/e276c75e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/e276c75e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/e276c75e

Branch: refs/heads/trunk
Commit: e276c75ec17634fc3b521fdb15b6ac141b001274
Parents: 32188d3
Author: Rohith Sharma K S 
Authored: Sat Aug 19 11:37:17 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../records/impl/pb/CollectorInfoPBImpl.java|  8 +++--
 .../impl/pb/NodeHeartbeatRequestPBImpl.java | 22 +
 .../impl/pb/NodeHeartbeatResponsePBImpl.java| 23 -
 .../records/impl/pb/AppCollectorDataPBImpl.java |  8 +++--
 .../hadoop/yarn/TestYarnServerApiClasses.java   | 34 +---
 5 files changed, 71 insertions(+), 24 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/e276c75e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/CollectorInfoPBImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/CollectorInfoPBImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/CollectorInfoPBImpl.java
index bb54133..5835d1a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/CollectorInfoPBImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/CollectorInfoPBImpl.java
@@ -114,9 +114,13 @@ public class CollectorInfoPBImpl extends CollectorInfo {
   @Override
   public Token getCollectorToken() {
 CollectorInfoProtoOrBuilder p = viaProto ? proto : builder;
-if (this.collectorToken == null && p.hasCollectorToken()) {
-  this.collectorToken = convertFromProtoFormat(p.getCollectorToken());
+if (this.collectorToken != null) {
+  return this.collectorToken;
+}
+if (!p.hasCollectorToken()) {
+  return null;
 }
+this.collectorToken = convertFromProtoFormat(p.getCollectorToken());
 return this.collectorToken;
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/e276c75e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
index c07a6eb..1ffd223 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/protocolrecords/impl/pb/NodeHeartbeatRequestPBImpl.java
@@ -168,12 +168,17 @@ public class NodeHeartbeatRequestPBImpl extends 
NodeHeartbeatRequest {
 for (Map.Entry<ApplicationId, AppCollectorData> entry :
 registeringCollectors.entrySet()) {
   AppCollectorData data = entry.getValue();
-  builder.addRegisteringCollectors(AppCollectorDataProto.newBuilder()
-  .setAppId(convertToProtoFormat(entry.getKey()))
-  .setAppCollectorAddr(data.getCollectorAddr())
-  .setAppCollectorToken(convertToProtoFormat(data.getCollectorToken()))
-  .setRmIdentifier(data.getRMIdentifier())
-  .setVersion(data.getVersion()));
+  AppCollectorDataProto.Builder appCollectorDataBuilder =
+  AppCollectorDataProto.newBuilder()
+  .setAppId(convertToProtoFormat(entry.getKey()))
+  .setAppCollectorAddr(data.getCollectorAddr())
+  .setRmIdentifier(data.getRMIdentifier())
+  .setVersion(data.getVersion());
+  if (data.getCollectorToken() != null) {
+appCollectorDataBuilder.setAppCollectorToken(
+convertToProtoFormat(data.getCollectorToken()));
+  }
+  builder.addRegisteringCollectors(appCollectorDataBuilder);
 }
   }
 
@@ -274,7 +279,10 @@ public class NodeHeartbeatRequestPBImpl extends 
NodeHeartbeatRequest {
   this.registeringCollectors = new HashMap<>();
   for (AppCollect
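
The NPE fix above sets the collector token on the protobuf builder only when a token is actually present, since generated proto builders reject null values; with security off there is no token at all. A plain-Java sketch of that optional-field guard, with a hypothetical map-backed builder in place of the generated protobuf code.

import java.util.HashMap;
import java.util.Map;

/** Sketch of the "set optional fields only when present" guard from the fix
 *  above; the builder below is hypothetical, not generated protobuf code. */
public class OptionalFieldGuardSketch {

  static class RecordBuilder {
    private final Map<String, String> fields = new HashMap<>();
    RecordBuilder set(String name, String value) {
      if (value == null) {
        // Generated proto builders throw on null, so callers must skip
        // absent optional fields instead of passing null through.
        throw new NullPointerException(name);
      }
      fields.put(name, value);
      return this;
    }
    Map<String, String> build() { return fields; }
  }

  static Map<String, String> toRecord(String collectorAddr, String collectorToken) {
    RecordBuilder builder = new RecordBuilder().set("collectorAddr", collectorAddr);
    if (collectorToken != null) {        // token is absent when security is off
      builder.set("collectorToken", collectorToken);
    }
    return builder.build();
  }

  public static void main(String[] args) {
    System.out.println(toRecord("node1:8048", null)); // no NPE with security off
  }
}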

[46/50] [abbrv] hadoop git commit: YARN-6861. Reader API for sub application entities (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6861. Reader API for sub application entities (Rohith Sharma K S via Varun 
Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b2efebdd
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b2efebdd
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b2efebdd

Branch: refs/heads/trunk
Commit: b2efebdd077ecb7b6ffe7fb8a957dadb0e78290f
Parents: 7fd6ae2
Author: Varun Saxena 
Authored: Sun Aug 20 00:35:14 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 ...stTimelineReaderWebServicesHBaseStorage.java | 104 ++--
 .../storage/reader/GenericEntityReader.java |   4 +-
 .../reader/SubApplicationEntityReader.java  | 488 +++
 .../reader/TimelineEntityReaderFactory.java |   3 +
 .../reader/TimelineReaderContext.java   |  19 +-
 .../reader/TimelineReaderManager.java   |   9 +-
 .../reader/TimelineReaderWebServices.java   | 158 ++
 .../reader/TimelineReaderWebServicesUtils.java  |  10 +
 .../reader/TimelineUIDConverter.java|  35 ++
 .../reader/TestTimelineUIDConverter.java|   9 +
 10 files changed, 787 insertions(+), 52 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b2efebdd/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index b589206..5acf1f4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -71,6 +71,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   private static long ts = System.currentTimeMillis();
   private static long dayTs =
   HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts);
+  private static String doAsUser = "remoteuser";
 
   @BeforeClass
   public static void setupBeforeClass() throws Exception {
@@ -337,7 +338,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
 HBaseTimelineWriterImpl hbi = null;
 Configuration c1 = getHBaseTestingUtility().getConfiguration();
 UserGroupInformation remoteUser =
-UserGroupInformation.createRemoteUser(user);
+UserGroupInformation.createRemoteUser(doAsUser);
 try {
   hbi = new HBaseTimelineWriterImpl();
   hbi.init(c1);
@@ -2263,60 +2264,69 @@ public class TestTimelineReaderWebServicesHBaseStorage
   public void testGenericEntitiesForPagination() throws Exception {
 Client client = createClient();
 try {
-  int limit = 10;
-  String queryParam = "?limit=" + limit;
   String resourceUri = "http://localhost:"; + getServerPort() + "/ws/v2/"
   + "timeline/clusters/cluster1/apps/application_11_/"
   + "entities/entitytype";
-  URI uri = URI.create(resourceUri + queryParam);
-
-  ClientResponse resp = getResponse(client, uri);
-  List entities =
-  resp.getEntity(new GenericType>() {
-  });
-  // verify for entity-10 to entity-1 in descending order.
-  verifyPaginatedEntites(entities, limit, limit);
-
-  limit = 4;
-  queryParam = "?limit=" + limit;
-  uri = URI.create(resourceUri + queryParam);
-  resp = getResponse(client, uri);
-  entities = resp.getEntity(new GenericType>() {
-  });
-  // verify for entity-10 to entity-7 in descending order.
-  TimelineEntity entity = verifyPaginatedEntites(entities, limit, 10);
-
-  queryParam = "?limit=" + limit + "&fromid="
-  + entity.getInfo().get(TimelineReaderUtils.FROMID_KEY);
-  uri = URI.create(resourceUri + queryParam);
-  resp = getResponse(client, uri);
-  entities = resp.getEntity(new GenericType>() {
-  });
-  // verify for entity-7 to entity-4 in descending order.
-  entity = verifyPaginatedEntites(entities, limit, 7);
-
-  queryParam = "?limit=" + limit + "&fromid="
-  + entity.getInfo().g

[28/50] [abbrv] hadoop git commit: YARN-5647. [ATSv2 Security] Collector side changes for loading auth filters and principals. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/879de512/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java
new file mode 100644
index 000..78bf20f
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/util/timeline/TimelineServerUtils.java
@@ -0,0 +1,92 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.util.timeline;
+
+import java.util.LinkedHashSet;
+import java.util.Set;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.AuthenticationFilterInitializer;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilter;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
+import org.apache.hadoop.yarn.server.timeline.security.TimelineDelgationTokenSecretManagerService;
+
+/**
+ * Set of utility methods to be used across timeline reader and collector.
+ */
+public final class TimelineServerUtils {
+  private static final Log LOG = LogFactory.getLog(TimelineServerUtils.class);
+
+  private TimelineServerUtils() {
+  }
+
+  /**
+   * Sets filter initializers configuration based on existing configuration and
+   * default filters added by timeline service(such as timeline auth filter and
+   * CORS filter).
+   * @param conf Configuration object.
+   * @param configuredInitializers Comma separated list of filter initializers.
+   * @param defaultInitializers Set of initializers added by default by timeline
+   * service.
+   */
+  public static void setTimelineFilters(Configuration conf,
+      String configuredInitializers, Set<String> defaultInitializers) {
+    String[] parts = configuredInitializers.split(",");
+    Set<String> target = new LinkedHashSet<String>();
+    for (String filterInitializer : parts) {
+      filterInitializer = filterInitializer.trim();
+      if (filterInitializer.equals(
+          AuthenticationFilterInitializer.class.getName()) ||
+          filterInitializer.isEmpty()) {
+        continue;
+      }
+      target.add(filterInitializer);
+    }
+    target.addAll(defaultInitializers);
+    String actualInitializers =
+        org.apache.commons.lang.StringUtils.join(target, ",");
+    LOG.info("Filter initializers set for timeline service: " +
+        actualInitializers);
+    conf.set("hadoop.http.filter.initializers", actualInitializers);
+  }
+
+  /**
+   * Adds timeline authentication filter to the set of default filter
+   * initializers and assigns the delegation token manager service to it.
+   * @param initializers Comma separated list of filter initializers.
+   * @param defaultInitializers Set of initializers added by default by timeline
+   * service.
+   * @param delegationTokenMgrService Delegation token manager service.
+   * This will be used by timeline authentication filter to assign
+   * delegation tokens.
+   */
+  public static void addTimelineAuthFilter(String initializers,
+      Set<String> defaultInitializers,
+      TimelineDelgationTokenSecretManagerService delegationTokenMgrService) {
+    TimelineAuthenticationFilter.setTimelineDelegationTokenSecretManager(
+        delegationTokenMgrService.getTimelineDelegationTokenSecretManager());
+    if (!initializers.contains(
+        TimelineAuthenticationFilterInitializer.class.getName())) {
+      defaultInitializers.add(
+          TimelineAuthenticationFilterInitializer.class.getName());
+    }
+  }
+}
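
For illustration, a consumer of these two helpers might wire them up roughly as follows (a minimal sketch, assuming the imports shown in the file above; the method name initTimelineFilters and its tokenMgrService parameter are placeholders, not part of the commit):

    static void initTimelineFilters(Configuration conf,
        TimelineDelgationTokenSecretManagerService tokenMgrService) {
      // Start from whatever filter initializers are already configured.
      String initializers = conf.get("hadoop.http.filter.initializers", "");
      Set<String> defaultInitializers = new LinkedHashSet<String>();
      // Register the timeline auth filter and hand it the delegation token manager.
      TimelineServerUtils.addTimelineAuthFilter(
          initializers, defaultInitializers, tokenMgrService);
      // Merge configured and default initializers back into the configuration.
      TimelineServerUtils.setTimelineFilters(conf, initializers, defaultInitializers);
    }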

http://git-wip-us.apache.org/repos/asf/hadoop/blob/879de512/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-ser

[47/50] [abbrv] hadoop git commit: YARN-7038. [Atsv2 Security] CollectorNodemanagerProtocol RPC interface doesn't work when service authorization is enabled. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
YARN-7038. [Atsv2 Security] CollectorNodemanagerProtocol RPC interface doesn't 
work when service authorization is enabled. Contributed by Varun Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/32188d32
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/32188d32
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/32188d32

Branch: refs/heads/trunk
Commit: 32188d32954d94ec2efeec2b2fcc5b2abff4c1ea
Parents: b664569
Author: Rohith Sharma K S 
Authored: Fri Aug 18 13:32:36 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../hadoop-common/src/main/conf/hadoop-policy.xml| 11 +++
 .../org/apache/hadoop/yarn/conf/YarnConfiguration.java   |  4 
 .../hadoop/yarn/conf/TestYarnConfigurationFields.java|  2 ++
 .../nodemanager/collectormanager/NMCollectorService.java |  7 +++
 .../nodemanager/security/authorize/NMPolicyProvider.java |  6 +-
 5 files changed, 29 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/32188d32/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
--
diff --git 
a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml 
b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
index 2bf5c02..d282c58 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-policy.xml
@@ -223,4 +223,15 @@
     group list is separated by a blank. For e.g. "alice,bob users,wheel".
     A special value of "*" means all users are allowed.</description>
   </property>
+
+  <property>
+    <name>security.collector-nodemanager.protocol.acl</name>
+    <value>*</value>
+    <description>ACL for CollectorNodemanagerProtocol, used by nodemanager
+    if timeline service v2 is enabled, for the timeline collector and nodemanager
+    to communicate with each other.
+    The ACL is a comma-separated list of user and group names. The user and
+    group list is separated by a blank. For e.g. "alice,bob users,wheel".
+    A special value of "*" means all users are allowed.</description>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/32188d32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index ff9632a..8a513ac 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -1782,6 +1782,10 @@ public class YarnConfiguration extends Configuration {
   YARN_SECURITY_SERVICE_AUTHORIZATION_APPLICATIONHISTORY_PROTOCOL =
   "security.applicationhistory.protocol.acl";
 
+  public static final String
+  YARN_SECURITY_SERVICE_AUTHORIZATION_COLLECTOR_NODEMANAGER_PROTOCOL =
+  "security.collector-nodemanager.protocol.acl";
+
   /** No. of milliseconds to wait between sending a SIGTERM and SIGKILL
* to a running container */
   public static final String NM_SLEEP_DELAY_BEFORE_SIGKILL_MS =
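
For illustration, the new ACL can be tightened from the permissive "*" default; a minimal sketch using the constant added above (the principal names are example values, and in practice the ACL is set in hadoop-policy.xml rather than in code):

    // Example values only: allow the 'yarn' user and the 'hadoop' group.
    Configuration conf = new Configuration();
    conf.set(
        YarnConfiguration.YARN_SECURITY_SERVICE_AUTHORIZATION_COLLECTOR_NODEMANAGER_PROTOCOL,
        "yarn hadoop");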

http://git-wip-us.apache.org/repos/asf/hadoop/blob/32188d32/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
index d97c6eb..bd7bf93 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/test/java/org/apache/hadoop/yarn/conf/TestYarnConfigurationFields.java
@@ -66,6 +66,8 @@ public class TestYarnConfigurationFields extends 
TestConfigurationFieldsBase {
 configurationPropsToSkipCompare
 .add(YarnConfiguration
 .YARN_SECURITY_SERVICE_AUTHORIZATION_RESOURCETRACKER_PROTOCOL);
+configurationPropsToSkipCompare.add(YarnConfiguration
+.YARN_SECURITY_SERVICE_AUTHORIZATION_COLLECTOR_NODEMANAGER_PROTOCOL);
 
configurationPropsToSkipCompare.add(YarnConfiguration.CURATOR_LEADER_ELECTOR);
 
 // Federation default configs to be ignored

http://git-wip-us.apache.org/repos/asf/hado

[22/50] [abbrv] hadoop git commit: YARN-6069. CORS support in timeline v2 (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6069. CORS support in timeline v2 (Rohith Sharma K S via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ff43b8db
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ff43b8db
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ff43b8db

Branch: refs/heads/trunk
Commit: ff43b8db7aad28e913a6fbf017e9de622a39677e
Parents: 684de1a
Author: Varun Saxena 
Authored: Thu Feb 23 11:15:51 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../src/main/resources/yarn-default.xml  | 11 +++
 1 file changed, 11 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ff43b8db/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 7c810a8..ae022e4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -3188,6 +3188,17 @@
     <name>yarn.timeline-service.http-cross-origin.enabled</name>
     <value>false</value>
   </property>
+
+  <property>
+    <description>
+      Flag to enable cross-origin (CORS) support for timeline service v1.x or
+      Timeline Reader in timeline service v2. For timeline service v2, also add
+      org.apache.hadoop.security.HttpCrossOriginFilterInitializer to the
+      configuration hadoop.http.filter.initializers in core-site.xml.
+    </description>
+    <name>yarn.timeline-service.http-cross-origin.enabled</name>
+    <value>false</value>
+  </property>
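
As the description above notes, CORS for the v.2 Timeline Reader needs both the flag and the filter initializer; a minimal sketch (shown programmatically for brevity, the properties normally live in yarn-site.xml and core-site.xml):

    Configuration conf = new YarnConfiguration();
    conf.setBoolean("yarn.timeline-service.http-cross-origin.enabled", true);
    // Append this initializer to the existing hadoop.http.filter.initializers
    // value in core-site.xml; it is shown standalone here only for brevity.
    conf.set("hadoop.http.filter.initializers",
        "org.apache.hadoop.security.HttpCrossOriginFilterInitializer");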
 
   
 





[19/50] [abbrv] hadoop git commit: YARN-6094. Update the coprocessor to be a dynamically loaded one. Contributed by Vrushali C.

2017-08-29 Thread varunsaxena
YARN-6094. Update the coprocessor to be a dynamically loaded one. Contributed 
by Vrushali C.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/580d8849
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/580d8849
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/580d8849

Branch: refs/heads/trunk
Commit: 580d884913c200f6c6cae475d8c183a4b61ff710
Parents: 5e0acee
Author: Sangjin Lee 
Authored: Thu Jan 19 14:52:47 2017 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  12 +
 .../src/main/resources/yarn-default.xml |   9 +
 ...stTimelineReaderWebServicesHBaseStorage.java |   4 +-
 .../storage/DataGeneratorForTest.java   | 364 ---
 .../storage/TestHBaseTimelineStorageApps.java   |   6 +-
 .../TestHBaseTimelineStorageEntities.java   |   6 +-
 .../storage/TestHBaseTimelineStorageSchema.java |  12 +-
 .../storage/flow/TestFlowDataGenerator.java |  28 +-
 .../flow/TestHBaseStorageFlowActivity.java  |   8 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  46 ++-
 .../flow/TestHBaseStorageFlowRunCompaction.java |   8 +-
 .../storage/flow/FlowRunCoprocessor.java|  36 +-
 .../storage/flow/FlowRunTable.java  |  33 +-
 .../src/site/markdown/TimelineServiceV2.md  |  24 +-
 14 files changed, 321 insertions(+), 275 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/580d8849/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 1004cd1..de90c69 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2146,6 +2146,18 @@ public class YarnConfiguration extends Configuration {
   + "hbase.coprocessor.app-final-value-retention-milliseconds";
 
   /**
+   * The name of the setting for the location of the coprocessor
+   * jar on hdfs.
+   */
+  public static final String FLOW_RUN_COPROCESSOR_JAR_HDFS_LOCATION =
+  TIMELINE_SERVICE_PREFIX
+  + "hbase.coprocessor.jar.hdfs.location";
+
+  /** default hdfs location for flowrun coprocessor jar. */
+  public static final String DEFAULT_HDFS_LOCATION_FLOW_RUN_COPROCESSOR_JAR =
+  "/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar";
+
+/**
* The name for setting that points to an optional HBase configuration
* (hbase-site.xml file) with settings that will override the ones found on
* the classpath.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/580d8849/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 37dae63..7c810a8 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -2343,6 +2343,15 @@
 
   
 
+    The default hdfs location for flowrun coprocessor jar.
+    </description>
+    <name>yarn.timeline-service.hbase.coprocessor.jar.hdfs.location
+    </name>
+    <value>/hbase/coprocessor/hadoop-yarn-server-timelineservice.jar</value>
+  </property>
+
+  <property>
+    <description>
 The value of this parameter sets the prefix for all tables that are part of
 timeline service in the hbase storage schema. It can be set to "dev."
 or "staging." if it is to be used for development or staging instances.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/580d8849/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservic

[50/50] [abbrv] hadoop git commit: YARN-5355: YARN Timeline Service v.2: alpha 2 (varunsaxena)

2017-08-29 Thread varunsaxena
YARN-5355: YARN Timeline Service v.2: alpha 2 (varunsaxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/32cba6c3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/32cba6c3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/32cba6c3

Branch: refs/heads/trunk
Commit: 32cba6c3036dfb1dcb0a4fd77a68ddad17dd4082
Parents: 4cae120 3d00c8f
Author: Varun Saxena 
Authored: Wed Aug 30 11:41:10 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:41:10 2017 +0530

--
 .../src/main/conf/hadoop-policy.xml |   11 +
 .../jobhistory/JobHistoryEventHandler.java  |   27 +-
 .../v2/app/job/impl/TaskAttemptImpl.java|   58 +-
 .../mapreduce/v2/app/job/impl/TaskImpl.java |   19 +-
 .../v2/app/rm/RMContainerAllocator.java |   14 +-
 .../hadoop/mapreduce/jobhistory/TestEvents.java |4 +-
 .../jobhistory/TestJobHistoryEventHandler.java  |8 +-
 .../v2/app/rm/TestRMContainerAllocator.java |  136 +++
 .../jobhistory/MapAttemptFinishedEvent.java |   87 +-
 .../jobhistory/ReduceAttemptFinishedEvent.java  |   83 +-
 .../jobhistory/TaskAttemptFinishedEvent.java|   47 +-
 .../TaskAttemptUnsuccessfulCompletionEvent.java |   48 +-
 .../mapreduce/jobhistory/TaskFailedEvent.java   |   51 +-
 .../mapreduce/jobhistory/TaskFinishedEvent.java |   42 +-
 .../mapred/TestMRTimelineEventHandling.java |   30 +-
 .../mapreduce/JobHistoryFileReplayMapperV1.java |3 +-
 .../hadoop/mapreduce/SimpleEntityWriterV1.java  |3 +-
 hadoop-project/src/site/markdown/index.md.vm|8 +-
 .../api/protocolrecords/AllocateResponse.java   |   32 +-
 .../hadoop/yarn/api/records/CollectorInfo.java  |   59 +
 .../timelineservice/ApplicationEntity.java  |   28 +
 .../records/timelineservice/TimelineEntity.java |   52 +-
 .../hadoop/yarn/conf/YarnConfiguration.java |   75 +-
 .../hadoop/yarn/util/TimelineServiceHelper.java |8 +
 .../src/main/proto/yarn_protos.proto|5 +
 .../src/main/proto/yarn_service_protos.proto|2 +-
 .../timelineservice/TestApplicationEntity.java  |   71 ++
 .../yarn/conf/TestYarnConfigurationFields.java  |2 +
 .../distributedshell/ApplicationMaster.java |   42 +-
 .../distributedshell/TestDistributedShell.java  |  173 +--
 .../api/async/impl/AMRMClientAsyncImpl.java |   19 +-
 .../ApplicationMasterServiceProtoTestBase.java  |   72 ++
 .../hadoop/yarn/client/ProtocolHATestBase.java  |   20 +-
 ...ationMasterServiceProtocolForTimelineV2.java |   71 ++
 ...estApplicationMasterServiceProtocolOnHA.java |   46 +-
 .../api/async/impl/TestAMRMClientAsync.java |2 +-
 .../impl/pb/AllocateResponsePBImpl.java |   37 +-
 .../records/impl/pb/CollectorInfoPBImpl.java|  152 +++
 .../yarn/client/api/TimelineV2Client.java   |   10 +-
 .../client/api/impl/TimelineV2ClientImpl.java   |  117 +-
 .../src/main/resources/yarn-default.xml |   44 +
 .../hadoop/yarn/api/TestPBImplRecords.java  |2 +
 .../api/impl/TestTimelineClientV2Impl.java  |   56 +-
 .../ApplicationHistoryServer.java   |   79 +-
 .../security/TimelineAuthenticationFilter.java  |   49 -
 ...TimelineAuthenticationFilterInitializer.java |  129 ---
 ...lineDelegationTokenSecretManagerService.java |  240 -
 ...neV1DelegationTokenSecretManagerService.java |  225 
 .../TestTimelineAuthenticationFilter.java   |  323 --
 .../TestTimelineAuthenticationFilterForV1.java  |  332 ++
 ...TimelineAuthenticationFilterInitializer.java |   76 --
 .../protocolrecords/NodeHeartbeatRequest.java   |   13 +-
 .../protocolrecords/NodeHeartbeatResponse.java  |8 +-
 .../ReportNewCollectorInfoRequest.java  |   13 +-
 .../impl/pb/NodeHeartbeatRequestPBImpl.java |   74 +-
 .../impl/pb/NodeHeartbeatResponsePBImpl.java|   61 +-
 .../pb/ReportNewCollectorInfoRequestPBImpl.java |   36 +-
 .../server/api/records/AppCollectorData.java|  125 +++
 .../server/api/records/AppCollectorsMap.java|   46 -
 .../records/impl/pb/AppCollectorDataPBImpl.java |  227 
 .../records/impl/pb/AppCollectorsMapPBImpl.java |  152 ---
 .../api/records/impl/pb/package-info.java   |   19 +
 .../security/TimelineAuthenticationFilter.java  |   55 +
 ...TimelineAuthenticationFilterInitializer.java |  137 +++
 ...elineDelgationTokenSecretManagerService.java |   83 ++
 .../server/timeline/security/package-info.java  |   26 +
 .../util/timeline/TimelineServerUtils.java  |   92 ++
 .../yarn/server/util/timeline/package-info.java |   25 +
 .../yarn_server_common_service_protos.proto |   16 +-
 .../java/org/apache/hadoop/yarn/TestRPC.java|   36 +-
 .../hadoop/yarn/TestYarnServerApiClasses.java   |   48 +-
 ...TimelineAuthenticationFilterInitializer.java |   78 ++
 .../hadoop/yarn/server/nodemanager/Context.java |   14 +-
 .../yarn/server/nodemanager/NodeManager.java|   64 +-

[48/50] [abbrv] hadoop git commit: MAPREDUCE-6838. Addendum to fix code comment

2017-08-29 Thread varunsaxena
MAPREDUCE-6838. Addendum to fix code comment


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/16ba4f54
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/16ba4f54
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/16ba4f54

Branch: refs/heads/trunk
Commit: 16ba4f544f13d614c1ebd6101ee14f7714e0fc54
Parents: 08f40bc
Author: Varun Saxena 
Authored: Tue Aug 22 18:36:21 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java  | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/16ba4f54/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
index 97d1364..220d6af 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/client/api/impl/TimelineV2ClientImpl.java
@@ -187,8 +187,7 @@ public class TimelineV2ClientImpl extends TimelineV2Client {
 if (collectorAddr == null || collectorAddr.isEmpty()) {
   collectorAddr = timelineServiceAddress;
 }
-// Token need not be updated if either address or token service does not
-// exist.
+// Token need not be updated if both address and token service do not 
exist.
 String service = delegationToken.getService();
 if ((service == null || service.isEmpty()) &&
 (collectorAddr == null || collectorAddr.isEmpty())) {





[43/50] [abbrv] hadoop git commit: YARN-6047 Documentation updates for TimelineService v2 (Contributed by Rohith Sharma)

2017-08-29 Thread varunsaxena
YARN-6047 Documentation updates for TimelineService v2 (Contributed by Rohith 
Sharma)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/bea3e4df
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/bea3e4df
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/bea3e4df

Branch: refs/heads/trunk
Commit: bea3e4df76dfcae7de804def7bd39b2b1d6639c9
Parents: 9b08f36
Author: Vrushali C 
Authored: Mon Aug 21 20:29:05 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 hadoop-project/src/site/markdown/index.md.vm|   8 +-
 .../src/site/markdown/TimelineServiceV2.md  | 333 ++-
 2 files changed, 324 insertions(+), 17 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/bea3e4df/hadoop-project/src/site/markdown/index.md.vm
--
diff --git a/hadoop-project/src/site/markdown/index.md.vm 
b/hadoop-project/src/site/markdown/index.md.vm
index 62e21b2..bb7bda2 100644
--- a/hadoop-project/src/site/markdown/index.md.vm
+++ b/hadoop-project/src/site/markdown/index.md.vm
@@ -55,17 +55,15 @@ documentation.
 YARN Timeline Service v.2
 ---
 
-We are introducing an early preview (alpha 1) of a major revision of YARN
+We are introducing an early preview (alpha 2) of a major revision of YARN
 Timeline Service: v.2. YARN Timeline Service v.2 addresses two major
 challenges: improving scalability and reliability of Timeline Service, and
 enhancing usability by introducing flows and aggregation.
 
-YARN Timeline Service v.2 alpha 1 is provided so that users and developers
+YARN Timeline Service v.2 alpha 2 is provided so that users and developers
 can test it and provide feedback and suggestions for making it a ready
 replacement for Timeline Service v.1.x. It should be used only in a test
-capacity. Most importantly, security is not enabled. Do not set up or use
-Timeline Service v.2 until security is implemented if security is a
-critical requirement.
+capacity.
 
 More details are available in the
 [YARN Timeline Service v.2](./hadoop-yarn/hadoop-yarn-site/TimelineServiceV2.html)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/bea3e4df/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
index 678c406..2de305d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
@@ -73,12 +73,8 @@ The following diagram illustrates the design at a high level.
 
 ### Current Status and Future Plans
 
-YARN Timeline Service v.2 is currently in alpha ("alpha 1"). It is very much work in progress, and
-many things can and will change rapidly. Users must enable Timeline Service v.2 only on a test or
-experimental cluster to test the feature.
-
-Most importantly, **security is not enabled**. Do not set up or use Timeline Service v.2 until
-security is implemented if security is a requirement.
+YARN Timeline Service v.2 is currently in alpha ("alpha 2"). It is a work in progress, and
+many things can and will change rapidly.
 
 A complete end-to-end flow of writes and reads is functional, with Apache HBase as the backend.
 You should be able to start generating data. When enabled, all YARN-generic events are
@@ -95,16 +91,19 @@ resource manager also has its dedicated in-process collector. The reader is curr
 instance. Currently, it is not possible to write to Timeline Service outside the context of a YARN
 application (i.e. no off-cluster client).
 
+Starting from alpha2, Timeline Service v.2 supports simple authorization in terms of a
+configurable whitelist of users and groups who can read timeline data. Cluster admins are
+allowed by default to read timeline data.
+
 When YARN Timeline Service v.2 is disabled, one can expect no functional or performance impact
 on any other existing functionality.
 
 The work to make it truly production-ready continues. Some key items include
 
 * More robust storage fault tolerance
-* Security
 * Support for off-cluster clients
-* More complete and integrated web UI
 * Better support for long-running apps
+* Support for ACLs
 * Offline (time-based periodic) aggregation for flows, users, and queues for reporting and
 analysis
 * Timeline collectors as separate instances from node managers
@@ -144,6 +143,27 @@ New configuration parameters that are introduced with v.2 are marked bold.
 | **`yarn.timeline-serv

[29/50] [abbrv] hadoop git commit: YARN-5647. [ATSv2 Security] Collector side changes for loading auth filters and principals. Contributed by Varun Saxena

2017-08-29 Thread varunsaxena
YARN-5647. [ATSv2 Security] Collector side changes for loading auth filters and 
principals. Contributed by Varun Saxena


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/879de512
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/879de512
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/879de512

Branch: refs/heads/trunk
Commit: 879de51206ddef132c092ee21e8b6c6e5976a56e
Parents: 24447b3
Author: Jian He 
Authored: Wed Jun 7 13:45:34 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../ApplicationHistoryServer.java   |  79 ++---
 .../security/TimelineAuthenticationFilter.java  |  49 ---
 ...TimelineAuthenticationFilterInitializer.java | 129 ---
 ...lineDelegationTokenSecretManagerService.java | 240 --
 ...neV1DelegationTokenSecretManagerService.java | 225 +
 .../TestTimelineAuthenticationFilter.java   | 323 --
 .../TestTimelineAuthenticationFilterForV1.java  | 332 +++
 ...TimelineAuthenticationFilterInitializer.java |  76 -
 .../security/TimelineAuthenticationFilter.java  |  55 +++
 ...TimelineAuthenticationFilterInitializer.java | 129 +++
 ...elineDelgationTokenSecretManagerService.java |  83 +
 .../server/timeline/security/package-info.java  |  26 ++
 .../util/timeline/TimelineServerUtils.java  |  92 +
 .../yarn/server/util/timeline/package-info.java |  25 ++
 ...TimelineAuthenticationFilterInitializer.java |  76 +
 .../collector/NodeTimelineCollectorManager.java |  66 +++-
 .../PerNodeTimelineCollectorsAuxService.java|   5 +-
 .../collector/TimelineCollectorManager.java |   6 +-
 ...neV2DelegationTokenSecretManagerService.java |  78 +
 .../timelineservice/security/package-info.java  |  25 ++
 20 files changed, 1227 insertions(+), 892 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/879de512/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
index 85e5f2d..4e3a1e6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/ApplicationHistoryServer.java
@@ -20,14 +20,14 @@ package 
org.apache.hadoop.yarn.server.applicationhistoryservice;
 
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.util.ArrayList;
+import java.util.LinkedHashSet;
+import java.util.Set;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.source.JvmMetrics;
-import org.apache.hadoop.security.AuthenticationFilterInitializer;
 import org.apache.hadoop.security.HttpCrossOriginFilterInitializer;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.service.CompositeService;
@@ -47,10 +47,9 @@ import 
org.apache.hadoop.yarn.server.timeline.LeveldbTimelineStore;
 import org.apache.hadoop.yarn.server.timeline.TimelineDataManager;
 import org.apache.hadoop.yarn.server.timeline.TimelineStore;
 import org.apache.hadoop.yarn.server.timeline.security.TimelineACLsManager;
-import 
org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilter;
-import 
org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
-import 
org.apache.hadoop.yarn.server.timeline.security.TimelineDelegationTokenSecretManagerService;
+import 
org.apache.hadoop.yarn.server.timeline.security.TimelineV1DelegationTokenSecretManagerService;
 import 
org.apache.hadoop.yarn.server.timeline.webapp.CrossOriginFilterInitializer;
+import org.apache.hadoop.yarn.server.util.timeline.TimelineServerUtils;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
@@ -75,7 +74,7 @@ public class A

[40/50] [abbrv] hadoop git commit: YARN-6905. Addendum to fix TestTimelineReaderWebServicesHBaseStorage due to missing FastNumberFormat

2017-08-29 Thread varunsaxena
YARN-6905. Addendum to fix TestTimelineReaderWebServicesHBaseStorage due to 
missing FastNumberFormat


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/512068a0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/512068a0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/512068a0

Branch: refs/heads/trunk
Commit: 512068a01f8a74843d2e17c0d1c7c71cf3224a93
Parents: 16ba4f5
Author: Varun Saxena 
Authored: Tue Aug 22 18:48:18 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:54 2017 +0530

--
 .../reader/TestTimelineReaderWebServicesHBaseStorage.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/512068a0/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 5acf1f4..b2029ca 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -370,7 +370,8 @@ public class TestTimelineReaderWebServicesHBaseStorage
 ApplicationId appId =
 BuilderUtils.newApplicationId(timestamp, count++);
 ApplicationEntity appEntity = new ApplicationEntity();
-appEntity.setId(appId.toString());
+appEntity.setId(
+HBaseTimelineStorageUtils.convertApplicationIdToString(appId));
 appEntity.setCreatedTime(timestamp);
 
 TimelineEvent created = new TimelineEvent();
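
For illustration, the pattern the test switches to (every call below appears in the diff above; the id arguments are example values):

    ApplicationId appId = BuilderUtils.newApplicationId(System.currentTimeMillis(), 1);
    ApplicationEntity appEntity = new ApplicationEntity();
    // Derive the entity id from the storage util rather than ApplicationId#toString().
    appEntity.setId(HBaseTimelineStorageUtils.convertApplicationIdToString(appId));
    appEntity.setCreatedTime(System.currentTimeMillis());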





[37/50] [abbrv] hadoop git commit: YARN-6850 Ensure that supplemented timestamp is stored only for flow run metrics (Contributed by Varun Saxena via Vrushali C)

2017-08-29 Thread varunsaxena
YARN-6850 Ensure that supplemented timestamp is stored only for flow run 
metrics (Contributed by Varun Saxena via Vrushali C)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/61136d03
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/61136d03
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/61136d03

Branch: refs/heads/trunk
Commit: 61136d03f25377c62aefde859c82df18e37b975e
Parents: 70078e9
Author: Vrushali C 
Authored: Mon Jul 24 15:54:52 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../storage/common/ColumnHelper.java| 44 +++-
 .../common/HBaseTimelineStorageUtils.java   | 10 +
 .../storage/flow/FlowRunColumnPrefix.java   |  2 +-
 .../storage/reader/ApplicationEntityReader.java |  8 ++--
 .../storage/reader/GenericEntityReader.java |  8 ++--
 5 files changed, 44 insertions(+), 28 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/61136d03/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
index 46e427e..9f95d44 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/ColumnHelper.java
@@ -52,11 +52,28 @@ public class ColumnHelper {
 
   private final ValueConverter converter;
 
+  private final boolean supplementTs;
+
   public ColumnHelper(ColumnFamily columnFamily) {
 this(columnFamily, GenericConverter.getInstance());
   }
 
   public ColumnHelper(ColumnFamily columnFamily, ValueConverter converter) {
+this(columnFamily, converter, false);
+  }
+
+  /**
+   * @param columnFamily column family implementation.
+   * @param converter converter use to encode/decode values stored in the 
column
+   * or column prefix.
+   * @param needSupplementTs flag to indicate if cell timestamp needs to be
+   * modified for this column by calling
+   * {@link TimestampGenerator#getSupplementedTimestamp(long, String)}. 
This
+   * would be required for columns(such as metrics in flow run table) where
+   * potential collisions can occur due to same timestamp.
+   */
+  public ColumnHelper(ColumnFamily columnFamily, ValueConverter converter,
+  boolean needSupplementTs) {
 this.columnFamily = columnFamily;
 columnFamilyBytes = columnFamily.getBytes();
 if (converter == null) {
@@ -64,6 +81,7 @@ public class ColumnHelper {
 } else {
   this.converter = converter;
 }
+this.supplementTs = needSupplementTs;
   }
 
   /**
@@ -106,18 +124,24 @@ public class ColumnHelper {
   }
 
   /*
-   * Figures out the cell timestamp used in the Put For storing into flow run
-   * table. We would like to left shift the timestamp and supplement it with 
the
-   * AppId id so that there are no collisions in the flow run table's cells
+   * Figures out the cell timestamp used in the Put For storing.
+   * Will supplement the timestamp if required. Typically done for flow run
+   * table.If we supplement the timestamp, we left shift the timestamp and
+   * supplement it with the AppId id so that there are no collisions in the 
flow
+   * run table's cells.
*/
   private long getPutTimestamp(Long timestamp, Attribute[] attributes) {
 if (timestamp == null) {
   timestamp = System.currentTimeMillis();
 }
-String appId = getAppIdFromAttributes(attributes);
-long supplementedTS = TimestampGenerator.getSupplementedTimestamp(
-timestamp, appId);
-return supplementedTS;
+if (!this.supplementTs) {
+  return timestamp;
+} else {
+  String appId = getAppIdFromAttributes(attributes);
+  long supplementedTS = TimestampGenerator.getSupplementedTimestamp(
+  timestamp, appId);
+  return supplementedTS;
+}
   }
 
   private String getAppIdFromAttributes(Attribute[] attributes) {
@@ -234,9 +258,9 @@ public class ColumnHelper {
   for (Entry cell : cells.entrySet()) {
 V value =
 

[23/50] [abbrv] hadoop git commit: YARN-6133. [ATSv2 Security] Renew delegation token for app automatically if an app collector is active. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
YARN-6133. [ATSv2 Security] Renew delegation token for app automatically if an 
app collector is active. Contributed by Varun Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/354be99d
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/354be99d
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/354be99d

Branch: refs/heads/trunk
Commit: 354be99dbf3b6effb45032b574210fd7161d83d4
Parents: 7594d1d
Author: Rohith Sharma K S 
Authored: Thu Aug 10 11:12:57 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../security/TestTimelineAuthFilterForV2.java   | 27 +-
 .../collector/AppLevelTimelineCollector.java| 17 +++-
 .../collector/NodeTimelineCollectorManager.java | 88 +++-
 .../collector/TimelineCollector.java|  7 ++
 .../collector/TimelineCollectorManager.java |  8 +-
 ...neV2DelegationTokenSecretManagerService.java |  6 ++
 6 files changed, 139 insertions(+), 14 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/354be99d/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
index 0ddf287..f1d5185 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/security/TestTimelineAuthFilterForV2.java
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
 import static org.mockito.Mockito.verify;
@@ -183,6 +184,13 @@ public class TestTimelineAuthFilterForV2 {
   conf.set(YarnConfiguration.YARN_HTTP_POLICY_KEY,
   HttpConfig.Policy.HTTP_ONLY.name());
 }
+    if (!withKerberosLogin) {
+      // For timeline delegation token based access, set delegation token renew
+      // interval to 100 ms. to test if timeline delegation token for the app is
+      // renewed automatically if app is still alive.
+      conf.setLong(
+          YarnConfiguration.TIMELINE_DELEGATION_TOKEN_RENEW_INTERVAL, 100);
+    }
 UserGroupInformation.setConfiguration(conf);
 collectorManager = new DummyNodeTimelineCollectorManager();
 auxService = PerNodeTimelineCollectorsAuxService.launchServer(
@@ -282,12 +290,12 @@ public class TestTimelineAuthFilterForV2 {
   }
 
   private void publishAndVerifyEntity(ApplicationId appId, File entityTypeDir,
-  String entityType) throws Exception {
+  String entityType, int numEntities) throws Exception {
 TimelineV2Client client = createTimelineClientForUGI(appId);
 try {
 // Sync call. Results available immediately.
   client.putEntities(createEntity("entity1", entityType));
-  assertEquals(1, entityTypeDir.listFiles().length);
+  assertEquals(numEntities, entityTypeDir.listFiles().length);
   verifyEntity(entityTypeDir, "entity1", entityType);
   // Async call.
   client.putEntitiesAsync(createEntity("entity2", entityType));
@@ -312,12 +320,22 @@ public class TestTimelineAuthFilterForV2 {
 KerberosTestUtils.doAs(HTTP_USER + "/localhost", new Callable() {
   @Override
   public Void call() throws Exception {
-publishAndVerifyEntity(appId, entityTypeDir, entityType);
+publishAndVerifyEntity(appId, entityTypeDir, entityType, 1);
 return null;
   }
 });
   } else {
-publishAndVerifyEntity(appId, entityTypeDir, entityType);
+publishAndVerifyEntity(appId, entityTypeDir, entityType, 1);
+// Verify if token is renewed automatically and entities can still be
+// published.
+Thread.sleep(1000);
+publishAndVerifyEntity(appId, entityTypeDir, entityType, 2);
+AppLevelTimelineCollector collector =
+(AppLevelTimelineCollector) collectorManager.get(appId);
+assertNotNull(collector);
+verify(collect

[27/50] [abbrv] hadoop git commit: YARN-6874. Supplement timestamp for min start/max end time columns in flow run table to avoid overwrite (Vrushali C via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6874. Supplement timestamp for min start/max end time columns in flow run 
table to avoid overwrite (Vrushali C via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/60765aff
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/60765aff
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/60765aff

Branch: refs/heads/trunk
Commit: 60765aff9b57270566b6ec5b5b71433261a168e3
Parents: 354be99
Author: Varun Saxena 
Authored: Thu Aug 10 11:01:19 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../yarn/server/timelineservice/storage/flow/FlowRunColumn.java| 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/60765aff/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
index 7a39120..3797faf 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowRunColumn.java
@@ -76,7 +76,7 @@ public enum FlowRunColumn implements Column<FlowRunTable> {
     // Future-proof by ensuring the right column prefix hygiene.
     this.columnQualifierBytes = Bytes.toBytes(Separator.SPACE
         .encode(columnQualifier));
-    this.column = new ColumnHelper<FlowRunTable>(columnFamily, converter);
+    this.column = new ColumnHelper<FlowRunTable>(columnFamily, converter, true);
   }
 
   /**
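
For illustration, what the supplemented timestamp buys for flow run metrics (a sketch only; the application ids below are made up): writes for different apps in the same wall-clock millisecond map to distinct cell timestamps instead of overwriting one another.

    long now = System.currentTimeMillis();
    // Same wall-clock time, different apps: the generator left-shifts the
    // timestamp and mixes in the app id so the resulting cell timestamps differ.
    long tsApp1 = TimestampGenerator.getSupplementedTimestamp(now, "application_1_0001");
    long tsApp2 = TimestampGenerator.getSupplementedTimestamp(now, "application_1_0002");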





[21/50] [abbrv] hadoop git commit: YARN-6253. FlowAcitivityColumnPrefix.store(byte[] rowKey, ...) drops timestamp. Contributed by Haibo Chen.

2017-08-29 Thread varunsaxena
YARN-6253. FlowAcitivityColumnPrefix.store(byte[] rowKey, ...) drops timestamp. 
Contributed by Haibo Chen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/cf30b3b9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/cf30b3b9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/cf30b3b9

Branch: refs/heads/trunk
Commit: cf30b3b9144b559a5f6fdfcf98bf0a15ebd17474
Parents: ff43b8d
Author: Sangjin Lee 
Authored: Tue Feb 28 16:10:25 2017 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../timelineservice/storage/flow/FlowActivityColumnPrefix.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/cf30b3b9/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
index 439e0c8..5e7a5d6 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/FlowActivityColumnPrefix.java
@@ -271,7 +271,7 @@ public enum FlowActivityColumnPrefix
 byte[] columnQualifier = getColumnPrefixBytes(qualifier);
 Attribute[] combinedAttributes =
 HBaseTimelineStorageUtils.combineAttributes(attributes, this.aggOp);
-column.store(rowKey, tableMutator, columnQualifier, null, inputValue,
+column.store(rowKey, tableMutator, columnQualifier, timestamp, inputValue,
 combinedAttributes);
   }
 }
\ No newline at end of file





[09/50] [abbrv] hadoop git commit: YARN-6146. Add Builder methods for TimelineEntityFilters (Haibo Chen via Varun Saxena)

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87b72b4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
index 4a9e53e..4d3e769 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -76,12 +76,44 @@ final class TimelineReaderWebServicesUtils {
   String isRelatedTo, String infofilters, String conffilters,
   String metricfilters, String eventfilters,
   String fromid) throws TimelineParseException {
-return new TimelineEntityFilters(parseLongStr(limit),
-parseLongStr(createdTimeStart), parseLongStr(createdTimeEnd),
-parseRelationFilters(relatesTo), parseRelationFilters(isRelatedTo),
-parseKVFilters(infofilters, false), parseKVFilters(conffilters, true),
-parseMetricFilters(metricfilters), parseEventFilters(eventfilters),
-parseStr(fromid));
+return createTimelineEntityFilters(
+limit, parseLongStr(createdTimeStart),
+parseLongStr(createdTimeEnd),
+relatesTo, isRelatedTo, infofilters,
+conffilters, metricfilters, eventfilters, fromid);
+  }
+
+  /**
+   * Parse the passed filters represented as strings and convert them into a
+   * {@link TimelineEntityFilters} object.
+   * @param limit Limit to number of entities to return.
+   * @param createdTimeStart Created time start for the entities to return.
+   * @param createdTimeEnd Created time end for the entities to return.
+   * @param relatesTo Entities to return must match relatesTo.
+   * @param isRelatedTo Entities to return must match isRelatedTo.
+   * @param infofilters Entities to return must match these info filters.
+   * @param conffilters Entities to return must match these metric filters.
+   * @param metricfilters Entities to return must match these metric filters.
+   * @param eventfilters Entities to return must match these event filters.
+   * @return a {@link TimelineEntityFilters} object.
+   * @throws TimelineParseException if any problem occurs during parsing.
+   */
+  static TimelineEntityFilters createTimelineEntityFilters(String limit,
+  Long createdTimeStart, Long createdTimeEnd, String relatesTo,
+  String isRelatedTo, String infofilters, String conffilters,
+  String metricfilters, String eventfilters,
+  String fromid) throws TimelineParseException {
+return new TimelineEntityFilters.Builder()
+.entityLimit(parseLongStr(limit))
+.createdTimeBegin(createdTimeStart)
+.createTimeEnd(createdTimeEnd)
+.relatesTo(parseRelationFilters(relatesTo))
+.isRelatedTo(parseRelationFilters(isRelatedTo))
+.infoFilters(parseKVFilters(infofilters, false))
+.configFilters(parseKVFilters(conffilters, true))
+.metricFilters(parseMetricFilters(metricfilters))
+.eventFilters(parseEventFilters(eventfilters))
+.fromId(parseStr(fromid)).build();
   }
 
   /**
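
For illustration, callers can now assemble the same filters directly with the builder, for example "at most 10 entities created in a given time window" (values are examples; only builder methods visible in the diff above are used):

    long now = System.currentTimeMillis();
    TimelineEntityFilters filters = new TimelineEntityFilters.Builder()
        .entityLimit(10L)
        .createdTimeBegin(now - 3600000L)   // example: the last hour
        .createTimeEnd(now)
        .build();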

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b87b72b4/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
index 35af169..1bc66db 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestFileSystemTimelineReaderImpl.java
@@ -398,7 +398,7 @@ public class Te

[13/50] [abbrv] hadoop git commit: YARN-6316 Provide help information and documentation for TimelineSchemaCreator (Contributed by Haibo Chen via Vrushali C)

2017-08-29 Thread varunsaxena
YARN-6316 Provide help information and documentation for TimelineSchemaCreator 
(Contributed by Haibo Chen via Vrushali C)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/24447b36
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/24447b36
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/24447b36

Branch: refs/heads/trunk
Commit: 24447b36260f930acec60b9306f7e381f816a99e
Parents: f6a51dc
Author: vrushali 
Authored: Thu Jun 1 14:18:43 2017 -0700
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md   | 3 +++
 1 file changed, 3 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/24447b36/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
index d32e56d..678c406 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/TimelineServiceV2.md
@@ -241,6 +241,9 @@ are testing. For example, you can use `-skipExistingTable` 
(`-s` for short) to s
 and continue to create other tables rather than failing the schema creation. 
By default, the tables
 will have a schema prefix of "prod.". When no option or '-help' ('-h' for 
short) is provided, the
 command usage is printed.
+and continue to create other tables rather than failing the schema creation. 
When no option or '-help'
+('-h' for short) is provided, the command usage is printed. By default, the 
tables
+will have a schema prefix of "prod."
 
  Enabling Timeline Service v.2
 Following are the basic configurations to start Timeline service v.2:





[39/50] [abbrv] hadoop git commit: YARN-6733. Add table for storing sub-application entities. Contributed by Vrushali C.

2017-08-29 Thread varunsaxena
YARN-6733. Add table for storing sub-application entities. Contributed by 
Vrushali C.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a990ff70
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a990ff70
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a990ff70

Branch: refs/heads/trunk
Commit: a990ff70c25e2ab746578500720c531f23e0851e
Parents: 61136d0
Author: Rohith Sharma K S 
Authored: Tue Jul 25 15:25:21 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../storage/TimelineSchemaCreator.java  |  44 +++
 .../storage/application/ApplicationTable.java   |   2 +-
 .../storage/entity/EntityRowKey.java|   6 +-
 .../subapplication/SubApplicationColumn.java| 108 +++
 .../SubApplicationColumnFamily.java |  68 +
 .../SubApplicationColumnPrefix.java | 250 
 .../subapplication/SubApplicationRowKey.java| 290 +++
 .../SubApplicationRowKeyPrefix.java |  89 ++
 .../subapplication/SubApplicationTable.java | 174 +++
 .../storage/subapplication/package-info.java|  28 ++
 .../storage/common/TestKeyConverters.java   |   4 +
 .../storage/common/TestRowKeys.java |  26 ++
 .../storage/common/TestRowKeysAsString.java |  29 ++
 13 files changed, 1114 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a990ff70/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
index 15885ce..210fd85 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
@@ -43,6 +43,7 @@ import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelin
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowActivityTable;
 import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
+import 
org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable;
 
 import com.google.common.annotations.VisibleForTesting;
 import org.slf4j.Logger;
@@ -63,7 +64,9 @@ public final class TimelineSchemaCreator {
   LoggerFactory.getLogger(TimelineSchemaCreator.class);
   private static final String SKIP_EXISTING_TABLE_OPTION_SHORT = "s";
   private static final String APP_METRICS_TTL_OPTION_SHORT = "ma";
+  private static final String SUB_APP_METRICS_TTL_OPTION_SHORT = "msa";
   private static final String APP_TABLE_NAME_SHORT = "a";
+  private static final String SUB_APP_TABLE_NAME_SHORT = "sa";
   private static final String APP_TO_FLOW_TABLE_NAME_SHORT = "a2f";
   private static final String ENTITY_METRICS_TTL_OPTION_SHORT = "me";
   private static final String ENTITY_TABLE_NAME_SHORT = "e";
@@ -121,6 +124,21 @@ public final class TimelineSchemaCreator {
 new ApplicationTable().setMetricsTTL(appMetricsTTL, hbaseConf);
   }
 
+  // Grab the subApplicationTableName argument
+  String subApplicationTableName = commandLine.getOptionValue(
+  SUB_APP_TABLE_NAME_SHORT);
+  if (StringUtils.isNotBlank(subApplicationTableName)) {
+hbaseConf.set(SubApplicationTable.TABLE_NAME_CONF_NAME,
+subApplicationTableName);
+  }
+  // Grab the subApplication metrics TTL
+  String subApplicationTableMetricsTTL = commandLine
+  .getOptionValue(SUB_APP_METRICS_TTL_OPTION_SHORT);
+  if (StringUtils.isNotBlank(subApplicationTableMetricsTTL)) {
+int subAppMetricsTTL = Integer.parseInt(subApplicationTableMetricsTTL);
+new SubApplicationTable().setMetricsTTL(subAppMetricsTTL, hbaseConf);
+  }
+
   // create all table schemas in hbase
   final boolean skipExisting = commandLine.hasOption(
   SKIP_EXISTING_TABLE_OPTION_SHORT);
@@ -182,6 +200,18 @@ public final class TimelineSchemaCreat
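
For orientation, here is a small sketch of what the two new hooks amount to when driven programmatically rather than through the command-line options; the table name and TTL values are invented examples, not defaults shipped with the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable;

public class SubAppTableConfigSketch {
  public static void main(String[] args) {
    // Override the sub-application table name and its metrics TTL; both
    // values below are illustrative only.
    Configuration hbaseConf = HBaseConfiguration.create();
    hbaseConf.set(SubApplicationTable.TABLE_NAME_CONF_NAME,
        "prod.timelineservice.subapplication");
    new SubApplicationTable().setMetricsTTL(30 * 24 * 60 * 60, hbaseConf); // 30 days
  }
}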

[02/50] [abbrv] hadoop git commit: YARN-5739. Provide timeline reader API to list available timeline entity types for one application. Contributed by Li Lu.

2017-08-29 Thread varunsaxena
YARN-5739. Provide timeline reader API to list available timeline entity types 
for one application. Contributed by Li Lu.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4481561e
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4481561e
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4481561e

Branch: refs/heads/trunk
Commit: 4481561e4a3433197dd8e73f38856eef84f0fd03
Parents: 092fead
Author: Sangjin Lee 
Authored: Tue Dec 6 08:28:43 2016 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:51 2017 +0530

--
 .../storage/DataGeneratorForTest.java   |  47 -
 .../TestHBaseTimelineStorageEntities.java   |  23 +++
 .../storage/HBaseTimelineReaderImpl.java|   8 +
 .../common/HBaseTimelineStorageUtils.java   |  36 
 .../reader/AbstractTimelineStorageReader.java   | 145 +++
 .../storage/reader/ApplicationEntityReader.java |  14 +-
 .../storage/reader/EntityTypeReader.java| 181 +++
 .../storage/reader/GenericEntityReader.java |  77 +---
 .../storage/reader/TimelineEntityReader.java|  29 +--
 .../reader/TimelineEntityReaderFactory.java |  13 ++
 .../reader/TimelineReaderManager.java   |  20 ++
 .../reader/TimelineReaderWebServices.java   | 102 +++
 .../storage/FileSystemTimelineReaderImpl.java   |  21 +++
 .../timelineservice/storage/TimelineReader.java |  13 ++
 14 files changed, 614 insertions(+), 115 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4481561e/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
index 5cbb781..cafacab 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/DataGeneratorForTest.java
@@ -31,6 +31,7 @@ import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEvent;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric;
 import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric.Type;
+import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 
 final class DataGeneratorForTest {
   static void loadApps(HBaseTestingUtility util) throws IOException {
@@ -358,6 +359,46 @@ final class DataGeneratorForTest {
 relatesTo3.put("container2", relatesToSet14);
 entity2.setRelatesToEntities(relatesTo3);
 te.addEntity(entity2);
+
+// For listing types
+for (int i = 0; i < 10; i++) {
+  TimelineEntity entity3 = new TimelineEntity();
+  String id3 = "typeTest" + i;
+  entity3.setId(id3);
+  StringBuilder typeName = new StringBuilder("newType");
+  for (int j = 0; j < (i % 3); j++) {
+typeName.append(" ").append(j);
+  }
+  entity3.setType(typeName.toString());
+  entity3.setCreatedTime(cTime + 80L + i);
+  te.addEntity(entity3);
+}
+
+// Create app entity for app to flow table
+TimelineEntities appTe1 = new TimelineEntities();
+TimelineEntity entityApp1 = new TimelineEntity();
+String appName1 = "application_123111_";
+entityApp1.setId(appName1);
+entityApp1.setType(TimelineEntityType.YARN_APPLICATION.toString());
+entityApp1.setCreatedTime(cTime + 40L);
+TimelineEvent appCreationEvent1 = new TimelineEvent();
+appCreationEvent1.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
+appCreationEvent1.setTimestamp(cTime);
+entityApp1.addEvent(appCreationEvent1);
+appTe1.addEntity(entityApp1);
+
+TimelineEntities appTe2 = new TimelineEntities();
+TimelineEntity entityApp2 = new TimelineEntity();
+String appName2 = "application_123111_1112";
+entityApp2.setId(appName2);
+entityApp2.setType(TimelineEntityType.YARN_APPLICATION.toString());
+entityApp2.setCreatedTime(cTime + 50L);
+TimelineEvent appCreationEvent2 = new TimelineEvent();
+appCreat

[04/50] [abbrv] hadoop git commit: YARN-5585. [Atsv2] Reader side changes for entity prefix and support for pagination via additional filters (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/02a9710a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
index 08e5405..780cfd0 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineUIDConverter.java
@@ -155,12 +155,14 @@ enum TimelineUIDConverter {
 // Flow information exists.
 String[] entityTupleArr = {context.getClusterId(), context.getUserId(),
 context.getFlowName(), context.getFlowRunId().toString(),
-context.getAppId(), context.getEntityType(), 
context.getEntityId()};
+context.getAppId(), context.getEntityType(),
+context.getEntityIdPrefix().toString(), context.getEntityId() };
 return joinAndEscapeUIDParts(entityTupleArr);
   } else {
 // Only entity and app information exists. Flow info does not exist.
 String[] entityTupleArr = {context.getClusterId(), context.getAppId(),
-context.getEntityType(), context.getEntityId()};
+context.getEntityType(), context.getEntityIdPrefix().toString(),
+context.getEntityId() };
 return joinAndEscapeUIDParts(entityTupleArr);
   }
 }
@@ -171,20 +173,21 @@ enum TimelineUIDConverter {
 return null;
   }
   List entityTupleList = splitUID(uId);
-  // Should have 7 parts i.e. cluster, user, flow name, flowrun id, app id,
-  // entity type and entity id OR should have 4 parts i.e. cluster, app id,
+  // Should have 8 parts i.e. cluster, user, flow name, flowrun id, app id,
+  // entity type, entity id prefix and entity id OR should have 5 parts i.e.
+  // cluster, app id, entity type, entity id prefix and entity id.
-  if (entityTupleList.size() == 7) {
+  if (entityTupleList.size() == 8) {
 // Flow information exists.
 return new TimelineReaderContext(entityTupleList.get(0),
 entityTupleList.get(1), entityTupleList.get(2),
 Long.parseLong(entityTupleList.get(3)), entityTupleList.get(4),
-entityTupleList.get(5), entityTupleList.get(6));
-  } else if (entityTupleList.size() == 4) {
+entityTupleList.get(5), Long.parseLong(entityTupleList.get(6)),
+entityTupleList.get(7));
+  } else if (entityTupleList.size() == 5) {
 // Flow information does not exist.
 return new TimelineReaderContext(entityTupleList.get(0), null, null,
 null, entityTupleList.get(1), entityTupleList.get(2),
-entityTupleList.get(3));
+Long.parseLong(entityTupleList.get(3)), entityTupleList.get(4));
   } else {
 return null;
   }
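
To make the new layout concrete, the sketch below shows the eight components an entity UID now carries when flow context is present, with the entity id prefix slotted in ahead of the entity id. The sample values, and joining with a bare '!' without escaping, are simplifications for illustration only.

public class EntityUidSketch {
  public static void main(String[] args) {
    // Eight UID components when flow information exists; all values invented.
    String[] parts = {
        "yarn-cluster",                    // cluster id
        "alice",                           // user id
        "wordcount-flow",                  // flow name
        "1460419200000",                   // flow run id
        "application_1460419579913_0236",  // app id
        "YARN_CONTAINER",                  // entity type
        "0",                               // entity id prefix (new in this change)
        "container_e01_1460419579913_0236_01_000002"  // entity id
    };
    // The real converter also escapes the delimiter; a bare join is enough here.
    System.out.println(String.join("!", parts));
  }
}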

http://git-wip-us.apache.org/repos/asf/hadoop/blob/02a9710a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
index d7c1552..1e77155 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineReader.java
@@ -125,8 +125,8 @@ public interface TimelineReader extends Service {
*flowRunId - Context flow run id.
*appId - Context app id.
*
-   *Although entityId is also part of context, it has no meaning for
-   *getEntities.
+   *Although entityIdPrefix and entityId are also part of context, 
+   *they have no meaning for getEntities.
*Fields in context which are mandatory depends on entity type. Entity
*type is always m

[34/50] [abbrv] hadoop git commit: YARN-4455. Support fetching metrics by time range. Contributed by Varun Saxena.

2017-08-29 Thread varunsaxena
YARN-4455. Support fetching metrics by time range. Contributed by Varun Saxena.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/70078e91
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/70078e91
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/70078e91

Branch: refs/heads/trunk
Commit: 70078e91e3287aad51f6ddf6acd9ed75e7c6760d
Parents: 6604131
Author: Rohith Sharma K S 
Authored: Thu Jul 20 12:16:06 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 ...stTimelineReaderWebServicesHBaseStorage.java | 207 -
 .../storage/DataGeneratorForTest.java   |  27 +--
 .../storage/TestHBaseTimelineStorageApps.java   | 209 -
 .../TestHBaseTimelineStorageEntities.java   | 166 ++
 .../storage/flow/TestHBaseStorageFlowRun.java   |  18 +-
 .../storage/common/ColumnHelper.java|   1 -
 .../common/HBaseTimelineStorageUtils.java   |  17 ++
 .../storage/reader/ApplicationEntityReader.java |  14 ++
 .../storage/reader/GenericEntityReader.java |  12 +-
 .../reader/TimelineDataToRetrieve.java  |  35 ++-
 .../reader/TimelineReaderWebServices.java   | 226 ---
 .../reader/TimelineReaderWebServicesUtils.java  |   6 +-
 .../TestFileSystemTimelineReaderImpl.java   |  15 +-
 13 files changed, 777 insertions(+), 176 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/70078e91/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index 6b0f95e..302f8e0 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -328,7 +328,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   userEntity.setType("entitytype");
   userEntity.setId("entityid-" + i);
   userEntity.setIdPrefix(11 - i);
-  userEntity.setCreatedTime(System.currentTimeMillis());
+  userEntity.setCreatedTime(ts);
   userEntities.addEntity(userEntity);
 }
 
@@ -344,7 +344,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
   flowVersion2, runid2, entity3.getId(), te3);
   hbi.write(cluster, user, flow, flowVersion, runid,
   "application_11_", userEntities);
-  writeApplicationEntities(hbi);
+  writeApplicationEntities(hbi, ts);
   hbi.flush();
 } finally {
   if (hbi != null) {
@@ -353,26 +353,25 @@ public class TestTimelineReaderWebServicesHBaseStorage
 }
   }
 
-  static void writeApplicationEntities(HBaseTimelineWriterImpl hbi)
-  throws IOException {
-long currentTimeMillis = System.currentTimeMillis();
+  static void writeApplicationEntities(HBaseTimelineWriterImpl hbi,
+  long timestamp) throws IOException {
 int count = 1;
 for (long i = 1; i <= 3; i++) {
   for (int j = 1; j <= 5; j++) {
 TimelineEntities te = new TimelineEntities();
 ApplicationId appId =
-BuilderUtils.newApplicationId(currentTimeMillis, count++);
+BuilderUtils.newApplicationId(timestamp, count++);
 ApplicationEntity appEntity = new ApplicationEntity();
 appEntity.setId(appId.toString());
-appEntity.setCreatedTime(currentTimeMillis);
+appEntity.setCreatedTime(timestamp);
 
 TimelineEvent created = new TimelineEvent();
 created.setId(ApplicationMetricsConstants.CREATED_EVENT_TYPE);
-created.setTimestamp(currentTimeMillis);
+created.setTimestamp(timestamp);
 appEntity.addEvent(created);
 TimelineEvent finished = new TimelineEvent();
 finished.setId(ApplicationMetricsConstants.FINISHED_EVENT_TYPE);
-finished.setTimestamp(currentTimeMillis + i * j);
+finished.setTimestamp(timestamp + i * j);
 
 appEntity.addEvent(finished);
 

[11/50] [abbrv] hadoop git commit: Addendum for YARN-6064. Support fromId for flowRuns and flow/flowRun apps REST API's

2017-08-29 Thread varunsaxena
Addendum for YARN-6064. Support fromId for flowRuns and flow/flowRun apps REST 
API's


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5e0acee7
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5e0acee7
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5e0acee7

Branch: refs/heads/trunk
Commit: 5e0acee75e259c4e241c89b8227efb85f6ea953a
Parents: 6f65cf2
Author: Varun Saxena 
Authored: Thu Jan 19 10:15:28 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../storage/reader/ApplicationEntityReader.java| 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5e0acee7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
index 8a331c3..4e8286d 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/reader/ApplicationEntityReader.java
@@ -375,9 +375,9 @@ class ApplicationEntityReader extends GenericEntityReader {
   Long flowRunId = context.getFlowRunId();
   if (flowRunId == null) {
 AppToFlowRowKey appToFlowRowKey = new AppToFlowRowKey(
-context.getClusterId(), getFilters().getFromId());
-FlowContext flowContext =
-lookupFlowContext(appToFlowRowKey, hbaseConf, conn);
+getFilters().getFromId());
+FlowContext flowContext = lookupFlowContext(appToFlowRowKey,
+context.getClusterId(), hbaseConf, conn);
 flowRunId = flowContext.getFlowRunId();
   }
 





[07/50] [abbrv] hadoop git commit: YARN-6318. timeline service schema creator fails if executed from a remote machine (Sangjin Lee via Varun Saxena)

2017-08-29 Thread varunsaxena
YARN-6318. timeline service schema creator fails if executed from a remote 
machine (Sangjin Lee via Varun Saxena)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/44999aab
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/44999aab
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/44999aab

Branch: refs/heads/trunk
Commit: 44999aabc27636706851f1c71f3ce500ee6eb027
Parents: 18b3a80
Author: Varun Saxena 
Authored: Tue Mar 14 02:05:01 2017 +0530
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../storage/TimelineSchemaCreator.java  |  5 ++-
 .../common/HBaseTimelineStorageUtils.java   | 18 +--
 .../common/TestHBaseTimelineStorageUtils.java   | 33 
 3 files changed, 46 insertions(+), 10 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/44999aab/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
index 89be4f6..15885ce 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/TimelineSchemaCreator.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.util.GenericOptionsParser;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.apptoflow.AppToFlowTable;
 import 
org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
@@ -71,8 +72,10 @@ public final class TimelineSchemaCreator {
 
   public static void main(String[] args) throws Exception {
 
+LOG.info("Starting the schema creation");
 Configuration hbaseConf =
-HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(null);
+HBaseTimelineStorageUtils.getTimelineServiceHBaseConf(
+new YarnConfiguration());
 // Grab input args and allow for -Dxyz style arguments
 String[] otherArgs = new GenericOptionsParser(hbaseConf, args)
 .getRemainingArgs();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/44999aab/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
index f99637e..feef6af 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/src/main/java/org/apache/hadoop/yarn/server/timelineservice/storage/common/HBaseTimelineStorageUtils.java
@@ -17,14 +17,18 @@
 
 package org.apache.hadoop.yarn.server.timelineservice.storage.common;
 
+import java.io.IOException;
 import java.net.MalformedURLException;
 import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop

[14/50] [abbrv] hadoop git commit: YARN-6256. Add FROM_ID info key for timeline entities in reader response (Rohith Sharma K S via Varun Saxena)

2017-08-29 Thread varunsaxena
http://git-wip-us.apache.org/repos/asf/hadoop/blob/c3bd8d6a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
index 1a518d0..4a9e53e 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderWebServicesUtils.java
@@ -74,14 +74,14 @@ final class TimelineReaderWebServicesUtils {
   static TimelineEntityFilters createTimelineEntityFilters(String limit,
   String createdTimeStart, String createdTimeEnd, String relatesTo,
   String isRelatedTo, String infofilters, String conffilters,
-  String metricfilters, String eventfilters, String fromidprefix,
+  String metricfilters, String eventfilters,
   String fromid) throws TimelineParseException {
 return new TimelineEntityFilters(parseLongStr(limit),
 parseLongStr(createdTimeStart), parseLongStr(createdTimeEnd),
 parseRelationFilters(relatesTo), parseRelationFilters(isRelatedTo),
 parseKVFilters(infofilters, false), parseKVFilters(conffilters, true),
 parseMetricFilters(metricfilters), parseEventFilters(eventfilters),
-parseLongStr(fromidprefix), parseStr(fromid));
+parseStr(fromid));
   }
 
   /**





[30/50] [abbrv] hadoop git commit: YARN-6820. Restrict read access to timelineservice v2 data. Contributed by Vrushali C

2017-08-29 Thread varunsaxena
YARN-6820. Restrict read access to timelineservice v2 data. Contributed by 
Vrushali C


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/d5ff965f
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/d5ff965f
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/d5ff965f

Branch: refs/heads/trunk
Commit: d5ff965fee41fed28d3b94e11e546c1eb4c78a35
Parents: 60765af
Author: Jason Lowe 
Authored: Fri Aug 11 13:05:05 2017 -0500
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:53 2017 +0530

--
 .../hadoop/yarn/conf/YarnConfiguration.java |  31 ++
 .../reader/TimelineReaderServer.java|   5 +
 .../reader/TimelineReaderWebServicesUtils.java  |  29 +-
 ...elineReaderWhitelistAuthorizationFilter.java | 123 ++
 ...WhitelistAuthorizationFilterInitializer.java |  66 
 ...elineReaderWhitelistAuthorizationFilter.java | 380 +++
 6 files changed, 630 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/d5ff965f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index de90c69..ff9632a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -2167,6 +2167,37 @@ public class YarnConfiguration extends Configuration {
   + "hbase.configuration.file";
 
   /**
+   * The name for setting that enables or disables authentication checks
+   * for reading timeline service v2 data.
+   */
+  public static final String TIMELINE_SERVICE_READ_AUTH_ENABLED =
+  TIMELINE_SERVICE_PREFIX + "read.authentication.enabled";
+
+  /**
+   * The default setting for authentication checks for reading timeline
+   * service v2 data.
+   */
+  public static final Boolean DEFAULT_TIMELINE_SERVICE_READ_AUTH_ENABLED =
+  false;
+
+  /**
+   * The name for setting that lists the users and groups who are allowed
+   * to read timeline service v2 data. It is a comma separated list of
+   * users, followed by a space, then a comma separated list of groups.
+   * It will allow this list of users and groups to read the data
+   * and reject everyone else.
+   */
+  public static final String TIMELINE_SERVICE_READ_ALLOWED_USERS =
+  TIMELINE_SERVICE_PREFIX + "read.allowed.users";
+
+  /**
+   * The default value for list of the users who are allowed to read
+   * timeline service v2 data.
+   */
+  public static final String DEFAULT_TIMELINE_SERVICE_READ_ALLOWED_USERS =
+  "";
+
+  /**
* The setting that controls how long the final value of a metric of a
* completed app is retained before merging into the flow sum. Up to this 
time
* after an application is completed out-of-order values that arrive can be
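
To show how the two new settings fit together, a hedged sketch of enabling read authorization and whitelisting readers programmatically follows; the user and group names are invented, and in a real cluster these would normally be set in yarn-site.xml.

import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class TimelineReadAuthSketch {
  public static void main(String[] args) {
    YarnConfiguration conf = new YarnConfiguration();
    // Turn on authorization checks for timeline v2 reads (default is false).
    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_READ_AUTH_ENABLED, true);
    // Comma separated users, a space, then comma separated groups.
    conf.set(YarnConfiguration.TIMELINE_SERVICE_READ_ALLOWED_USERS,
        "user1,user2 admin,devops");
  }
}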

http://git-wip-us.apache.org/repos/asf/hadoop/blob/d5ff965f/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
index 61f2425..5c049ea 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice/src/main/java/org/apache/hadoop/yarn/server/timelineservice/reader/TimelineReaderServer.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import 
org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderAuthenticationFilterInitializer;
+import 
org.apache.hadoop.yarn.server.timelineservice.reader.security.TimelineReaderWhitelistAuthorizationFilterInitializer;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
 import org.ap

[18/50] [abbrv] hadoop git commit: YARN-4675. Reorganize TimelineClient and TimelineClientImpl into separate classes for ATSv1.x and ATSv2. Contributed by Naganarasimha G R.

2017-08-29 Thread varunsaxena
YARN-4675. Reorganize TimelineClient and TimelineClientImpl into separate 
classes for ATSv1.x and ATSv2. Contributed by Naganarasimha G R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/684de1a9
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/684de1a9
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/684de1a9

Branch: refs/heads/trunk
Commit: 684de1a9025261dcb6ab3b5ec9ba69738c947ecc
Parents: ccb38c1
Author: Sangjin Lee 
Authored: Thu Feb 16 18:43:31 2017 -0800
Committer: Varun Saxena 
Committed: Wed Aug 30 11:29:52 2017 +0530

--
 .../apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java | 2 --
 1 file changed, 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/684de1a9/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
--
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
index a1a31f9..cfa91f5 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
@@ -79,8 +79,6 @@ import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnException;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.util.TimelineServiceHelper;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.node.JsonNodeFactory;
 
 import com.google.common.annotations.VisibleForTesting;
 import com.sun.jersey.api.client.ClientHandlerException;





hadoop git commit: YARN-6982. Potential issue on setting AMContainerSpec#tokenConf to null before app is completed. Contributed by Manikandan R.

2017-08-29 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 870ef0b2f -> 877d96b83


YARN-6982. Potential issue on setting AMContainerSpec#tokenConf to null before 
app is completed. Contributed by Manikandan R.

(cherry picked from commit 4cae120c619811006b26b9a95680a98732572af6)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/877d96b8
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/877d96b8
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/877d96b8

Branch: refs/heads/branch-2
Commit: 877d96b83d67a4d1c4ccdd4a5333f4ade483d8ec
Parents: 870ef0b
Author: Rohith Sharma K S 
Authored: Wed Aug 30 10:45:11 2017 +0530
Committer: Rohith Sharma K S 
Committed: Wed Aug 30 10:50:57 2017 +0530

--
 .../server/resourcemanager/rmapp/RMAppImpl.java |   4 -
 .../yarn/server/resourcemanager/MockRM.java |  11 ++
 .../server/resourcemanager/TestRMRestart.java   | 115 +++
 3 files changed, 126 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/877d96b8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
index 181c5f4..a9351f7 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
@@ -1035,8 +1035,6 @@ public class RMAppImpl implements RMApp, Recoverable {
   app.submissionContext.getCancelTokensWhenComplete(),
   app.getUser(),
   BuilderUtils.parseTokensConf(app.submissionContext));
-  // set the memory free
-  app.submissionContext.getAMContainerSpec().setTokensConf(null);
 } catch (Exception e) {
   String msg = "Failed to fetch user credentials from application:"
   + e.getMessage();
@@ -1089,8 +1087,6 @@ public class RMAppImpl implements RMApp, Recoverable {
   app.submissionContext, false, app.applicationPriority));
   // send the ATS create Event
   app.sendATSCreateEvent();
-  // Set the memory free after submission context is persisted
-  app.submissionContext.getAMContainerSpec().setTokensConf(null);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/877d96b8/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
index 98e96f5..90507f3 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
@@ -513,6 +513,17 @@ public class MockRM extends ResourceManager {
 false, false, null, 0, null, true, priority);
   }
 
+  public RMApp submitApp(int masterMemory, Priority priority,
+  Credentials cred, ByteBuffer tokensConf) throws Exception {
+Resource resource = Resource.newInstance(masterMemory, 0);
+return submitApp(resource, "", UserGroupInformation.getCurrentUser()
+.getShortUserName(), null, false, null,
+super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
+YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), cred, null, true,
+false, false, null, 0, null, true, priority, null, null,
+tokensConf);
+  }
+
   public RMApp submitApp(int masterMemory, boolean unmanaged)
   throws Exception {
 return submitApp(masterMemory, "", UserGroupInformation.getCurrentUser()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/877d96b8/hadoop-yarn-project/hadoop

hadoop git commit: YARN-6982. Potential issue on setting AMContainerSpec#tokenConf to null before app is completed. Contributed by Manikandan R.

2017-08-29 Thread rohithsharmaks
Repository: hadoop
Updated Branches:
  refs/heads/trunk f3661fd08 -> 4cae120c6


YARN-6982. Potential issue on setting AMContainerSpec#tokenConf to null before 
app is completed. Contributed by Manikandan R.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/4cae120c
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/4cae120c
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/4cae120c

Branch: refs/heads/trunk
Commit: 4cae120c619811006b26b9a95680a98732572af6
Parents: f3661fd
Author: Rohith Sharma K S 
Authored: Wed Aug 30 10:45:11 2017 +0530
Committer: Rohith Sharma K S 
Committed: Wed Aug 30 10:45:11 2017 +0530

--
 .../server/resourcemanager/rmapp/RMAppImpl.java |   4 -
 .../yarn/server/resourcemanager/MockRM.java |  11 ++
 .../server/resourcemanager/TestRMRestart.java   | 115 +++
 3 files changed, 126 insertions(+), 4 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/4cae120c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
index 03be793..af7cec4 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
@@ -1086,8 +1086,6 @@ public class RMAppImpl implements RMApp, Recoverable {
   app.submissionContext.getCancelTokensWhenComplete(),
   app.getUser(),
   BuilderUtils.parseTokensConf(app.submissionContext));
-  // set the memory free
-  app.submissionContext.getAMContainerSpec().setTokensConf(null);
 } catch (Exception e) {
   String msg = "Failed to fetch user credentials from application:"
   + e.getMessage();
@@ -1140,8 +1138,6 @@ public class RMAppImpl implements RMApp, Recoverable {
   app.submissionContext, false, app.applicationPriority));
   // send the ATS create Event
   app.sendATSCreateEvent();
-  // Set the memory free after submission context is persisted
-  app.submissionContext.getAMContainerSpec().setTokensConf(null);
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4cae120c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
index e967807..1235774 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
@@ -513,6 +513,17 @@ public class MockRM extends ResourceManager {
 false, false, null, 0, null, true, priority);
   }
 
+  public RMApp submitApp(int masterMemory, Priority priority,
+  Credentials cred, ByteBuffer tokensConf) throws Exception {
+Resource resource = Resource.newInstance(masterMemory, 0);
+return submitApp(resource, "", UserGroupInformation.getCurrentUser()
+.getShortUserName(), null, false, null,
+super.getConfig().getInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
+YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS), cred, null, true,
+false, false, null, 0, null, true, priority, null, null,
+tokensConf);
+  }
+
   public RMApp submitApp(int masterMemory, boolean unmanaged)
   throws Exception {
 return submitApp(masterMemory, "", UserGroupInformation.getCurrentUser()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/4cae120c/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/or
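
A hedged sketch of how a test might call the new MockRM overload; the surrounding MockRM setup is assumed, and the empty tokensConf buffer is a placeholder (real tests serialize a Configuration into it).

import java.nio.ByteBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;

// Inside a test with an already started MockRM instance `rm`:
Credentials creds = new Credentials();
ByteBuffer tokensConf = ByteBuffer.allocate(0); // placeholder token configuration
RMApp app = rm.submitApp(1024, Priority.newInstance(0), creds, tokensConf);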

hadoop git commit: HDFS-12191. Provide option to not capture the accessTime change of a file to snapshot if no other modification has been done to this file. Contributed by Yongjun Zhang.

2017-08-29 Thread yjzhangal
Repository: hadoop
Updated Branches:
  refs/heads/branch-2 036cbd745 -> 870ef0b2f


HDFS-12191. Provide option to not capture the accessTime change of a file to 
snapshot if no other modification has been done to this file. Contributed by 
Yongjun Zhang.

(cherry picked from commit cf93d60d3f032000e5b78a08d320793d78799f3d)

Conflicts:
hadoop-hdfs-project/hadoop-hdfs/src/main/resources/hdfs-default.xml

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSDirAttrOp.java


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/870ef0b2
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/870ef0b2
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/870ef0b2

Branch: refs/heads/branch-2
Commit: 870ef0b2fd26c9f748e9b309cf1a5235e31fc8ab
Parents: 036cbd7
Author: Yongjun Zhang 
Authored: Tue Aug 29 16:57:03 2017 -0700
Committer: Yongjun Zhang 
Committed: Tue Aug 29 21:57:25 2017 -0700

--
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java   |   3 +
 .../hdfs/server/namenode/FSDirAttrOp.java   |   4 +-
 .../hdfs/server/namenode/FSEditLogLoader.java   |   4 +-
 .../hadoop/hdfs/server/namenode/INode.java  |   7 +-
 .../namenode/snapshot/SnapshotManager.java  |  28 
 .../src/main/resources/hdfs-default.xml |  44 +++--
 .../hdfs/server/namenode/TestFSDirAttrOp.java   |   7 +
 .../snapshot/TestSnapshotDiffReport.java| 163 ++-
 8 files changed, 233 insertions(+), 27 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/870ef0b2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index d5192c2..185c882 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -331,6 +331,9 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final boolean DFS_NAMENODE_SNAPSHOT_CAPTURE_OPENFILES_DEFAULT =
   HdfsClientConfigKeys.DFS_NAMENODE_SNAPSHOT_CAPTURE_OPENFILES_DEFAULT;
 
+  public static final String 
DFS_NAMENODE_SNAPSHOT_SKIP_CAPTURE_ACCESSTIME_ONLY_CHANGE = 
"dfs.namenode.snapshot.skip.capture.accesstime-only-change";
+  public static final boolean 
DFS_NAMENODE_SNAPSHOT_SKIP_CAPTURE_ACCESSTIME_ONLY_CHANGE_DEFAULT = false;
+
   // Whether to enable datanode's stale state detection and usage for reads
   public static final String DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_KEY = 
"dfs.namenode.avoid.read.stale.datanode";
   public static final boolean 
DFS_NAMENODE_AVOID_STALE_DATANODE_FOR_READ_DEFAULT = false;
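
For reference, a hedged sketch of turning the new behaviour on programmatically; the key is the constant added above, and in practice it would normally be set in hdfs-site.xml.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

public class SnapshotAccessTimeSketch {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    // Do not capture access-time-only changes of a file into snapshot diffs.
    conf.setBoolean(
        DFSConfigKeys.DFS_NAMENODE_SNAPSHOT_SKIP_CAPTURE_ACCESSTIME_ONLY_CHANGE,
        true);
  }
}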

http://git-wip-us.apache.org/repos/asf/hadoop/blob/870ef0b2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java
index 9e714af..478d5ac 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirAttrOp.java
@@ -493,7 +493,9 @@ public class FSDirAttrOp {
 // then no need to store access time
 if (atime != -1 && (status || force
 || atime > inode.getAccessTime() + fsd.getAccessTimePrecision())) {
-  inode.setAccessTime(atime, latest);
+  inode.setAccessTime(atime, latest,
+  fsd.getFSNamesystem().getSnapshotManager().
+  getSkipCaptureAccessTimeOnlyChange());
   status = true;
 }
 return status;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/870ef0b2/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
index 616c2e3..d603559 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSEditLogLoader.ja

hadoop git commit: HDFS-12336. Listing encryption zones still fails when deleted EZ is not a direct child of snapshottable directory. Contributed by Wellington Chevreuil.

2017-08-29 Thread xiao
Repository: hadoop
Updated Branches:
  refs/heads/trunk 8201ed800 -> f3661fd08


HDFS-12336. Listing encryption zones still fails when deleted EZ is not a 
direct child of snapshottable directory. Contributed by Wellington Chevreuil.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/f3661fd0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/f3661fd0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/f3661fd0

Branch: refs/heads/trunk
Commit: f3661fd08e03440d02452b46ba3ae3cff2b75ba7
Parents: 8201ed8
Author: Xiao Chen 
Authored: Tue Aug 29 21:28:02 2017 -0700
Committer: Xiao Chen 
Committed: Tue Aug 29 21:30:02 2017 -0700

--
 .../server/namenode/EncryptionZoneManager.java  |  2 +-
 .../hadoop/hdfs/server/namenode/INode.java  | 11 -
 .../apache/hadoop/hdfs/TestEncryptionZones.java | 47 
 .../namenode/TestEncryptionZoneManager.java | 26 +++
 .../src/test/resources/testCryptoConf.xml   | 32 +
 5 files changed, 116 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3661fd0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
index d6302ba..f4cf8f2 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/EncryptionZoneManager.java
@@ -573,7 +573,7 @@ public class EncryptionZoneManager {
   return false;
 }
 INode lastINode = null;
-if (inode.getParent() != null || inode.isRoot()) {
+if (INode.isValidAbsolutePath(zonePath)) {
   INodesInPath iip = dir.getINodesInPath(zonePath, DirOp.READ_LINK);
   lastINode = iip.getLastINode();
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3661fd0/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index 874563d..34bfe10 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -783,8 +783,17 @@ public abstract class INode implements INodeAttributes, 
Diff.Element {
 return StringUtils.split(path, Path.SEPARATOR_CHAR);
   }
 
+  /**
+   * Verifies whether the given path is a valid absolute path.
+   * @param path the absolute path to validate.
+   * @return true if the path is valid.
+   */
+  static boolean isValidAbsolutePath(final String path){
+return path != null && path.startsWith(Path.SEPARATOR);
+  }
+
   private static void checkAbsolutePath(final String path) {
-if (path == null || !path.startsWith(Path.SEPARATOR)) {
+if (!isValidAbsolutePath(path)) {
   throw new AssertionError("Absolute path required, but got '"
   + path + "'");
 }
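
Expected behaviour of the new helper, shown as a hedged sketch (the method is package-private, so this would only compile from inside org.apache.hadoop.hdfs.server.namenode; the paths are examples):

// true: a well-formed absolute path
INode.isValidAbsolutePath("/user/alice/.Trash/Current/zone1");
// false: relative paths and null are both rejected
INode.isValidAbsolutePath("relative/zone1");
INode.isValidAbsolutePath(null);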

http://git-wip-us.apache.org/repos/asf/hadoop/blob/f3661fd0/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
--
diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
index bf02db3..870023b 100644
--- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
+++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestEncryptionZones.java
@@ -1867,4 +1867,51 @@ public class TestEncryptionZones {
 // Read them back in and compare byte-by-byte
 verifyFilesEqual(fs, baseFile, encFile1, len);
   }
+
+  /**
+   * Test listing encryption zones after zones had been deleted,
+   * but still exist under snapshots. This test first moves EZs
+   * to trash folder, so that an inodereference is created for the EZ,
+   * then it removes the EZ from trash folder to emulate condition where
+   * the EZ inode will not be complete.
+   */
+  @Test
+  public void testListEncryptionZonesWithSnapshots() throws Exception {
+final Path snapshottable = new Pa

[46/75] [abbrv] hadoop git commit: YARN-7100. Added JsonIgnore annotation to Resource#getMemoryMB. Contributed by Eric Yang

2017-08-29 Thread jianhe
YARN-7100. Added JsonIgnore annotation to Resource#getMemoryMB. Contributed by 
Eric Yang


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/6e7ddba0
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/6e7ddba0
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/6e7ddba0

Branch: refs/heads/yarn-native-services
Commit: 6e7ddba0c5e80b509f708167b40d547eb4504d00
Parents: ef230a3
Author: Jian He 
Authored: Tue Aug 29 20:37:38 2017 -0700
Committer: Jian He 
Committed: Tue Aug 29 20:45:12 2017 -0700

--
 .../java/org/apache/hadoop/yarn/service/api/records/Resource.java  | 2 ++
 1 file changed, 2 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/6e7ddba0/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
index cec9de9..dfdf92a 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Resource.java
@@ -22,6 +22,7 @@ import io.swagger.annotations.ApiModelProperty;
 
 import java.util.Objects;
 
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -103,6 +104,7 @@ public class Resource extends BaseResource implements 
Cloneable {
 this.memory = memory;
   }
 
+  @JsonIgnore
   public long getMemoryMB() {
 if (this.memory == null) {
   return 0;


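As a quick illustration of the annotation's effect, a hedged sketch using a stand-alone DemoResource class and plain Jackson rather than the actual YARN service records: without @JsonIgnore, Jackson would also emit a derived "memoryMB" field computed by the getter; with it, only the declared "memory" property appears.

    import com.fasterxml.jackson.annotation.JsonIgnore;
    import com.fasterxml.jackson.databind.ObjectMapper;

    public class DemoResource {
      private String memory = "512"; // memory kept as a string, as in the service API record

      public String getMemory() { return memory; }
      public void setMemory(String memory) { this.memory = memory; }

      // Derived convenience getter; @JsonIgnore keeps it out of the JSON payload.
      @JsonIgnore
      public long getMemoryMB() {
        return memory == null ? 0 : Long.parseLong(memory);
      }

      public static void main(String[] args) throws Exception {
        // Prints {"memory":"512"} rather than {"memory":"512","memoryMB":512}
        System.out.println(new ObjectMapper().writeValueAsString(new DemoResource()));
      }
    }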



[22/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
deleted file mode 100644
index d96d13e..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/RoleLaunchService.java
+++ /dev/null
@@ -1,168 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.launch.ContainerLauncher;
-import org.apache.hadoop.yarn.service.provider.ProviderService;
-import org.apache.hadoop.yarn.service.provider.ProviderFactory;
-import org.apache.slider.server.appmaster.actions.QueueAccess;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.slider.server.appmaster.state.ContainerAssignment;
-import org.apache.slider.server.services.workflow.ServiceThreadFactory;
-import org.apache.slider.server.services.workflow.WorkflowExecutorService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-
-import static 
org.apache.hadoop.yarn.service.conf.SliderKeys.KEY_CONTAINER_LAUNCH_DELAY;
-
-/**
- * A service for launching containers
- */
-public class RoleLaunchService
-extends WorkflowExecutorService {
-  protected static final Logger log =
-LoggerFactory.getLogger(RoleLaunchService.class);
-
-  public static final String ROLE_LAUNCH_SERVICE = "RoleLaunchService";
-
-
-  /**
-   * Queue submission API
-   */
-  private  QueueAccess actionQueue;
-
-  /**
-   * Filesystem to use for the launch
-   */
-  private  SliderFileSystem fs;
-
-
-  private Map envVars = new HashMap<>();
-
-  /**
-   * Construct an instance of the launcher
-   * @param queueAccess
-   * @param fs filesystem
-   * @param envVars environment variables
-   */
-  public RoleLaunchService(QueueAccess queueAccess, SliderFileSystem fs,
-  Map envVars) {
-super(ROLE_LAUNCH_SERVICE);
-this.actionQueue = queueAccess;
-this.fs = fs;
-this.envVars = envVars;
-  }
-
-  public RoleLaunchService(SliderFileSystem fs) {
-super(ROLE_LAUNCH_SERVICE);
-this.fs = fs;
-  }
-
-  @Override
-  public void init(Configuration conf) {
-super.init(conf);
-setExecutor(Executors.newCachedThreadPool(
-new ServiceThreadFactory(ROLE_LAUNCH_SERVICE, true)));
-  }
-
-  /**
-   * Start an asychronous launch operation
-   * @param assignment container assignment
-   * @param credentials credentials to use
-   */
-  public void launchRole(ContainerAssignment assignment,
-  Application application, Credentials credentials) {
-  }
-
-  public void launchComponent(Application application,
-  ComponentInstance instance, Container container) {
-RoleLaunchService.RoleLauncher launcher =
-new RoleLaunchService.RoleLauncher(application, instance,
-container);
-execute(launcher);
-  }
-
-  /**
-   * Thread that runs on the AM to launch a container
-   */
-  private class RoleLauncher implements Runnable {
-// Allocated container
-public final Container container;
-public final Application application;
-public ComponentInstance inst
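
Although the class above is being removed, its launch pattern (a cached thread pool built from a named daemon ThreadFactory, with one Runnable per container launch) is worth a minimal standalone sketch. LauncherPoolDemo and namedDaemonFactory are hypothetical stand-ins for the Slider classes.

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.atomic.AtomicInteger;

    public class LauncherPoolDemo {
      // Stand-in for ServiceThreadFactory: named daemon threads, as configured
      // in the removed service's init() via Executors.newCachedThreadPool.
      static ThreadFactory namedDaemonFactory(String name) {
        AtomicInteger counter = new AtomicInteger();
        return r -> {
          Thread t = new Thread(r, name + "-" + counter.incrementAndGet());
          t.setDaemon(true);
          return t;
        };
      }

      public static void main(String[] args) throws Exception {
        ExecutorService pool =
            Executors.newCachedThreadPool(namedDaemonFactory("RoleLaunchService"));
        // Each container launch is submitted as a Runnable, mirroring
        // launchComponent() handing a launcher task to execute().
        pool.execute(() -> System.out.println(
            "launching container on " + Thread.currentThread().getName()));
        pool.shutdown();
        pool.awaitTermination(5, TimeUnit.SECONDS);
      }
    }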

[66/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
new file mode 100644
index 000..7baa284
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.containerlaunch;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Build a single command line to include in the container commands;
+ * Special support for JVM command buildup.
+ */
+public class CommandLineBuilder {
+  protected final List argumentList = new ArrayList<>(20);
+
+  /**
+   * Add an entry to the command list
+   * @param args arguments - these will be converted to strings
+   */
+  public void add(Object... args) {
+for (Object arg : args) {
+  argumentList.add(arg.toString());
+}
+  }
+
+  // Get the number of arguments
+  public int size() {
+return argumentList.size();
+  }
+  
+  /**
+   * Append the output and error files to the tail of the command
+   * @param stdout out
+   * @param stderr error. Set this to null to append into stdout
+   */
+  public void addOutAndErrFiles(String stdout, String stderr) {
+Preconditions.checkNotNull(stdout, "Null output file");
+Preconditions.checkState(!stdout.isEmpty(), "output filename invalid");
+// write out the path output
+argumentList.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/" +
+ stdout);
+if (stderr != null) {
+  argumentList.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/" 
+
+   stderr);
+} else {
+  argumentList.add("2>&1");
+}
+  }
+
+  /**
+   * This just returns the command line
+   * @see #build()
+   * @return the command line
+   */
+  @Override
+  public String toString() {
+return build();
+  }
+
+  /**
+   * Build the command line
+   * @return the command line
+   */
+  public String build() {
+return SliderUtils.join(argumentList, " ");
+  }
+}
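
A brief usage sketch of the class added above, assuming it and its YARN dependencies are on the classpath; the java command and file names are illustrative.

    import org.apache.hadoop.yarn.service.containerlaunch.CommandLineBuilder;

    public class CommandLineBuilderDemo {
      public static void main(String[] args) {
        CommandLineBuilder cli = new CommandLineBuilder();
        // Arguments are converted to strings and joined with single spaces.
        cli.add("java", "-Xmx256m", "-jar", "service.jar");
        // Redirect stdout/stderr into the container log directory; passing
        // null for stderr would fold it into stdout via "2>&1" instead.
        cli.addOutAndErrFiles("stdout.txt", "stderr.txt");
        System.out.println(cli.build());
        // e.g. java -Xmx256m -jar service.jar 1><LOG_DIR>/stdout.txt 2><LOG_DIR>/stderr.txt
      }
    }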

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
new file mode 100644
index 000..0e51a62
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (t

[08/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/common/tools/TestSliderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/common/tools/TestSliderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/common/tools/TestSliderUtils.java
deleted file mode 100644
index 057f6c5..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/common/tools/TestSliderUtils.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.common.tools;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
-import org.junit.Assert;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-/** Test slider util methods. */
-public class TestSliderUtils {
-  protected static final Logger log =
-  LoggerFactory.getLogger(TestSliderUtils.class);
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
-
-  //@Test
-  public void testTruncate() {
-Assert.assertEquals(SliderUtils.truncate(null, 5), null);
-Assert.assertEquals(SliderUtils.truncate("323", -1), "323");
-Assert.assertEquals(SliderUtils.truncate("3232", 5), "3232");
-Assert.assertEquals(SliderUtils.truncate("1234567890", 0), "1234567890");
-Assert.assertEquals(SliderUtils.truncate("123456789012345", 15), 
"123456789012345");
-Assert.assertEquals(SliderUtils.truncate("123456789012345", 14), 
"12345678901...");
-Assert.assertEquals(SliderUtils.truncate("1234567890", 1), "1");
-Assert.assertEquals(SliderUtils.truncate("1234567890", 10), "1234567890");
-Assert.assertEquals(SliderUtils.truncate("", 10), "");
-  }
-
-  //@Test
-  public void testApplicationReportComparison() {
-List instances = getApplicationReports();
-
-SliderUtils.sortApplicationsByMostRecent(instances);
-
-Assert.assertEquals(1000, instances.get(0).getStartTime());
-Assert.assertEquals(1000, instances.get(1).getStartTime());
-Assert.assertEquals(1000, instances.get(2).getStartTime());
-Assert.assertEquals(1000, instances.get(3).getStartTime());
-
-instances = getApplicationReports();
-
-SliderUtils.sortApplicationReport(instances);
-Assert.assertEquals(1000, instances.get(0).getStartTime());
-Assert.assertEquals(1000, instances.get(1).getStartTime());
-Assert.assertEquals(1000, instances.get(2).getStartTime());
-Assert.assertEquals(1000, instances.get(3).getStartTime());
-
-Assert.assertTrue(instances.get(0).getYarnApplicationState() == 
YarnApplicationState.ACCEPTED ||
-  instances.get(0).getYarnApplicationState() == 
YarnApplicationState.RUNNING);
-Assert.assertTrue(instances.get(1).getYarnApplicationState() == 
YarnApplicationState.ACCEPTED ||
-  instances.get(1).getYarnApplicationState() == 
YarnApplicationState.RUNNING);
-Assert.assertTrue(instances.get(2).getYarnApplicationState() == 
YarnApplicationState.ACCEPTED ||
-  instances.get(2).getYarnApplicationState() == 
YarnApplicationState.RUNNING);
-Assert.assertTrue(instances.get(3).getYarnApplicationState() == 
YarnApplicationState.KILLED);
-  }
-
-  private List getApplicationReports() {
-List instances = new ArrayList();
-instances.add(getApplicationReport(1000, 0, "app1", 
YarnApplicationState.ACCEPTED));
-instances.add(getApplicationReport(900, 998, "app1", 
YarnApplicationState.KILLED

[15/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/RoleHistory.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/RoleHistory.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/RoleHistory.java
deleted file mode 100644
index 15333e4..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/RoleHistory.java
+++ /dev/null
@@ -1,1123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.state;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.NodeState;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.slider.api.types.NodeInformation;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.providers.ProviderRole;
-import org.apache.slider.server.appmaster.management.BoolMetric;
-import org.apache.slider.server.appmaster.management.Timestamp;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.UpdateBlacklistOperation;
-import org.apache.slider.server.avro.LoadedRoleHistory;
-import org.apache.slider.server.avro.NodeEntryRecord;
-import org.apache.slider.server.avro.RoleHistoryHeader;
-import org.apache.slider.server.avro.RoleHistoryWriter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-/**
- * The Role History.
- * 
- * Synchronization policy: all public operations are synchronized.
- * Protected methods are in place for testing -no guarantees are made.
- * 
- * Inner classes have no synchronization guarantees; they should be 
manipulated 
- * in these classes and not externally.
- * 
- * Note that as well as some methods marked visible for testing, there
- * is the option for the time generator method, {@link #now()} to
- * be overridden so that a repeatable time series can be used.
- * 
- */
-public class RoleHistory {
-  protected static final Logger log =
-LoggerFactory.getLogger(RoleHistory.class);
-  private final List providerRoles;
-  /** the roles in here are shared with App State */
-  private final Map roleStatusMap = new HashMap<>();
-  private final AbstractClusterServices recordFactory;
-
-  private long startTime;
-
-  /** Time when saved */
-  private final Timestamp saveTime = new Timestamp(0);
-
-  /** If the history was loaded, the time at which the history was saved.
-   * That is: the time the data was valid */
-  private final Timestamp thawedDataTime = new Timestamp(0);
-  
-  private NodeMap nodemap;
-  private int roleSize;
-  private final BoolMetric dirty = new BoolMetric(false);
-  private FileSystem filesystem;
-  private Path historyPath;
-  private RoleHistoryWriter historyWriter = new RoleHistoryWriter();
-
-  /**
-   * When were the nodes updated in a {@link #onNodesUpdated(List)} call?
-   * If zero: never.
-   */
-  private final Timestamp nodesUpdatedTime = new Timestamp(0);
-  private final BoolMetric nodeUpdateReceived = new BoolMetric(false);
-
-  private OutstandingRequestTracker outstandingRequests =
-new Outstan

[43/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/slideram-log4j.properties
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/slideram-log4j.properties
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/slideram-log4j.properties
deleted file mode 100644
index 333859e..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/slideram-log4j.properties
+++ /dev/null
@@ -1,68 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-#
-
-# This is the log4j configuration for Slider Application Master
-
-# Log rotation based on size (100KB) with a max of 10 backup files
-log4j.rootLogger=INFO, amlog
-log4j.threshhold=ALL
-log4j.appender.amlog=org.apache.log4j.RollingFileAppender
-log4j.appender.amlog.layout=org.apache.log4j.PatternLayout
-log4j.appender.amlog.File=${LOG_DIR}/slider.log
-log4j.appender.amlog.MaxFileSize=1MB
-log4j.appender.amlog.MaxBackupIndex=10
-
-# log layout skips stack-trace creation operations by avoiding line numbers 
and method
-log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - 
%m%n
-
-# debug edition is much more expensive
-#log4j.appender.amlog.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} 
(%F:%M(%L)) - %m%n
-
-# configure stderr
-# set the conversion pattern of stderr
-# Print the date in ISO 8601 format
-log4j.appender.stderr=org.apache.log4j.ConsoleAppender
-log4j.appender.stderr.Target=System.err
-log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
-log4j.appender.stderr.layout.ConversionPattern=%d{ISO8601} [%t] %-5p %c{2} - 
%m%n
-
-log4j.appender.subprocess=org.apache.log4j.ConsoleAppender
-log4j.appender.subprocess.layout=org.apache.log4j.PatternLayout
-log4j.appender.subprocess.layout.ConversionPattern=[%c{1}]: %m%n
-#log4j.logger.org.apache.slider.yarn.appmaster.SliderAppMasterer.master=INFO,subprocess
-
-# for debugging Slider
-#log4j.logger.org.apache.slider=DEBUG
-
-# uncomment to debug service lifecycle issues
-#log4j.logger.org.apache.hadoop.yarn.service.launcher=DEBUG
-#log4j.logger.org.apache.hadoop.yarn.service=DEBUG
-
-# uncomment for YARN operations
-#log4j.logger.org.apache.hadoop.yarn.client=DEBUG
-
-# uncomment this to debug security problems
-#log4j.logger.org.apache.hadoop.security=DEBUG
-
-#crank back on some noise
-log4j.logger.org.apache.hadoop.util.NativeCodeLoader=ERROR
-log4j.logger.org.apache.hadoop.hdfs=WARN
-log4j.logger.org.apache.hadoop.hdfs.shortcircuit=ERROR
-
-log4j.logger.org.apache.zookeeper=WARN
-log4j.logger.org.apache.curator.framework.state=ERROR
-log4j.logger.org.apache.curator.framework.imps=WARN

http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
new file mode 100644
index 000..58c8e27
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/conf/yarnservice-log4j.properties
@@ -0,0 +1,62 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in wr

[38/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
index 647bfe9..a044838 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
@@ -24,30 +24,24 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.slider.api.ClusterNode;
-import org.apache.slider.api.ResourceKeys;
-import org.apache.slider.api.RoleKeys;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.slider.api.resource.Configuration;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.SliderException;
-import org.apache.slider.core.launch.AbstractLauncher;
-import org.apache.slider.core.launch.ContainerLauncher;
-import org.apache.slider.core.registry.docstore.ConfigFormat;
-import org.apache.slider.core.registry.docstore.PublishedConfiguration;
-import 
org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
 import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.slider.server.appmaster.state.StateAccessForProviders;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+import org.apache.hadoop.yarn.service.api.records.Configuration;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
+import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -55,23 +49,18 @@ import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
 import java.util.regex.Pattern;
 
-import static org.apache.slider.api.ServiceApiConstants.*;
-import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.$;
+import static 
org.apache.hadoop.yarn.service.api.constants.ServiceApiConstants.*;
 
 /**
  * This is a factoring out of methods handy for providers. It's bonded to a log
  * at construction time.
  */
-public class ProviderUtils implements RoleKeys, SliderKeys {
+public class ProviderUtils implements YarnServiceConstants {
 
   protected static final Logger log =
   LoggerFactory.getLogger(ProviderUtils.class);
@@ -174,46 +163,22 @@ public class ProviderUtils implements RoleKeys, 
SliderKeys {
   }
 
   /**
-   * Get resource requirements from a String value. If value isn't specified,
-   * use the default value. If value is greater than max, use the max value.
-   * @param val string value
-   * @param defVal default value
-   * @param maxVal maximum value

[03/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
deleted file mode 100644
index 9c5708f..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/mock/MockYarnEngine.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.model.mock;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.CancelSingleRequest;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.operations.ContainerRequestOperation;
-import org.junit.Assert;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static org.junit.Assert.assertNotNull;
-
-/**
- * This is an evolving engine to mock YARN operations.
- */
-public class MockYarnEngine {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(MockYarnEngine.class);
-
-  private MockYarnCluster cluster;
-  private Allocator allocator;
-  private List pending = new ArrayList<>();
-
-  private ApplicationId appId = new MockApplicationId(0, 0);
-
-  private ApplicationAttemptId attemptId = new MockApplicationAttemptId(appId,
-  1);
-
-  @Override
-  public String toString() {
-return "MockYarnEngine " + cluster + " + pending=" + pending.size();
-  }
-
-  public int containerCount() {
-return cluster.containersInUse();
-  }
-
-  public MockYarnEngine(int clusterSize, int containersPerNode) {
-cluster = new MockYarnCluster(clusterSize, containersPerNode);
-allocator = new Allocator(cluster);
-  }
-
-  public MockYarnCluster getCluster() {
-return cluster;
-  }
-
-  public Allocator getAllocator() {
-return allocator;
-  }
-
-  /**
-   * Allocate a container from a request. The containerID will be
-   * unique, nodeId and other fields chosen internally with
-   * no such guarantees; resource and priority copied over
-   * @param request request
-   * @return container
-   */
-  public Container allocateContainer(AMRMClient.ContainerRequest request) {
-MockContainer allocated = allocator.allocate(request);
-if (allocated != null) {
-  MockContainerId id = (MockContainerId)allocated.getId();
-  id.setApplicationAttemptId(attemptId);
-}
-return allocated;
-  }
-
-  MockYarnCluster.MockYarnClusterContainer releaseContainer(ContainerId
-  containerId) {
-return cluster.release(containerId);
-  }
-
-  /**
-   * Process a list of operations -release containers to be released,
-   * allocate those for which there is space (but don't rescan the list after
-   * the scan).
-   * @param ops
-   * @return
-   */
-  public List execute(List ops) {
-return execute(ops, new ArrayList<>());
-  }
-
-  /**
-   * Process a list of operations -release containers to be released,
-   * allocate those for which there is space (but don't rescan the list after
-   * the scan). Unsatisifed entries a

[54/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
deleted file mode 100644
index 7baa284..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/CommandLineBuilder.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.containerlaunch;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Build a single command line to include in the container commands;
- * Special support for JVM command buildup.
- */
-public class CommandLineBuilder {
-  protected final List argumentList = new ArrayList<>(20);
-
-  /**
-   * Add an entry to the command list
-   * @param args arguments -these will be converted strings
-   */
-  public void add(Object... args) {
-for (Object arg : args) {
-  argumentList.add(arg.toString());
-}
-  }
-
-  // Get the number of arguments
-  public int size() {
-return argumentList.size();
-  }
-  
-  /**
-   * Append the output and error files to the tail of the command
-   * @param stdout out
-   * @param stderr error. Set this to null to append into stdout
-   */
-  public void addOutAndErrFiles(String stdout, String stderr) {
-Preconditions.checkNotNull(stdout, "Null output file");
-Preconditions.checkState(!stdout.isEmpty(), "output filename invalid");
-// write out the path output
-argumentList.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/" +
- stdout);
-if (stderr != null) {
-  argumentList.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/" 
+
-   stderr);
-} else {
-  argumentList.add("2>&1");
-}
-  }
-
-  /**
-   * This just returns the command line
-   * @see #build()
-   * @return the command line
-   */
-  @Override
-  public String toString() {
-return build();
-  }
-
-  /**
-   * Build the command line
-   * @return the command line
-   */
-  public String build() {
-return SliderUtils.join(argumentList, " ");
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
deleted file mode 100644
index fcbb69b..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/ContainerLaunchService.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you m

[63/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
new file mode 100644
index 000..7b22e3e
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/JsonSerDeser.java
@@ -0,0 +1,249 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.codehaus.jackson.JsonGenerationException;
+import org.codehaus.jackson.JsonParseException;
+import org.codehaus.jackson.map.DeserializationConfig;
+import org.codehaus.jackson.map.JsonMappingException;
+import org.codehaus.jackson.map.ObjectMapper;
+import org.codehaus.jackson.map.PropertyNamingStrategy;
+import org.codehaus.jackson.map.SerializationConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ * Support for marshalling objects to and from JSON.
+ * This class is NOT thread safe; it constructs an object mapper
+ * as an instance field.
+ * @param 
+ */
+public class JsonSerDeser {
+
+  private static final Logger log = 
LoggerFactory.getLogger(JsonSerDeser.class);
+  private static final String UTF_8 = "UTF-8";
+
+  private final Class classType;
+  private final ObjectMapper mapper;
+
+  /**
+   * Create an instance bound to a specific type
+   * @param classType class type
+   */
+  public JsonSerDeser(Class classType) {
+this.classType = classType;
+this.mapper = new ObjectMapper();
+mapper.configure(DeserializationConfig.Feature.FAIL_ON_UNKNOWN_PROPERTIES, 
false);
+  }
+
+  public JsonSerDeser(Class classType, PropertyNamingStrategy 
namingStrategy) {
+this(classType);
+mapper.setPropertyNamingStrategy(namingStrategy);
+  }
+
+  /**
+   * Convert from JSON
+   * @param json input
+   * @return the parsed JSON
+   * @throws IOException IO
+   * @throws JsonMappingException failure to map from the JSON to this class
+   */
+  public T fromJson(String json)
+throws IOException, JsonParseException, JsonMappingException {
+try {
+  return mapper.readValue(json, classType);
+} catch (IOException e) {
+  log.error("Exception while parsing json : " + e + "\n" + json, e);
+  throw e;
+}
+  }
+
+  /**
+   * Convert from a JSON file
+   * @param jsonFile input file
+   * @return the parsed JSON
+   * @throws IOException IO problems
+   * @throws JsonMappingException failure to map from the JSON to this class
+   */
+  public T fromFile(File jsonFile)
+throws IOException, JsonParseException, JsonMappingException {
+File absoluteFile = jsonFile.getAbsoluteFile();
+try {
+  return mapper.readValue(absoluteFile, classType);
+} catch (IOException e) {
+  log.error("Exception while parsing json file {}", absoluteFile, e);
+  throw e;
+}
+  }
+
+  /**
+   * Convert from a JSON file
+   * @param resource input file
+   * @return the parsed JSON
+   * @throws IOException IO problems
+   * @throws JsonMappingException failure to map from the JSON to this class
+   */
+ public T fromResource(String resource)
+throws IOException, JsonParseException, JsonMappingException {
+tr
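
A short usage sketch of the serializer above. The archive rendering strips generic type parameters, but the class is bound to a target type through its constructor; ServiceSpec here is a hypothetical POJO, and the extra JSON property is tolerated because the constructor disables FAIL_ON_UNKNOWN_PROPERTIES.

    import org.apache.hadoop.yarn.service.utils.JsonSerDeser;

    public class JsonSerDeserDemo {
      // Hypothetical record type used only for this illustration.
      public static class ServiceSpec {
        private String name;
        private int instances;
        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public int getInstances() { return instances; }
        public void setInstances(int instances) { this.instances = instances; }
      }

      public static void main(String[] args) throws Exception {
        JsonSerDeser<ServiceSpec> serDeser = new JsonSerDeser<>(ServiceSpec.class);
        // The unknown "extra" property is ignored rather than failing the parse.
        ServiceSpec spec = serDeser.fromJson(
            "{\"name\":\"sleeper\",\"instances\":2,\"extra\":\"ignored\"}");
        System.out.println(spec.getName() + " x " + spec.getInstances());
      }
    }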

[75/75] [abbrv] hadoop git commit: YARN-7113. Clean up packaging and dependencies for yarn-native-services. Contributed by Billie Rinaldi

2017-08-29 Thread jianhe
YARN-7113. Clean up packaging and dependencies for yarn-native-services. 
Contributed by Billie Rinaldi


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/ef230a3a
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/ef230a3a
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/ef230a3a

Branch: refs/heads/yarn-native-services
Commit: ef230a3a27dafa04bb2d379168dad0de478ab9c0
Parents: 5dc3432
Author: Jian He 
Authored: Tue Aug 29 11:09:00 2017 -0700
Committer: Jian He 
Committed: Tue Aug 29 20:45:12 2017 -0700

--
 NOTICE.txt  |   15 +
 .../resources/assemblies/hadoop-yarn-dist.xml   |8 -
 .../assemblies/hadoop-yarn-services-api.xml |   36 -
 .../assemblies/hadoop-yarn-services-dist.xml|   30 -
 hadoop-project/pom.xml  |   19 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn|   37 +-
 .../hadoop-yarn-services-api/pom.xml|  104 +-
 .../yarn/service/webapp/ApiServerWebApp.java|4 +-
 .../src/main/resources/webapps/api-server/app   |   16 +
 .../resources/webapps/services-rest-api/app |   16 -
 .../hadoop-yarn-services-core/pom.xml   |  213 +---
 .../service/client/params/ActionKDiagArgs.java  |   76 --
 .../yarn/service/client/params/ClientArgs.java  |5 -
 .../registry/YarnRegistryViewForProviders.java  |8 +-
 .../yarn/service/utils/KerberosDiags.java   |  680 ---
 .../hadoop/yarn/service/utils/SliderUtils.java  | 1088 --
 .../hadoop/yarn/service/ServiceTestUtils.java   |   28 +
 .../hadoop/yarn/service/TestServiceApiUtil.java |   38 +-
 .../yarn/service/TestYarnNativeServices.java|   10 +-
 .../yarn/service/client/TestServiceCLI.java |1 -
 .../yarn/service/conf/TestAppJsonResolve.java   |   30 +-
 .../service/conf/TestLoadExampleAppJson.java|   11 +-
 .../providers/TestAbstractClientProvider.java   |   10 +-
 hadoop-yarn-project/pom.xml |4 +
 24 files changed, 176 insertions(+), 2311 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef230a3a/NOTICE.txt
--
diff --git a/NOTICE.txt b/NOTICE.txt
index c41972b..e40f3bf 100644
--- a/NOTICE.txt
+++ b/NOTICE.txt
@@ -576,3 +576,18 @@ The binary distribution of this product bundles binaries of
 software.amazon.ion:ion-java 1.0.1,
 which has the following notices:
  * Amazon Ion Java Copyright 2007-2016 Amazon.com, Inc. or its affiliates. All 
Rights Reserved.
+
+The binary distribution of this product bundles binaries of
+JCommander (https://github.com/cbeust/jcommander),
+which has the following notices:
+ * Copyright 2010 Cedric Beust ced...@beust.com
+
+The binary distribution of this product bundles binaries of
+snakeyaml (https://bitbucket.org/asomov/snakeyaml),
+which has the following notices:
+ * Copyright (c) 2008, http://www.snakeyaml.org
+
+The binary distribution of this product bundles binaries of
+swagger-annotations (https://github.com/swagger-api/swagger-core),
+which has the following notices:
+ * Copyright 2016 SmartBear Software

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef230a3a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
--
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
index 8aeeabd..8b3d292 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
@@ -98,10 +98,6 @@
   etc/hadoop
 
 
-  
hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/target/hadoop-yarn-services-core-${project.version}
-  
/share/hadoop/${hadoop.component}/lib/services
-
-
   
hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target
   
/share/hadoop/${hadoop.component}/sources
   
@@ -109,10 +105,6 @@
   
 
 
-  
hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/target/hadoop-yarn-services-api-${project.version}
-  
/share/hadoop/${hadoop.component}/lib/services-api
-
-
   
hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-unmanaged-am-launcher/target
   
/share/hadoop/${hadoop.component}/sources
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef230a3a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml
--
diff --git 
a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-services-api.xml 
b/hadoop-assemblies/src/main/resources/assemblies/hadoo

[06/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/appstate/TestMockAppStateRMOperations.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/appstate/TestMockAppStateRMOperations.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/appstate/TestMockAppStateRMOperations.java
deleted file mode 100644
index 8686479..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/appstate/TestMockAppStateRMOperations.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.model.appstate;
-
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.slider.server.appmaster.model.mock.BaseMockAppStateTest;
-import org.apache.slider.server.appmaster.model.mock.MockRMOperationHandler;
-import org.apache.slider.server.appmaster.model.mock.MockRoles;
-import org.apache.slider.server.appmaster.model.mock.MockYarnEngine;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.CancelSingleRequest;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.operations.ContainerRequestOperation;
-import org.apache.slider.server.appmaster.state.AppState;
-import org.apache.slider.server.appmaster.state.ContainerAssignment;
-import org.apache.slider.server.appmaster.state.RoleInstance;
-import org.apache.slider.server.appmaster.state.RoleStatus;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import static 
org.apache.slider.server.appmaster.state.ContainerPriority.buildPriority;
-import static 
org.apache.slider.server.appmaster.state.ContainerPriority.extractRole;
-
-/**
- * Test app state RM operations.
- */
-public class TestMockAppStateRMOperations extends BaseMockAppStateTest
-implements MockRoles {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(BaseMockAppStateTest.class);
-
-  @Override
-  public String getTestName() {
-return "TestMockAppStateRMOperations";
-  }
-
-  //@Test
-  public void testPriorityOnly() throws Throwable {
-assertEquals(5, extractRole(buildPriority(5, false)));
-  }
-
-  //@Test
-  public void testPriorityRoundTrip() throws Throwable {
-assertEquals(5, extractRole(buildPriority(5, false)));
-  }
-
-  //@Test
-  public void testPriorityRoundTripWithRequest() throws Throwable {
-int priority = buildPriority(5, false);
-assertEquals(5, extractRole(priority));
-  }
-
-  //@Test
-  public void testMockAddOp() throws Throwable {
-getRole0Status().setDesired(1);
-List ops = appState.reviewRequestAndReleaseNodes();
-assertListLength(ops, 1);
-ContainerRequestOperation operation = 
(ContainerRequestOperation)ops.get(0);
-int priority = operation.getRequest().getPriority().getPriority();
-assertEquals(extractRole(priority), getRole0Status().getKey());
-MockRMOperationHandler handler = new MockRMOperationHandler();
-handler.execute(ops);
-
-AbstractRMOperation op = handler.getFirstOp();
-assertTrue(op instanceof ContainerRequestOperation);
-  }
-
-  /**
-   * Test of a flex up and down op which verifies that outstanding
-   * requests are cancelled first.
-   * 
-   *   request 5 nodes, assert 5 request made
-   *   allocate 1 of them
-   *   flex cluster size to 3
-   *   assert this generates 2 cancel requests
-   * 
-   */
-  //@Test
-  public void testRequestThenCancelOps() 

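(As a worked check of the cancellation arithmetic in the testRequestThenCancelOps javadoc above: 5 requests are issued and 1 is allocated, so 4 requests remain outstanding; flexing the desired size to 3 leaves 3 - 1 = 2 containers still needed, hence 4 - 2 = 2 of the outstanding requests are expected to be cancelled.)
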
[69/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
new file mode 100644
index 000..c3a2752
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
@@ -0,0 +1,892 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.client;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.curator.framework.CuratorFramework;
+import org.apache.curator.framework.CuratorFrameworkFactory;
+import org.apache.curator.retry.RetryNTimes;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.registry.client.api.RegistryConstants;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.service.CompositeService;
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
+import 
org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
+import org.apache.hadoop.yarn.api.records.ApplicationTimeout;
+import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType;
+import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.client.api.YarnClient;
+import org.apache.hadoop.yarn.client.api.YarnClientApplication;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.ipc.YarnRPC;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.ComponentCountProto;
+import 
org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
+import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto;
+import org.apache.hadoop.yarn.service.ClientAMProtocol;
+import org.apache.hadoop.yarn.service.ServiceMaster;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.ServiceState;
+import 
org.apache.hadoop.yarn.service.client.params.AbstractClusterBuildingActionArgs;
+import org.apache.hadoop.yarn.service.client.params.ActionDependencyArgs;
+import org.apache.hadoop.yarn.service.client.params.ActionFlexArgs;
+import org.apache.hadoop.yarn.service.client.params.Arguments;
+import org.apache.hadoop.yarn.service.client.params.Cli

[67/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/Component.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/Component.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/Component.java
new file mode 100644
index 000..cb7131e
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/Component.java
@@ -0,0 +1,494 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.component;
+
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
+import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
+import org.apache.hadoop.yarn.event.AsyncDispatcher;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
+import org.apache.hadoop.yarn.service.ContainerFailureTracker;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.apache.hadoop.yarn.service.ServiceScheduler;
+import 
org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
+import org.apache.hadoop.yarn.service.ServiceMetrics;
+import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
+import org.apache.hadoop.yarn.state.MultipleArcTransition;
+import org.apache.hadoop.yarn.state.SingleArcTransition;
+import org.apache.hadoop.yarn.state.StateMachine;
+import org.apache.hadoop.yarn.state.StateMachineFactory;
+import org.apache.hadoop.yarn.util.Apps;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.apache.hadoop.yarn.service.monitor.probe.MonitorUtils;
+import org.apache.hadoop.yarn.service.monitor.probe.Probe;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.text.MessageFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import static org.apache.hadoop.yarn.api.records.ContainerExitStatus.*;
+import static org.apache.hadoop.yarn.service.component.ComponentEventType.*;
+import static 
org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType.START;
+import static 
org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType.STOP;
+import static org.apache.hadoop.yarn.service.component.ComponentState.*;
+import static 
org.apache.hadoop.yarn.service.conf.YarnServiceConf.CONTAINER_FAILURE_THRESHOLD;
+
+public class Component implements EventHandler<ComponentEvent> {
+  private static final Logger LOG = LoggerFactory.getLogger(Component.class);
+
+  private org.apache.hadoop.yarn.service.api.records.Component componentSpec;
+  private long allocateId;
+  private Priority priority;
+  private ServiceMetrics componentMetrics;
+  private ServiceScheduler scheduler;
+  private ServiceContext context;
+  private AMRMClientAsync<ContainerRequest> amrmClient;
+  private AtomicLong instanceIdCounter = new AtomicLong();
+  private Map<String, ComponentInstance> compInstances =
+  new ConcurrentHashMap<>();
+  // component instances to be assigned with a container
+  private List<ComponentInstance> pendingInstances = new LinkedList<>();
+  private ContainerFailureTracker failureTracker;
+  private Probe probe;
+  private

[32/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
deleted file mode 100644
index b28257f..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/ClientUtils.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.slider.client;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.PathNotFoundException;
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
-import org.apache.hadoop.registry.client.exceptions.NoRecordException;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.NotFoundException;
-import org.apache.slider.core.exceptions.SliderException;
-import org.apache.slider.core.registry.docstore.ConfigFormat;
-import org.apache.slider.core.registry.docstore.PublishedConfigSet;
-import org.apache.slider.core.registry.docstore.PublishedConfiguration;
-import 
org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
-import org.apache.slider.core.registry.retrieve.RegistryRetriever;
-
-import java.io.File;
-import java.io.IOException;
-
-import static 
org.apache.hadoop.registry.client.binding.RegistryUtils.currentUser;
-import static 
org.apache.hadoop.registry.client.binding.RegistryUtils.servicePath;
-
-public class ClientUtils {
-  public static ServiceRecord lookupServiceRecord(RegistryOperations rops,
-  String user, String name) throws IOException, SliderException {
-return lookupServiceRecord(rops, user, null, name);
-  }
-
-  public static ServiceRecord lookupServiceRecord(RegistryOperations rops,
-  String user, String type, String name) throws IOException,
-  SliderException {
-if (StringUtils.isEmpty(user)) {
-  user = currentUser();
-} else {
-  user = RegistryPathUtils.encodeForRegistry(user);
-}
-if (StringUtils.isEmpty(type)) {
-  type = SliderKeys.APP_TYPE;
-}
-
-String path = servicePath(user, type, name);
-return resolve(rops, path);
-  }
-
-  public static ServiceRecord resolve(RegistryOperations rops, String path)
-  throws IOException, SliderException {
-try {
-  return rops.resolve(path);
-} catch (PathNotFoundException | NoRecordException e) {
-  throw new NotFoundException(e.getPath().toString(), e);
-}
-  }
-
-  public static PublishedConfiguration getConfigFromRegistry(
-  RegistryOperations rops, Configuration configuration,
-  String configName, String appName, String user, boolean external)
-  throws IOException, SliderException {
-ServiceRecord instance = lookupServiceRecord(rops, user, appName);
-
-RegistryRetriever retriever = new RegistryRetriever(configuration, 
instance);
-PublishedConfigSet configurations = retriever.getConfigurations(external);
-
-PublishedConfiguration published = retriever.retrieveConfiguration(
-configurations, configName, external);
-return published;
-  }
-
-  public static String saveOrReturnConfig(PublishedConfiguration published,
-  String format, File destPath, String fileName)
-  throws BadCommandArgumentsException, IOException {
-ConfigFormat configFormat = ConfigFormat.resolve(format);
-if (configFormat == null) {
-  throw new BadCommandArgumentsException(
-  "Unknown/Unsupported format %s 

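(For orientation, a minimal sketch of how the ClientUtils helpers in the deleted file above were typically driven. It assumes an already-started RegistryOperations handle named rops and a Configuration named conf; the wrapper class and the "hbase1"/"hbase-site" names are illustrative, not part of the commit.)

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.slider.client.ClientUtils;
import org.apache.slider.core.registry.docstore.PublishedConfiguration;

public class ClientUtilsSketch {
  // Resolve the registry record of an instance owned by the current user
  // (an empty user falls back to RegistryUtils.currentUser()), then fetch
  // one of its published configurations.
  static void lookup(RegistryOperations rops, Configuration conf) throws Exception {
    ServiceRecord record = ClientUtils.lookupServiceRecord(rops, null, "hbase1");
    PublishedConfiguration published = ClientUtils.getConfigFromRegistry(
        rops, conf, "hbase-site", "hbase1", null, false);
    System.out.println(record);
    System.out.println(published);
  }
}
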
[55/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/compinstance/ComponentInstance.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/compinstance/ComponentInstance.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/compinstance/ComponentInstance.java
deleted file mode 100644
index 982a114..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/compinstance/ComponentInstance.java
+++ /dev/null
@@ -1,493 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.compinstance;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
-import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.client.api.NMClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.service.ServiceScheduler;
-import org.apache.hadoop.yarn.service.api.records.ContainerState;
-import org.apache.hadoop.yarn.service.component.Component;
-import org.apache.hadoop.yarn.state.InvalidStateTransitionException;
-import org.apache.hadoop.yarn.state.SingleArcTransition;
-import org.apache.hadoop.yarn.state.StateMachine;
-import org.apache.hadoop.yarn.state.StateMachineFactory;
-import org.apache.hadoop.yarn.util.BoundedAppender;
-import org.apache.hadoop.yarn.service.utils.SliderUtils;
-import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher;
-import org.apache.hadoop.yarn.service.servicemonitor.probe.ProbeStatus;
-import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.text.MessageFormat;
-import java.util.Date;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
-
-import static 
org.apache.hadoop.yarn.api.records.ContainerExitStatus.KILLED_BY_APPMASTER;
-import static org.apache.hadoop.yarn.api.records.ContainerState.COMPLETE;
-import static 
org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType.*;
-import static 
org.apache.hadoop.yarn.service.compinstance.ComponentInstanceState.*;
-
-public class ComponentInstance implements EventHandler<ComponentInstanceEvent>,
-Comparable<ComponentInstance> {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(ComponentInstance.class);
-
-  private StateMachine<ComponentInstanceState, ComponentInstanceEventType, ComponentInstanceEvent> stateMachine;
-  private Component component;
-  private final ReadLock readLock;
-  private final WriteLock writeLock;
-
-  private ComponentInstanceId compInstanceId = null;
-  private Path compInstanceDir;
-  private Container container;
-  private YarnRegistryViewForProviders yarnRegistryOperations;
-  private FileSystem fs;
-  private boolean timelineServiceEnabled = false;
-  private ServiceTimelinePublisher serviceTimelinePublisher;
-  private ServiceScheduler schedu

[50/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
deleted file mode 100644
index 405f690..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SerializedApplicationReport.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.utils;
-
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.service.utils.ApplicationReportSerDeser;
-import org.codehaus.jackson.annotate.JsonIgnoreProperties;
-import org.codehaus.jackson.map.annotate.JsonSerialize;
-
-import java.io.IOException;
-
-/**
- * Serialized form of an application report which can be persisted
- * and then parsed. It can not be converted back into a
- * real YARN application report
- * 
- * Useful for testing
- */
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
-
-public class SerializedApplicationReport {
-
-  public String applicationId;
-  public String applicationAttemptId;
-  public String name;
-  public String applicationType;
-  public String user;
-  public String queue;
-  public String host;
-  public Integer rpcPort;
-  public String state;
-  public String diagnostics;
-  public String url;
-  /**
-   * This value is non-null only when a report is generated from a submission 
context.
-   * The YARN {@link ApplicationReport} structure does not propagate this value
-   * from the RM.
-   */
-  public Long submitTime;
-  public Long startTime;
-  public Long finishTime;
-  public String finalStatus;
-  public String origTrackingUrl;
-  public Float progress;
-  
-  public SerializedApplicationReport() {
-  }
-  
-  public SerializedApplicationReport(ApplicationReport report) {
-this.applicationId = report.getApplicationId().toString();
-ApplicationAttemptId attemptId = report.getCurrentApplicationAttemptId();
-this.applicationAttemptId = attemptId != null ? attemptId.toString() : 
"N/A";
-this.name = report.getName();
-this.applicationType = report.getApplicationType();
-this.user = report.getUser();
-this.queue = report.getQueue();
-this.host = report.getHost();
-this.rpcPort = report.getRpcPort();
-this.state = report.getYarnApplicationState().toString();
-this.diagnostics = report.getDiagnostics();
-this.startTime = report.getStartTime();
-this.finishTime = report.getFinishTime();
-FinalApplicationStatus appStatus = report.getFinalApplicationStatus();
-this.finalStatus = appStatus == null ? "" : appStatus.toString();
-this.progress = report.getProgress();
-this.url = report.getTrackingUrl();
-this.origTrackingUrl= report.getOriginalTrackingUrl();
-  }
-
-  @Override
-  public String toString() {
-try {
-  return ApplicationReportSerDeser.toString(this);
-} catch (IOException e) {
-  return super.toString();
-}
-  }
-}
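
(For context, a minimal sketch of how the class above is used: capture a live report in its persistable form and print it as JSON. A YarnClient handle and an ApplicationId are assumed to exist already; the wrapper class and variable names are illustrative.)

import java.io.IOException;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.service.utils.SerializedApplicationReport;

public class ReportDumpSketch {
  static void dump(YarnClient yarnClient, ApplicationId appId)
      throws IOException, YarnException {
    ApplicationReport report = yarnClient.getApplicationReport(appId);
    SerializedApplicationReport serialized = new SerializedApplicationReport(report);
    // toString() serializes via ApplicationReportSerDeser and falls back to
    // Object.toString() if serialization fails.
    System.out.println(serialized);
  }
}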

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/ServiceApiUtil.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/

[60/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
new file mode 100644
index 000..b742553
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/test/java/org/apache/hadoop/yarn/service/timelineservice/TestServiceTimelinePublisher.java
@@ -0,0 +1,293 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing, software
+ *  distributed under the License is distributed on an "AS IS" BASIS,
+ *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ *  See the License for the specific language governing permissions and
+ *  limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.timelineservice;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
+import 
org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity.Identifier;
+import org.apache.hadoop.yarn.client.api.TimelineV2Client;
+import org.apache.hadoop.yarn.client.api.impl.TimelineV2ClientImpl;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.api.records.ServiceState;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.Container;
+import org.apache.hadoop.yarn.service.api.records.ContainerState;
+import org.apache.hadoop.yarn.service.api.records.PlacementPolicy;
+import org.apache.hadoop.yarn.service.api.records.Resource;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+/**
+ * Test class for ServiceTimelinePublisher.
+ */
+public class TestServiceTimelinePublisher {
+  private TimelineV2Client timelineClient;
+  private Configuration config;
+  private ServiceTimelinePublisher serviceTimelinePublisher;
+  private static String SERVICE_NAME = "HBASE";
+  private static String SERVICEID = "application_1490093646524_0005";
+  private static String ARTIFACTID = "ARTIFACTID";
+  private static String COMPONENT_NAME = "DEFAULT";
+  private static String CONTAINER_ID =
+  "container_e02_1490093646524_0005_01_01";
+  private static String CONTAINER_IP =
+  "localhost";
+  private static String CONTAINER_HOSTNAME =
+  "cnl124-localhost.site";
+  private static String CONTAINER_BAREHOST =
+  "localhost.com";
+
+  @Before
+  public void setUp() throws Exception {
+config = new Configuration();
+config.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
+config.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
+timelineClient =
+new DummyTimelineClient(ApplicationId.fromString(SERVICEID));
+serviceTimelinePublisher = new ServiceTimelinePublisher(timelineClient);
+timelineClient.init(config);
+serviceTimelinePublisher.init(config);
+timelineClient.start();
+serviceTimelinePublisher.start();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+if (s

[74/75] [abbrv] hadoop git commit: YARN-7113. Clean up packaging and dependencies for yarn-native-services. Contributed by Billie Rinaldi

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/ef230a3a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
index 6e6f4dd..7e53d18 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
@@ -24,35 +24,17 @@ import 
org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.GlobFilter;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.nativeio.NativeIO;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.service.client.params.Arguments;
 import org.apache.hadoop.yarn.service.client.params.SliderActions;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
 import org.apache.hadoop.yarn.service.containerlaunch.ClasspathConstructor;
 import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
-import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
-import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
-import org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes;
 import org.apache.hadoop.yarn.service.exceptions.SliderException;
-import org.apache.zookeeper.server.util.KerberosUtil;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -63,32 +45,19 @@ import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.FilenameFilter;
 import java.io.IOException;
-import java.io.Serializable;
-import java.net.InetSocketAddress;
 import java.net.ServerSocket;
-import java.net.Socket;
 import java.net.URL;
 import java.net.URLDecoder;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.regex.Pattern;
 import java.util.zip.GZIPOutputStream;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipOutputStream;
 
 /**
  * These are slider-specific Util methods
@@ -97,41 +66,6 @@ public final class SliderUtils {
 
   private static final Logger log = LoggerFactory.getLogger(SliderUtils.class);
 
-  /**
-   * Atomic bool to track whether or not process security has already been
-   * turned on (prevents re-entrancy)
-   */
-  private static final AtomicBoolean processSecurityAlreadyInitialized =
-  new AtomicBoolean(false);
-  public static final String JAVA_SECURITY_KRB5_REALM =
-  "java.security.krb5.realm";
-  public static final String JAVA_SECURITY_KRB5_KDC = "java.security.krb5.kdc";
-
-  /**
-   * Winutils
-   */
-  public static final String WINUTILS = "WINUTILS.EXE";
-  /**
-   * name of openssl program
-   */
-  public static final String OPENSSL = "openssl";
-
-  /**
-   * name of python program
-   */
-  public static final String PYTHON = "python";
-
-  /**
-   * type of docker standalone service
-   */
-  public static final String DOCKER = "docker";
-  /**
-   * type of docker on yarn service
-   */
-  public static final String DOCKER_YARN = "yarn_docker";
-

[17/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/AppState.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/AppState.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/AppState.java
deleted file mode 100644
index ba923bc..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/state/AppState.java
+++ /dev/null
@@ -1,2120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.state;
-
-import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.metrics2.lib.MutableGaugeInt;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.util.resource.Resources;
-import org.apache.slider.api.ClusterNode;
-import org.apache.slider.api.InternalKeys;
-import org.apache.slider.api.ServiceApiConstants;
-import org.apache.slider.api.StatusKeys;
-import org.apache.slider.api.proto.Messages;
-import org.apache.slider.api.proto.Messages.ComponentCountProto;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.ApplicationState;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.slider.api.types.ApplicationLivenessInformation;
-import org.apache.slider.api.types.ComponentInformation;
-import org.apache.slider.api.types.RoleStatistics;
-import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadClusterStateException;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.core.exceptions.ErrorStrings;
-import org.apache.slider.core.exceptions.NoSuchNodeException;
-import org.apache.slider.core.exceptions.SliderInternalStateException;
-import org.apache.slider.core.exceptions.TriggerClusterTeardownException;
-import org.apache.slider.core.zk.ZKIntegration;
-import org.apache.slider.providers.PlacementPolicy;
-import org.apache.slider.providers.ProviderRole;
-import org.apache.slider.server.appmaster.management.MetricsAndMonitoring;
-import org.apache.slider.server.appmaster.management.MetricsConstants;
-import org.apache.hadoop.yarn.service.metrics.ServiceMetrics;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.operations.ContainerRequestOperation;
-import org.apache.slider.server.appmaster.operations.UpdateBlacklistOperation;
-import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;

[59/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
deleted file mode 100644
index 8c968dc..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
+++ /dev/null
@@ -1,655 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service;
-
-import com.google.common.cache.CacheBuilder;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.LoadingCache;
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;
-import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
-import org.apache.hadoop.registry.client.binding.RegistryUtils;
-import org.apache.hadoop.registry.client.types.ServiceRecord;
-import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
-import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.service.CompositeService;
-import 
org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.UpdatedContainer;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.client.api.TimelineV2Client;
-import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
-import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.event.AsyncDispatcher;
-import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
-import org.apache.hadoop.yarn.service.api.constants.ServiceApiConstants;
-import org.apache.hadoop.yarn.service.api.records.Application;
-import org.apache.hadoop.yarn.service.api.records.ConfigFile;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEvent;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType;
-import org.apache.hadoop.yarn.service.component.Component;
-import org.apache.hadoop.yarn.service.component.ComponentEvent;
-import org.apache.hadoop.yarn.service.component.ComponentEventType;
-import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
-import org.apache.hadoop.yarn.service.containerlaunch.ContainerLaunchService;
-import org.apache.hadoop.yarn.service.metrics.ServiceMetrics;
-import org.apache.hadoop.yarn.service.provider.ProviderUtils;
-import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
-import org.apache.hadoop.yarn.service.timelineservice.ServiceMetricsSink;
-import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher;
-import org.apa

[64/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
new file mode 100644
index 000..add2475
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.registry;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.registry.client.api.RegistryConstants;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.registry.client.api.BindFlags;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
+
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstanceId;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.join;
+
+/**
+ * Registry view for providers. This tracks where the service
+ * is registered, offers access to the record and other things.
+ */
+public class YarnRegistryViewForProviders {
+  private static final Log LOG =
+  LogFactory.getLog(YarnRegistryViewForProviders.class);
+
+  private final RegistryOperations registryOperations;
+  private final String user;
+  private final String sliderServiceClass;
+  private final String instanceName;
+  /**
+   * Record used where the service registered itself.
+   * Null until the service is registered
+   */
+  private ServiceRecord selfRegistration;
+
+  /**
+   * Path where record was registered
+   * Null until the service is registered
+   */
+  private String selfRegistrationPath;
+
+  public YarnRegistryViewForProviders(RegistryOperations registryOperations,
+  String user,
+  String sliderServiceClass,
+  String instanceName,
+  ApplicationAttemptId applicationAttemptId) {
+Preconditions.checkArgument(registryOperations != null,
+"null registry operations");
+Preconditions.checkArgument(user != null, "null user");
+Preconditions.checkArgument(SliderUtils.isSet(sliderServiceClass),
+"unset service class");
+Preconditions.checkArgument(SliderUtils.isSet(instanceName),
+"instanceName");
+Preconditions.checkArgument(applicationAttemptId != null,
+"null applicationAttemptId");
+this.registryOperations = registryOperations;
+this.user = user;
+this.sliderServiceClass = sliderServiceClass;
+this.instanceName = instanceName;
+  }
+
+  public String getUser() {
+return user;
+  }
+
+
+  private void setSelfRegistration(ServiceRecord selfRegistration) {
+this.selfRegistration = selfRegistration;
+  }
+
+  /**
+   * Get the path to where the service has registered itself.
+   * Null until the service is registered
+   * @return the service registration path.
+   */
+  public String getSelfRegistrationPath() {
+return selfRegistrationPath;
+  }
+
+  /**
+   * Get the absolute path to where the service has registered itself.
+   * This includes the base registry path
+   * Null until the service is registered
+   * @return the service 

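(For orientation, a minimal sketch of constructing the registry view added above. It assumes a started RegistryOperations instance and the AM's ApplicationAttemptId are available; the user, service-class and instance-name strings are illustrative, not part of the commit.)

import org.apache.hadoop.registry.client.api.RegistryOperations;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;

public class RegistryViewSketch {
  // The constructor validates its arguments up front: non-null registry,
  // user and attempt id, plus non-empty service class and instance name.
  static YarnRegistryViewForProviders create(RegistryOperations registryOps,
      ApplicationAttemptId attemptId) {
    return new YarnRegistryViewForProviders(
        registryOps,
        "hbase-user",         // registry user (assumed)
        "org-apache-slider",  // service class path segment (assumed)
        "hbase1",             // service instance name (assumed)
        attemptId);
  }
}
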
[56/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
deleted file mode 100644
index c2866cf..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
+++ /dev/null
@@ -1,218 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.client.params;
-
-import com.beust.jcommander.Parameter;
-import com.beust.jcommander.Parameters;
-import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
-import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
-import org.apache.hadoop.yarn.service.exceptions.UsageException;
-import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
-
-import static 
org.apache.hadoop.yarn.service.client.params.SliderActions.ACTION_REGISTRY;
-import static 
org.apache.hadoop.yarn.service.client.params.SliderActions.DESCRIBE_ACTION_REGISTRY;
-import java.io.File;
-
-/**
- * Registry actions
- * 
- * --instance {app name}, if  a / is in it, refers underneath?
- * --dest {destfile}
- * --list : list instances of slider service
- * --listfiles 
- */
-@Parameters(commandNames = {ACTION_REGISTRY},
-commandDescription = DESCRIBE_ACTION_REGISTRY)
-
-public class ActionRegistryArgs extends AbstractActionArgs {
-
-  public static final String USAGE =
-  "Usage: " + SliderActions.ACTION_REGISTRY
-  + " ("
-  + Arguments.ARG_LIST + "|"
-  + Arguments.ARG_LISTCONF + "|"
-  + Arguments.ARG_LISTEXP + "|"
-  + Arguments.ARG_LISTFILES + "|"
-  + Arguments.ARG_GETCONF + "|"
-  + Arguments.ARG_GETEXP + "> "
-  + Arguments.ARG_NAME + "  "
-  + " )"
-  + "[" + Arguments.ARG_VERBOSE + "] "
-  + "[" + Arguments.ARG_USER + "] "
-  + "[" + Arguments.ARG_OUTPUT + "  ] "
-  + "[" + Arguments.ARG_SERVICETYPE + "  ] "
-  + "[" + Arguments.ARG_FORMAT + " ] "
-  + System.getProperty("line.separator")
-  + "Arguments.ARG_GETEXP only supports " + Arguments.ARG_FORMAT + " json"
-  ;
-  public ActionRegistryArgs() {
-  }
-
-  public ActionRegistryArgs(String name) {
-this.name = name;
-  }
-
-  @Override
-  public String getActionName() {
-return ACTION_REGISTRY;
-  }
-
-  /**
-   * Get the min #of params expected
-   * @return the min number of params in the {@link #parameters} field
-   */
-  @Override
-  public int getMinParams() {
-return 0;
-  }
-  
-  @Parameter(names = {ARG_LIST}, 
-  description = "list services")
-  public boolean list;
-
-  @Parameter(names = {ARG_LISTCONF}, 
-  description = "list configurations")
-  public boolean listConf;
-
-  @Parameter(names = {ARG_GETCONF},
-  description = "get configuration")
-  public String getConf;
-
-  @Parameter(names = {ARG_LISTEXP},
- description = "list exports")
-  public boolean listExports;
-
-  @Parameter(names = {ARG_GETEXP},
- description = "get export")
-  public String getExport;
-
-  @Parameter(names = {ARG_LISTFILES},
-  description = "list files")
-  public String listFiles;
-
-  @Parameter(names = {ARG_GETFILES},
-  description = "get files")
-  public String getFiles;
-
-  //--format 
-  @Parameter(names = ARG_FORMAT,
-  description = "Format for a response: ")
-  public String format = ConfigFormat.XML.toString() ;
-
-  @Parameter(names = {ARG_OUTPUT, ARG_OUTPUT_SHORT, ARG_DEST},
-  description = "Output destination")
-  public File out;
-
-  @Parameter(names = {ARG_NAME},
-  description = "name of

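(For context, a minimal sketch of populating the argument bean deleted above, using only fields visible in this diff; the instance name, configuration name and output path are illustrative.)

import java.io.File;
import org.apache.hadoop.yarn.service.client.params.ActionRegistryArgs;

public class RegistryArgsSketch {
  // Ask the registry action to fetch one published configuration of a named
  // instance and write it to a local file, keeping the default output format
  // (ConfigFormat.XML).
  static ActionRegistryArgs getConfArgs() {
    ActionRegistryArgs args = new ActionRegistryArgs("hbase1");
    args.getConf = "hbase-site";
    args.out = new File("/tmp/hbase-site.xml");
    return args;
  }
}
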
[30/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
deleted file mode 100644
index f1bf2ad..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/client/SliderClientAPI.java
+++ /dev/null
@@ -1,258 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.client;
-
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.service.Service;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.types.NodeInformationList;
-import org.apache.slider.common.params.AbstractClusterBuildingActionArgs;
-import org.apache.slider.common.params.ActionAMSuicideArgs;
-import org.apache.slider.common.params.ActionClientArgs;
-import org.apache.hadoop.yarn.service.client.params.ActionDependencyArgs;
-import org.apache.slider.common.params.ActionDiagnosticArgs;
-import org.apache.hadoop.yarn.service.client.params.ActionFlexArgs;
-import org.apache.slider.common.params.ActionFreezeArgs;
-import org.apache.slider.common.params.ActionKeytabArgs;
-import org.apache.slider.common.params.ActionNodesArgs;
-import org.apache.slider.common.params.ActionKillContainerArgs;
-import org.apache.slider.common.params.ActionListArgs;
-import org.apache.slider.common.params.ActionRegistryArgs;
-import org.apache.slider.common.params.ActionResolveArgs;
-import org.apache.slider.common.params.ActionResourceArgs;
-import org.apache.slider.common.params.ActionStatusArgs;
-import org.apache.slider.common.params.ActionThawArgs;
-import org.apache.slider.common.params.ActionUpgradeArgs;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.SliderException;
-
-import java.io.IOException;
-
-/**
- * Interface of those method calls in the slider API that are intended
- * for direct public invocation.
- * 
- * Stability: evolving
- */
-public interface SliderClientAPI extends Service {
-
-  int actionDestroy(String clustername) throws YarnException, IOException;
-
-  /**
-   * AM to commit an asynchronous suicide
-   */
-  int actionAmSuicide(String clustername,
-  ActionAMSuicideArgs args) throws YarnException, IOException;
-
-  /**
-   * Manage keytabs leveraged by slider
-   *
-   * @param keytabInfo the arguments needed to manage the keytab
-   * @throws YarnException Yarn problems
-   * @throws IOException other problems
-   * @throws BadCommandArgumentsException bad arguments.
-   */
-  int actionKeytab(ActionKeytabArgs keytabInfo)
-  throws YarnException, IOException;
-
-  /**
-   * Manage file resources leveraged by slider
-   *
-   * @param resourceInfo the arguments needed to manage the resource
-   * @throws YarnException Yarn problems
-   * @throws IOException other problems
-   * @throws BadCommandArgumentsException bad arguments.
-   */
-  int actionResource(ActionResourceArgs resourceInfo)
-  throws YarnException, IOException;
-
-  /**
-   * Perform client operations such as install or configure
-   *
-   * @param clientInfo the arguments needed for client operations
-   *
-   * @throws SliderException bad arguments.
-   * @throws IOException problems related to package and destination folders
-   */
-  int actionClient(ActionClientArgs clientInfo)
-  throws IOException, YarnException;
-
-  /**
-   * Update the cluster specification
-   *
-   * @param clustername cluster name
-   * @param buildInfo the arguments needed to update the cluster
-   * @throws YarnException Yarn problems
-   * @throws IOExcept

[23/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/restclient/UgiJerseyBinding.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/restclient/UgiJerseyBinding.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/restclient/UgiJerseyBinding.java
deleted file mode 100644
index bf71861..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/core/restclient/UgiJerseyBinding.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.core.restclient;
-
-import com.google.common.base.Preconditions;
-import com.sun.jersey.api.client.Client;
-import com.sun.jersey.api.client.UniformInterfaceException;
-import com.sun.jersey.api.client.config.ClientConfig;
-import com.sun.jersey.api.client.config.DefaultClientConfig;
-import com.sun.jersey.api.json.JSONConfiguration;
-import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
-import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
-import org.apache.hadoop.conf.Configuration;
-import 
org.apache.hadoop.security.authentication.client.AuthenticationException;
-import org.apache.slider.core.exceptions.ExceptionConverter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.net.HttpURLConnection;
-import java.net.URL;
-
-/**
- * Class to bond to a Jersey client, for UGI integration and SPNEGO.
- * 
- *   Usage: create an instance, then when creating a Jersey Client
- *   pass in to the constructor the handler provided by {@link #getHandler()}
- *
- * see <a href="https://jersey.java.net/apidocs/1.17/jersey/com/sun/jersey/client/urlconnection/HttpURLConnectionFactory.html">Jersey docs</a>
- */
-public class UgiJerseyBinding implements
-HttpURLConnectionFactory {
-  private static final Logger log =
-  LoggerFactory.getLogger(UgiJerseyBinding.class);
-
-  private final UrlConnectionOperations operations;
-  private final URLConnectionClientHandler handler;
-
-  /**
-   * Construct an instance
-   * @param operations operations instance
-   */
-  @SuppressWarnings("ThisEscapedInObjectConstruction")
-  public UgiJerseyBinding(UrlConnectionOperations operations) {
-Preconditions.checkArgument(operations != null, "Null operations");
-this.operations = operations;
-handler = new URLConnectionClientHandler(this);
-  }
-
-  /**
-   * Create an instance off the configuration. The SPNEGO policy
-   * is derived from the current UGI settings.
-   * @param conf config
-   */
-  public UgiJerseyBinding(Configuration conf) {
-this(new UrlConnectionOperations(conf));
-  }
-
-  /**
-   * Get a URL connection. 
-   * @param url URL to connect to
-   * @return the connection
-   * @throws IOException any problem. {@link AuthenticationException} 
-   * errors are wrapped
-   */
-  @Override
-  public HttpURLConnection getHttpURLConnection(URL url) throws IOException {
-try {
-  // open a connection handling status codes and so redirections
-  // but as it opens a connection, it's less useful than you think.
-
-  return operations.openConnection(url);
-} catch (AuthenticationException e) {
-  throw new IOException(e);
-}
-  }
-
-  public UrlConnectionOperations getOperations() {
-return operations;
-  }
-
-  public URLConnectionClientHandler getHandler() {
-return handler;
-  }
-  
-  /**
-   * Get the SPNEGO flag (as found in the operations instance
-   * @return the spnego policy
-   */
-  public boolean isUseSpnego() {
-return operations.isUseSpnego();
-  }
-
-
-  /**
-   * Uprate error codes 400 and up into faults; 
-   * 
-   * see {@link ExceptionConverter#convertJerseyException(String, String, 
UniformInterfaceException)}
-   */
-  public static I

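For reference, the javadoc of the class removed above describes the intended wiring: create a UgiJerseyBinding, then pass the handler returned by getHandler() into the Jersey Client constructor. A minimal sketch of that usage, assuming only the Jersey 1.x and Hadoop classes the file itself imports, plus a placeholder endpoint URL, could look roughly like this (illustration only, not part of the commit):

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.slider.core.restclient.UgiJerseyBinding;

public class UgiJerseyBindingSketch {
  public static void main(String[] args) {
    // SPNEGO policy is derived from the current UGI settings via the Hadoop config.
    Configuration conf = new Configuration();
    UgiJerseyBinding binding = new UgiJerseyBinding(conf);

    // Standard Jersey 1.x client config; enable POJO/JSON mapping.
    ClientConfig clientConfig = new DefaultClientConfig();
    clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);

    // Hand the UGI/SPNEGO-aware handler to the Client so every request goes through it.
    Client client = new Client(binding.getHandler(), clientConfig);

    // Placeholder URL; substitute a real REST endpoint reachable from the client.
    String body = client.resource("http://example.invalid:8088/ws/v1/cluster/info")
        .get(String.class);
    System.out.println(body);
  }
}
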
[47/75] [abbrv] hadoop git commit: Rebased on to trunk, fix conflicts

2017-08-29 Thread jianhe
Rebased on to trunk, fix conflicts


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/5dc3432b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/5dc3432b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/5dc3432b

Branch: refs/heads/yarn-native-services
Commit: 5dc3432bd41fd66aa233012a19d2326fe86b5ffe
Parents: db5888e
Author: Jian He 
Authored: Mon Aug 28 15:05:01 2017 -0700
Committer: Jian He 
Committed: Tue Aug 29 20:45:12 2017 -0700

--
 .../main/java/org/apache/hadoop/yarn/util/BoundedAppender.java   | 4 
 .../server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java   | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dc3432b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
index 1a1593a..e2cb2ee 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BoundedAppender.java
@@ -115,6 +115,10 @@ public class BoundedAppender {
 return messages.length();
   }
 
+  public int getLimit() {
+return limit;
+  }
+
   /**
* Get a string representation of the actual contents, displaying also a
* header and ellipses when there was a truncate.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/5dc3432b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
index 44d6986..c2750c9 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
@@ -1327,7 +1327,7 @@ public class RMAppAttemptImpl implements RMAppAttempt, 
Recoverable {
 // AFTER the initial saving on app-attempt-start
 // These fields can be visible from outside only after they are saved in
 // StateStore
-BoundedAppender diags = new BoundedAppender(diagnostics.limit);
+BoundedAppender diags = new BoundedAppender(diagnostics.getLimit());
 
 // don't leave the tracking URL pointing to a non-existent AM
 if (conf.getBoolean(YarnConfiguration.APPLICATION_HISTORY_ENABLED,





[70/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java
new file mode 100644
index 000..c64b1b5
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Error.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.api.records;
+
+import io.swagger.annotations.ApiModelProperty;
+
+import java.util.Objects;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
+@InterfaceAudience.Public
+@InterfaceStability.Unstable
+@javax.annotation.Generated(value = "class 
io.swagger.codegen.languages.JavaClientCodegen", date = 
"2016-06-02T08:15:05.615-07:00")
+public class Error {
+
+  private Integer code = null;
+  private String message = null;
+  private String fields = null;
+
+  /**
+   **/
+  public Error code(Integer code) {
+this.code = code;
+return this;
+  }
+
+  @ApiModelProperty(example = "null", value = "")
+  @JsonProperty("code")
+  public Integer getCode() {
+return code;
+  }
+
+  public void setCode(Integer code) {
+this.code = code;
+  }
+
+  /**
+   **/
+  public Error message(String message) {
+this.message = message;
+return this;
+  }
+
+  @ApiModelProperty(example = "null", value = "")
+  @JsonProperty("message")
+  public String getMessage() {
+return message;
+  }
+
+  public void setMessage(String message) {
+this.message = message;
+  }
+
+  /**
+   **/
+  public Error fields(String fields) {
+this.fields = fields;
+return this;
+  }
+
+  @ApiModelProperty(example = "null", value = "")
+  @JsonProperty("fields")
+  public String getFields() {
+return fields;
+  }
+
+  public void setFields(String fields) {
+this.fields = fields;
+  }
+
+  @Override
+  public boolean equals(java.lang.Object o) {
+if (this == o) {
+  return true;
+}
+if (o == null || getClass() != o.getClass()) {
+  return false;
+}
+Error error = (Error) o;
+return Objects.equals(this.code, error.code)
+&& Objects.equals(this.message, error.message)
+&& Objects.equals(this.fields, error.fields);
+  }
+
+  @Override
+  public int hashCode() {
+return Objects.hash(code, message, fields);
+  }
+
+  @Override
+  public String toString() {
+StringBuilder sb = new StringBuilder();
+sb.append("class Error {\n");
+
+sb.append("code: ").append(toIndentedString(code)).append("\n");
+sb.append("message: ").append(toIndentedString(message)).append("\n");
+sb.append("fields: ").append(toIndentedString(fields)).append("\n");
+sb.append("}");
+return sb.toString();
+  }
+
+  /**
+   * Convert the given object to string with each line indented by 4 spaces
+   * (except the first line).
+   */
+  private String toIndentedString(java.lang.Object o) {
+if (o == null) {
+  return "null";
+}
+return o.toString().replace("\n", "\n    ");
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/PlacementPolicy.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/api/records/Placemen

[71/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
new file mode 100644
index 000..fb2fd16
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/ServiceScheduler.java
@@ -0,0 +1,654 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service;
+
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;
+import org.apache.hadoop.registry.client.binding.RegistryTypeUtils;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.apache.hadoop.registry.client.types.yarn.PersistencePolicies;
+import org.apache.hadoop.registry.client.types.yarn.YarnRegistryAttributes;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.service.CompositeService;
+import 
org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.NodeReport;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.UpdatedContainer;
+import org.apache.hadoop.yarn.client.api.AMRMClient;
+import org.apache.hadoop.yarn.client.api.TimelineV2Client;
+import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
+import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.event.AsyncDispatcher;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.exceptions.YarnException;
+import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
+import org.apache.hadoop.yarn.service.api.ServiceApiConstants;
+import org.apache.hadoop.yarn.service.api.records.Service;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
+import 
org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEvent;
+import 
org.apache.hadoop.yarn.service.component.instance.ComponentInstanceEventType;
+import org.apache.hadoop.yarn.service.component.Component;
+import org.apache.hadoop.yarn.service.component.ComponentEvent;
+import org.apache.hadoop.yarn.service.component.ComponentEventType;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.containerlaunch.ContainerLaunchService;
+import org.apache.hadoop.yarn.service.provider.ProviderUtils;
+import org.apache.hadoop.yarn.service.registry.YarnRegistryViewForProviders;
+import org.apache.hadoop.yarn.service.timelineservice.ServiceMetricsSink;
+import org.apache.hadoop.yarn.service.timelineservice.ServiceTimelinePublisher;
+import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;

[57/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
deleted file mode 100644
index 1049698..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
+++ /dev/null
@@ -1,872 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.client;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.RetryNTimes;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.registry.client.api.RegistryConstants;
-import org.apache.hadoop.registry.client.api.RegistryOperations;
-import org.apache.hadoop.registry.client.api.RegistryOperationsFactory;
-import org.apache.hadoop.registry.client.binding.RegistryUtils;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.service.CompositeService;
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
-import 
org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
-import org.apache.hadoop.yarn.api.records.ApplicationTimeout;
-import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType;
-import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
-import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.client.api.YarnClientApplication;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.ComponentCountProto;
-import 
org.apache.hadoop.yarn.proto.ClientAMProtocol.FlexComponentsRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusRequestProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
-import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto;
-import org.apache.hadoop.yarn.service.ClientAMProtocol;
-import org.apache.hadoop.yarn.service.ServiceMaster;
-import org.apache.hadoop.yarn.service.api.records.Application;
-import org.apache.hadoop.yarn.service.api.records.Component;
-import 
org.apache.hadoop.yarn.service.client.params.AbstractClusterBuildingActionArgs;
-import org.apache.hadoop.yarn.service.client.params.ActionDependencyArgs;
-import org.apache.hadoop.yarn.service.client.params.ActionFlexArgs;
-import org.apache.hadoop.yarn.service.client.params.Arguments;
-import org.apache.hadoop.yarn.service.client.params.ClientArgs;
-import org.apache.hadoop.yarn.service.client.params.CommonArgs;
-import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
-impo

[45/75] [abbrv] hadoop git commit: YARN-5244. Documentation required for DNS Server implementation. Contributed by Jon Maron

2017-08-29 Thread jianhe
YARN-5244. Documentation required for DNS Server implementation. Contributed by 
Jon Maron


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/963f2d94
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/963f2d94
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/963f2d94

Branch: refs/heads/yarn-native-services
Commit: 963f2d94df45902d98df9a1466818ba8f6bfa238
Parents: 394183c
Author: Gour Saha 
Authored: Thu Aug 24 19:05:13 2017 -0700
Committer: Jian He 
Committed: Tue Aug 29 20:45:11 2017 -0700

--
 hadoop-project/src/site/site.xml|   7 +-
 .../native-services/NativeServicesDiscovery.md  | 127 +++
 .../native-services/NativeServicesIntro.md  |   0
 .../src/site/resources/images/dns_overview.png  | Bin 0 -> 41908 bytes
 .../resources/images/dns_record_creation.jpeg   | Bin 0 -> 51911 bytes
 .../resources/images/dns_record_removal.jpeg| Bin 0 -> 58041 bytes
 6 files changed, 133 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hadoop/blob/963f2d94/hadoop-project/src/site/site.xml
--
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
index e7af227..837ac3f 100644
--- a/hadoop-project/src/site/site.xml
+++ b/hadoop-project/src/site/site.xml
@@ -148,7 +148,12 @@
   
   
 
-
+
+
+  
+  
+
+
 
   
   

http://git-wip-us.apache.org/repos/asf/hadoop/blob/963f2d94/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/native-services/NativeServicesDiscovery.md
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/native-services/NativeServicesDiscovery.md
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/native-services/NativeServicesDiscovery.md
new file mode 100644
index 000..4a048af
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/markdown/native-services/NativeServicesDiscovery.md
@@ -0,0 +1,127 @@
+# YARN DNS Server
+## Introduction
+
+The YARN DNS Server provides a standard DNS interface to the information 
posted into the YARN Registry by deployed applications. The DNS service serves 
the following functions:
+
+1. **Exposing existing service discovery information via DNS** - 
Information provided in
+the current YARN service registry’s records will be converted into DNS 
entries, thus
+allowing users to discover information about YARN applications using standard 
DNS
+client mechanisms (e.g. a DNS SRV Record specifying the hostname and port
+number for services).
+2. **Enabling Container to IP mappings** - Enables discovery of the IPs 
of containers via
+standard DNS lookups. Given the availability of the records via DNS, container
+name-based communication will be facilitated (e.g. ‘curl
+http://myContainer.myDomain.com/endpoint’).
+
+## Service Properties
+
+The existing YARN Service Registry is leveraged as the source of information 
for the DNS Service.
+
+The following core functions are supported by the DNS Server:
+
+### Functional properties
+
+1. Supports creation of DNS records for end-points of the deployed YARN 
applications
+2. Record names remain unchanged during restart of containers and/or 
applications
+3. Supports reverse lookups (name based on IP).
+4. Supports security using the standards defined by The Domain Name System 
Security
+Extensions (DNSSEC)
+5. Highly available
+6. Scalable - The service provides the responsiveness (e.g. low latency) 
required to
+respond to DNS queries (timeouts yield attempts to invoke other configured name
+servers).
+
+### Deployment properties
+
+1. Supports integration with existing DNS assets (e.g. a corporate DNS server) 
by acting as
+a DNS server for a Hadoop cluster zone/domain. The server is not intended to 
act as a
+primary DNS server and does not forward requests to other servers.
+2. The DNS Server exposes a port that can receive both TCP and UDP requests per
+DNS standards. The default port for DNS protocols is in a restricted, 
administrative port
+range (53), so the port is configurable for deployments in which the service 
may
+not be managed via an administrative account.
+
+## DNS Record Name Structure
+
+The DNS names of generated records are composed from the following elements 
(labels). Note that these elements must be compatible with DNS conventions (see 
“Preferred Name Syntax” in RFC 1035):
+
+* **domain** - the name of the cluster DNS domain. This name is provided 
as a
+configuration property. In addition, it is this name that is configured at a 
parent DNS
+server as the zone name for the defined yDNS zone (the zo

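The documentation above explains that deployed services become discoverable through ordinary DNS clients, for example via SRV records carrying a host and port. As a rough sketch of what such a lookup looks like from Java, the fragment below queries an SRV record through JNDI; the DNS server address and the record name are made-up placeholders shaped like the label structure described above, not values taken from this commit:

import java.util.Hashtable;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import javax.naming.directory.DirContext;
import javax.naming.directory.InitialDirContext;

public class YarnDnsSrvLookupSketch {
  public static void main(String[] args) throws NamingException {
    Hashtable<String, String> env = new Hashtable<>();
    env.put("java.naming.factory.initial", "com.sun.jndi.dns.DnsContextFactory");
    // Placeholder registry DNS endpoint; the port is configurable per the doc above.
    env.put("java.naming.provider.url", "dns://registry-dns.example.invalid:53");

    DirContext ctx = new InitialDirContext(env);
    try {
      // Placeholder record name assembled from component/user/domain style labels.
      Attributes attrs = ctx.getAttributes(
          "_http._tcp.httpd.user1.example.invalid", new String[] {"SRV"});
      Attribute srv = attrs.get("SRV");
      if (srv != null) {
        for (int i = 0; i < srv.size(); i++) {
          // Each SRV value reads "priority weight port target".
          System.out.println(srv.get(i));
        }
      }
    } finally {
      ctx.close();
    }
  }
}
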
[19/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/management/MetricsBindingService.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/management/MetricsBindingService.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/management/MetricsBindingService.java
deleted file mode 100644
index 864a1cf..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/management/MetricsBindingService.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.management;
-
-import com.codahale.metrics.JmxReporter;
-import com.codahale.metrics.Metric;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.MetricSet;
-import com.codahale.metrics.ScheduledReporter;
-import com.codahale.metrics.Slf4jReporter;
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.service.CompositeService;
-import org.apache.slider.server.services.workflow.ClosingService;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-/**
- * YARN service which hooks up Codahale metrics to 
- * JMX, and, if enabled Ganglia and/or an SLF4J log.
- */
-public class MetricsBindingService extends CompositeService
-implements MetricsKeys {
-  protected static final Logger log =
-  LoggerFactory.getLogger(MetricsBindingService.class);
-  private final MetricRegistry metrics;
-
-  private String reportingDetails = "not started";
-
-
-  public MetricsBindingService(String name,
-  MetricRegistry metrics) {
-super(name);
-Preconditions.checkArgument(metrics != null, "Null metrics");
-this.metrics = metrics;
-  }
-
-  /**
-   * Instantiate...create a metric registry in the process
-   * @param name service name
-   */
-  public MetricsBindingService(String name) {
-this(name, new MetricRegistry());
-  }
-
-  /**
-   * Accessor for the metrics instance
-   * @return the metrics
-   */
-  public MetricRegistry getMetrics() {
-return metrics;
-  }
-
-  @Override
-  protected void serviceStart() throws Exception {
-super.serviceStart();
-
-StringBuilder summary = new StringBuilder();
-Configuration conf = getConfig();
-
-summary.append("Reporting to JMX");
-// always start the JMX binding
-JmxReporter jmxReporter;
-jmxReporter = JmxReporter.forRegistry(metrics).build();
-jmxReporter.start();
-addService(new ClosingService<>(jmxReporter));
-
-
-// Ganglia
-if (conf.getBoolean(METRICS_GANGLIA_ENABLED, false)) {
-  log.warn("Ganglia integration is not implemented");
-/*
-  // This is all disabled due to transitive dependencies on an LGPL library
-  com.codahale.metrics.ganglia.GangliaReporter gangliaReporter;
-  String host = conf.getTrimmed(METRICS_GANGLIA_HOST, "");
-  int port = conf.getInt(METRICS_GANGLIA_PORT, DEFAULT_GANGLIA_PORT);
-  int interval = conf.getInt(METRICS_GANGLIA_REPORT_INTERVAL, 60);
-  int ttl = 1;
-  info.ganglia.gmetric4j.gmetric.GMetric.UDPAddressingMode
-  mcast = 
info.ganglia.gmetric4j.gmetric.GMetric.UDPAddressingMode.getModeForAddress(host);
-  boolean ganglia31 = conf.getBoolean(METRICS_GANGLIA_VERSION_31, true);
-
-  final info.ganglia.gmetric4j.gmetric.GMetric ganglia =
-  new info.ganglia.gmetric4j.gmetric.GMetric(
-  host,
-  port,
-  mcast,
-  ttl,
-  ganglia31);
-  gangliaReporter = 
com.codahale.metrics.ganglia.GangliaReporter.forRegistry(metrics)
-

[20/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionKillContainer.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionKillContainer.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionKillContainer.java
deleted file mode 100644
index 7446e82..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionKillContainer.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.actions;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.slider.server.appmaster.SliderAppMaster;
-import org.apache.slider.server.appmaster.operations.AbstractRMOperation;
-import org.apache.slider.server.appmaster.operations.ContainerReleaseOperation;
-import org.apache.slider.server.appmaster.operations.RMOperationHandler;
-import org.apache.slider.server.appmaster.operations.RMOperationHandlerActions;
-import org.apache.slider.server.appmaster.state.AppState;
-
-import java.util.LinkedList;
-import java.util.List;
-import java.util.concurrent.TimeUnit;
-
-/**
- * Kill a specific container
- */
-public class ActionKillContainer extends AsyncAction {
-
-  /**
-   *  container to kill
-   */
-  private final ContainerId containerId;
-
-  /**
-   *  handler for the operation
-   */
-  private final RMOperationHandlerActions operationHandler;
-
-  /**
-   * Kill a container
-   * @param containerId container to kill
-   * @param delay
-   * @param timeUnit
-   * @param operationHandler
-   */
-  public ActionKillContainer(
-  ContainerId containerId,
-  long delay,
-  TimeUnit timeUnit,
-  RMOperationHandlerActions operationHandler) {
-super("kill container", delay, timeUnit, ATTR_CHANGES_APP_SIZE);
-this.operationHandler = operationHandler;
-Preconditions.checkArgument(containerId != null);
-
-this.containerId = containerId;
-  }
-
-  /**
-   * Get the container ID to kill
-   * @return
-   */
-  public ContainerId getContainerId() {
-return containerId;
-  }
-
-  @Override
-  public void execute(SliderAppMaster appMaster,
-  QueueAccess queueService,
-  AppState appState) throws Exception {
-  List opsList = new LinkedList<>();
-ContainerReleaseOperation release = new 
ContainerReleaseOperation(containerId);
-opsList.add(release);
-//now apply the operations
-operationHandler.execute(opsList);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionRegisterServiceInstance.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionRegisterServiceInstance.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionRegisterServiceInstance.java
deleted file mode 100644
index 0d7f7d4..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/actions/ActionRegisterServiceInstance.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional informat

[68/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
new file mode 100644
index 000..3e53418
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/client/params/ActionRegistryArgs.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.client.params;
+
+import com.beust.jcommander.Parameter;
+import com.beust.jcommander.Parameters;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
+import org.apache.hadoop.yarn.service.exceptions.UsageException;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+
+import static 
org.apache.hadoop.yarn.service.client.params.SliderActions.ACTION_REGISTRY;
+import static 
org.apache.hadoop.yarn.service.client.params.SliderActions.DESCRIBE_ACTION_REGISTRY;
+import java.io.File;
+
+/**
+ * Registry actions
+ * 
+ * --instance {app name}, if  a / is in it, refers underneath?
+ * --dest {destfile}
+ * --list : list instances of slider service
+ * --listfiles 
+ */
+@Parameters(commandNames = {ACTION_REGISTRY},
+commandDescription = DESCRIBE_ACTION_REGISTRY)
+
+public class ActionRegistryArgs extends AbstractActionArgs {
+
+  public static final String USAGE =
+  "Usage: " + SliderActions.ACTION_REGISTRY
+  + " ("
+  + Arguments.ARG_LIST + "|"
+  + Arguments.ARG_LISTCONF + "|"
+  + Arguments.ARG_LISTEXP + "|"
+  + Arguments.ARG_LISTFILES + "|"
+  + Arguments.ARG_GETCONF + "|"
+  + Arguments.ARG_GETEXP + "> "
+  + Arguments.ARG_NAME + "  "
+  + " )"
+  + "[" + Arguments.ARG_VERBOSE + "] "
+  + "[" + Arguments.ARG_USER + "] "
+  + "[" + Arguments.ARG_OUTPUT + "  ] "
+  + "[" + Arguments.ARG_SERVICETYPE + "  ] "
+  + "[" + Arguments.ARG_FORMAT + " ] "
+  + System.getProperty("line.separator")
+  + "Arguments.ARG_GETEXP only supports " + Arguments.ARG_FORMAT + " json"
+  ;
+  public ActionRegistryArgs() {
+  }
+
+  public ActionRegistryArgs(String name) {
+this.name = name;
+  }
+
+  @Override
+  public String getActionName() {
+return ACTION_REGISTRY;
+  }
+
+  /**
+   * Get the min #of params expected
+   * @return the min number of params in the {@link #parameters} field
+   */
+  @Override
+  public int getMinParams() {
+return 0;
+  }
+  
+  @Parameter(names = {ARG_LIST}, 
+  description = "list services")
+  public boolean list;
+
+  @Parameter(names = {ARG_LISTCONF}, 
+  description = "list configurations")
+  public boolean listConf;
+
+  @Parameter(names = {ARG_GETCONF},
+  description = "get configuration")
+  public String getConf;
+
+  @Parameter(names = {ARG_LISTEXP},
+ description = "list exports")
+  public boolean listExports;
+
+  @Parameter(names = {ARG_GETEXP},
+ description = "get export")
+  public String getExport;
+
+  @Parameter(names = {ARG_LISTFILES},
+  description = "list files")
+  public String listFiles;
+
+  @Parameter(names = {ARG_GETFILES},
+  description = "get files")
+  public String getFiles;
+
+  //--format 
+  @Parameter(names = ARG_FORMAT,
+  description = "Format for a response: ")
+  public String format = ConfigFormat.XML.toString() ;
+
+  @Parameter(names = {ARG_OUTPUT, ARG_OUTPUT_SHORT, ARG_DEST},
+  description = "Output destination")
+  public File out;
+
+  @Parameter(names = {ARG_NAME},
+  descriptio

[35/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
new file mode 100644
index 000..415392a
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/SliderUtils.java
@@ -0,0 +1,1699 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
+import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
+import org.apache.commons.lang.ArrayUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.GlobFilter;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.io.nativeio.NativeIO;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.service.client.params.Arguments;
+import org.apache.hadoop.yarn.service.client.params.SliderActions;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.containerlaunch.ClasspathConstructor;
+import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
+import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
+import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
+import org.apache.hadoop.yarn.service.exceptions.LauncherExitCodes;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.zookeeper.server.util.KerberosUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.Serializable;
+import java.net.InetSocketAddress;
+import java.net.ServerSocket;
+import java.net.Socket;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.regex.Pattern;
+import java.util.zip.GZIPOutputStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+/**
+ * These are slider-specific Util methods
+ */
+public final class SliderUtils {
+
+  private static final Logger log = LoggerFactory.getLogger(SliderUtils.class);
+
+  /**
+   * Atomic boo

[13/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
deleted file mode 100644
index 3e9b764..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/server/appmaster/web/rest/publisher/PublisherResource.java
+++ /dev/null
@@ -1,271 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.web.rest.publisher;
-
-import org.apache.hadoop.yarn.webapp.NotFoundException;
-import org.apache.slider.core.registry.docstore.ConfigFormat;
-import org.apache.slider.core.registry.docstore.PublishedConfigSet;
-import org.apache.slider.core.registry.docstore.PublishedConfiguration;
-import 
org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
-import org.apache.slider.core.registry.docstore.PublishedExports;
-import org.apache.slider.core.registry.docstore.PublishedExportsSet;
-import org.apache.slider.core.registry.docstore.UriMap;
-import org.apache.slider.server.appmaster.state.StateAccessForProviders;
-import org.apache.slider.server.appmaster.web.WebAppApi;
-import org.apache.slider.server.appmaster.web.rest.AbstractSliderResource;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.servlet.http.HttpServletResponse;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.UriInfo;
-import java.io.IOException;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static 
org.apache.slider.server.appmaster.web.rest.RestPaths.PUBLISHED_CONFIGURATION_REGEXP;
-import static 
org.apache.slider.server.appmaster.web.rest.RestPaths.PUBLISHED_CONFIGURATION_SET_REGEXP;
-
-/**
- * This publishes configuration sets
- */
-public class PublisherResource extends AbstractSliderResource {
-  protected static final Logger log =
-  LoggerFactory.getLogger(PublisherResource.class);
-  public static final String EXPORTS_NAME = "exports";
-  public static final String EXPORTS_RESOURCES_PATH = "/" + EXPORTS_NAME;
-  public static final String EXPORT_RESOURCE_PATH = EXPORTS_RESOURCES_PATH + 
"/{exportname}" ;
-  public static final String SET_NAME =
-  "{setname: " + PUBLISHED_CONFIGURATION_SET_REGEXP + "}";
-  public static final String SETNAME = "setname";
-  public static final String CLASSPATH = "/classpath";
-  public static final String CONFIG = "config";
-  
-  public static final String SETNAME_PATTERN = 
-  "{"+ SETNAME+": " + PUBLISHED_CONFIGURATION_SET_REGEXP + "}";
-  private static final String CONFIG_PATTERN =
-  SETNAME_PATTERN + "/{"+ CONFIG +": " + PUBLISHED_CONFIGURATION_REGEXP + 
"}";
-  private final StateAccessForProviders appState;
-
-  public PublisherResource(WebAppApi slider) {
-super(slider);
-appState = slider.getAppState();
-  }
-
-  private void init(HttpServletResponse res, UriInfo uriInfo) {
-res.setContentType(null);
-log.debug(uriInfo.getRequestUri().toString());
-  }
- 
-  /**
-   * Get a named config set 
-   * @param setname name of the config set
-   * @return the config set
-   * @throws NotFoundException if there was no matching set
-   */
-  private PublishedConfigSet getConfigSet(String setname) {
-PublishedConfigSet configSet =
-appState.getPublishedConfigSet(setname);
-if (configSet == null) {
-  thr

[27/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/SliderUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/SliderUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/SliderUtils.java
deleted file mode 100644
index fc57c82..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/SliderUtils.java
+++ /dev/null
@@ -1,2548 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.common.tools;
-
-import com.google.common.base.Preconditions;
-import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
-import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.GlobFilter;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.io.nativeio.NativeIO;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.ExitUtil;
-import org.apache.hadoop.util.Shell;
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.NodeReport;
-import org.apache.hadoop.yarn.api.records.YarnApplicationState;
-import org.apache.hadoop.yarn.client.api.AMRMClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.slider.Slider;
-import org.apache.slider.api.RoleKeys;
-import org.apache.slider.api.types.ContainerInformation;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.apache.hadoop.yarn.service.client.params.Arguments;
-import org.apache.hadoop.yarn.service.client.params.SliderActions;
-import org.apache.slider.core.exceptions.BadClusterStateException;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.core.exceptions.SliderException;
-import org.apache.slider.core.launch.ClasspathConstructor;
-import org.apache.slider.core.main.LauncherExitCodes;
-import org.apache.slider.server.services.utility.PatternValidator;
-import org.apache.slider.server.services.workflow.ForkedProcessService;
-import org.apache.zookeeper.server.util.KerberosUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.PrintWriter;
-import java.io.Serializable;
-import java.io.StringWriter;
-import java.net.InetSocketAddress;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Date;
-import java.util.EnumSet;
-import ja

[28/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/ConfigHelper.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/ConfigHelper.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/ConfigHelper.java
deleted file mode 100644
index 64fd8ae..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/slider/common/tools/ConfigHelper.java
+++ /dev/null
@@ -1,611 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.common.tools;
-
-import com.google.common.base.Preconditions;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Document;
-import org.xml.sax.SAXException;
-
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.ByteArrayInputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.StringWriter;
-import java.net.MalformedURLException;
-import java.net.URL;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-
-/**
- * Methods to aid in config, both in the Configuration class and
- * with other parts of setting up Slider-initiated processes.
- *
- * Some of the methods take an argument of a map iterable for their sources; this allows
- * the same method
- */
-public class ConfigHelper {
-  private static final Logger log = LoggerFactory.getLogger(ConfigHelper.class);
-
-  /**
-   * Dump the (sorted) configuration
-   * @param conf config
-   * @return the sorted keyset
-   */
-  public static Set<String> dumpConf(Configuration conf) {
-    Set<String> keys = sortedConfigKeys(conf);
-    for (String key : keys) {
-      log.info("{}={}", key, conf.get(key));
-    }
-    return keys;
-  }
-
-  /**
-   * Take a configuration and return a sorted set
-   * @param conf config
-   * @return the sorted keyset
-
-   */
-  public static Set<String> sortedConfigKeys(Iterable<Map.Entry<String, String>> conf) {
-    TreeSet<String> sorted = new TreeSet<String>();
-    for (Map.Entry<String, String> entry : conf) {
-      sorted.add(entry.getKey());
-    }
-    return sorted;
-  }
-
-  /**
-   * Set an entire map full of values
-   *
-   * @param config config to patch
-   * @param map map of data
-   * @param origin origin data
-   */
-  public static void addConfigMap(Configuration config,
-      Map<String, String> map,
-      String origin) throws BadConfigException {
-    addConfigMap(config, map.entrySet(), origin);
-  }
-  
-  /**
-   * Set an entire map full of values
-   *
-   * @param config config to patch
-   * @param map map of data
-   * @param origin origin data
-   */
-  public static void addConfigMap(Configuration config,
-      Iterable<Map.Entry<String, String>> map,
-      String origin) throws BadConfigException {
-    for (Map.Entry<String, String> mapEntry : map) {
-      String key = mapEntry.getKey();
-      String value = mapEntry.getValue();
-      if (value == null) {
-        throw new BadConfigException("Null value for property " + key);
-      }
-      config.set(key, value, origin);
-    }
-  }
-
-
-  /**
-   * Save a config file in a destination 
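The addConfigMap helpers removed above copy a map of key/value pairs into a Hadoop Configuration, tagging each entry with an origin string for provenance and rejecting null values. A minimal sketch of how such a helper was typically driven; the keys, values and origin label here are made up for illustration, and the java.util and Configuration imports are assumed:

  // Sketch only: hypothetical overrides applied through the removed ConfigHelper.
  void applyOverrides(Configuration conf) throws BadConfigException {
    Map<String, String> overrides = new HashMap<>();
    overrides.put("site.fs.defaultFS", "hdfs://nn:8020");
    overrides.put("site.yarn.resourcemanager.address", "rm:8032");
    // Each entry is copied into conf and tagged with the origin string;
    // a null value triggers a BadConfigException.
    ConfigHelper.addConfigMap(conf, overrides, "cluster options");
  }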

[09/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/client/TestClientBadArgs.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/client/TestClientBadArgs.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/client/TestClientBadArgs.java
deleted file mode 100644
index 7b0586e..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/client/TestClientBadArgs.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing, software
- *  distributed under the License is distributed on an "AS IS" BASIS,
- *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- *  See the License for the specific language governing permissions and
- *  limitations under the License.
- */
-
-package org.apache.slider.client;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.service.client.params.Arguments;
-import org.apache.hadoop.yarn.service.client.params.SliderActions;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.ErrorStrings;
-import org.apache.slider.core.exceptions.UsageException;
-import org.apache.slider.utils.SliderTestBase;
-import org.junit.Ignore;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Arrays;
-
-/**
- * Test the argument parsing/validation logic.
- */
-public class TestClientBadArgs extends SliderTestBase {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(TestClientBadArgs.class);
-
-  //@Test
-  public void testNoAction() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- "Usage: slider COMMAND",
- EMPTY_LIST);
-
-  }
-
-  //@Test
-  public void testUnknownAction() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- "not-a-known-action",
- Arrays.asList("not-a-known-action"));
-  }
-
-  //@Test
-  public void testActionWithoutOptions() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- "Usage: slider build ",
- Arrays.asList(SliderActions.ACTION_BUILD));
-  }
-
-  //@Test
-  public void testActionWithoutEnoughArgs() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- ErrorStrings.ERROR_NOT_ENOUGH_ARGUMENTS,
- Arrays.asList(SliderActions.ACTION_START));
-  }
-
-  //@Test
-  public void testActionWithTooManyArgs() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- ErrorStrings.ERROR_TOO_MANY_ARGUMENTS,
- Arrays.asList(SliderActions.ACTION_HELP,
- "hello, world"));
-  }
-
-  //@Test
-  public void testBadImageArg() throws Throwable {
-launchExpectingException(SliderClient.class,
- createTestConfig(),
- "Unknown option: --image",
-Arrays.asList(SliderActions.ACTION_HELP,
- Arguments.ARG_IMAGE));
-  }
-
-  //@Test
-  public void testRegistryUsage() throws Throwable {
-Throwable exception = launchExpectingException(SliderClient.class,
-createTestConfig(),
-"org.apache.slider.core.exceptions.UsageException: Argument --name " +
-"missing",
-Arrays.asList(SliderActions.ACTION_REGISTRY));
-assertTrue(exception instanceof UsageException);
-LOG.info(exception.toString());
-  }
-
-  //@Test
-  public void testRegistryExportBadUsage1() throws Throwable {
-Throwabl

[36/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
new file mode 100644
index 000..c0712c3
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/utils/KerberosDiags.java
@@ -0,0 +1,680 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.utils;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SaslPropertiesResolver;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.ExitUtil;
+import org.apache.hadoop.util.Shell;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.crypto.Cipher;
+import java.io.Closeable;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.lang.reflect.InvocationTargetException;
+import java.net.InetAddress;
+import java.security.NoSuchAlgorithmException;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+import java.util.regex.Pattern;
+
+import static org.apache.hadoop.security.UserGroupInformation.*;
+import static org.apache.hadoop.security.authentication.util.KerberosUtil.*;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.*;
+
+/**
+ * Kerberos diagnostics
+ * At some point this may move to hadoop core, so please keep use of slider
+ * methods and classes to ~0.
+ *
+ * This operation expands some of the diagnostic output of the security code,
+ * but not all. For completeness
+ *
+ * Set the environment variable {@code HADOOP_JAAS_DEBUG=true}
+ * Set the log level for {@code org.apache.hadoop.security=DEBUG}
+ */
+public class KerberosDiags implements Closeable {
+
+  private static final Logger LOG = LoggerFactory.getLogger(KerberosDiags.class);
+  public static final String KRB5_CCNAME = "KRB5CCNAME";
+  public static final String JAVA_SECURITY_KRB5_CONF
+= "java.security.krb5.conf";
+  public static final String JAVA_SECURITY_KRB5_REALM
+= "java.security.krb5.realm";
+  public static final String SUN_SECURITY_KRB5_DEBUG
+= "sun.security.krb5.debug";
+  public static final String SUN_SECURITY_SPNEGO_DEBUG
+= "sun.security.spnego.debug";
+  public static final String SUN_SECURITY_JAAS_FILE
+= "java.security.auth.login.config";
+  public static final String KERBEROS_KINIT_COMMAND
+= "hadoop.kerberos.kinit.command";
+  public static final String HADOOP_AUTHENTICATION_IS_DISABLED
+  = "Hadoop authentication is disabled";
+  public static final String UNSET = "(unset)";
+  public static final String NO_DEFAULT_REALM = "Cannot locate default realm";
+
+  private final Configuration conf;
+  private final List<String> services;
+  private final PrintStream out;
+  private final File keytab;
+  private final String principal;
+  private final long minKeyLength;
+  private final boolean securityRequired;
+
+  public static final String CAT_JVM = "JVM";
+  public static final String CAT_JAAS = "JAAS";
+  public static final String CAT_CONFIG = "CONFIG";
+  public static final String CAT_LOGIN = "LOGIN";
+  public static final String CAT_KERBEROS = "KERBEROS";
+  public static fina
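The constants above name the standard JVM and Hadoop switches that the diagnostics report on. A hedged sketch of flipping the JVM-side flags before forcing a secure login; conf, principal and keytabPath are placeholders, and HADOOP_JAAS_DEBUG=true has to be exported in the process environment (for example via yarn-env.sh) rather than set from Java:

  // Sketch only: enables the same debug switches KerberosDiags inspects.
  static void enableKerberosDebug(Configuration conf, String principal, String keytabPath)
      throws IOException {
    System.setProperty("sun.security.krb5.debug", "true");
    System.setProperty("sun.security.spnego.debug", "true");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(principal, keytabPath);
  }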

[05/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/history/TestRoleHistoryFindNodesForNewInstances.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/history/TestRoleHistoryFindNodesForNewInstances.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/history/TestRoleHistoryFindNodesForNewInstances.java
deleted file mode 100644
index ece65ba..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/slider/server/appmaster/model/history/TestRoleHistoryFindNodesForNewInstances.java
+++ /dev/null
@@ -1,177 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.slider.server.appmaster.model.history;
-
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.server.appmaster.model.mock.BaseMockAppStateTest;
-import org.apache.slider.server.appmaster.model.mock.MockFactory;
-import org.apache.slider.server.appmaster.model.mock.MockRoleHistory;
-import org.apache.slider.server.appmaster.state.ContainerOutcome;
-import org.apache.slider.server.appmaster.state.NodeEntry;
-import org.apache.slider.server.appmaster.state.NodeInstance;
-import org.apache.slider.server.appmaster.state.RoleHistory;
-import org.apache.slider.server.appmaster.state.RoleStatus;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-/**
- * Testing finding nodes for new instances.
- *
- * This stresses the non-AA codepath
- */
-public class TestRoleHistoryFindNodesForNewInstances extends
-BaseMockAppStateTest {
-  private static final Logger LOG =
-  LoggerFactory.getLogger(TestRoleHistoryFindNodesForNewInstances.class);
-
-  public TestRoleHistoryFindNodesForNewInstances() throws BadConfigException {
-  }
-
-  @Override
-  public String getTestName() {
-return "TestFindNodesForNewInstances";
-  }
-
-  private NodeInstance age1Active4;
-  private NodeInstance age2Active2;
-  private NodeInstance age3Active0;
-  private NodeInstance age4Active1;
-  private NodeInstance age2Active0;
-
-  private RoleHistory roleHistory = new MockRoleHistory(MockFactory.ROLES);
-
-  private RoleStatus roleStat;
-  private RoleStatus roleStat2;
-
-  @Override
-  public void setup() throws Exception {
-super.setup();
-
-age1Active4 = nodeInstance(1, 4, 0, 0);
-age2Active2 = nodeInstance(2, 2, 0, 1);
-age3Active0 = nodeInstance(3, 0, 0, 0);
-age4Active1 = nodeInstance(4, 1, 0, 0);
-age2Active0 = nodeInstance(2, 0, 0, 0);
-
-roleHistory.insert(Arrays.asList(age2Active2, age2Active0, age4Active1,
-age1Active4, age3Active0));
-roleHistory.buildRecentNodeLists();
-
-roleStat = getRole0Status();
-roleStat2 = getRole2Status();
-  }
-
-  public List<NodeInstance> findNodes(int count) {
-    return findNodes(count, roleStat);
-  }
-
-  public List<NodeInstance> findNodes(int count, RoleStatus roleStatus) {
-    List<NodeInstance> found = new ArrayList<>();
-    for (int i = 0; i < count; i++) {
-      NodeInstance f = roleHistory.findRecentNodeForNewInstance(roleStatus);
-      if (f != null) {
-        found.add(f);
-      }
-    }
-    return found;
-  }
-
-  //@Test
-  public void testFind1NodeR0() throws Throwable {
-NodeInstance found = roleHistory.findRecentNodeForNewInstance(roleStat);
-LOG.info("found: {}", found);
-assertTrue(Arrays.asList(age3Active0).contains(found));
-  }
-
-  //@Test
-  public void testFind2NodeR0() throws Throwable {
-NodeInstance found = roleHistory.findRecentNodeForNewInstance(roleStat);
-LOG.info("found: {}", found);
-assertTrue(Arrays.asList(age2Active0, age3Active0).contains(found));
-NodeInstance found2 = roleHistory.findRece

[41/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
index 0ed4860..1049698 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/client/ServiceClient.java
@@ -22,6 +22,8 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.RetryNTimes;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -40,6 +42,7 @@ import 
org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsReque
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
+import org.apache.hadoop.yarn.api.records.ApplicationTimeout;
 import org.apache.hadoop.yarn.api.records.ApplicationTimeoutType;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -58,34 +61,32 @@ import 
org.apache.hadoop.yarn.proto.ClientAMProtocol.GetStatusResponseProto;
 import org.apache.hadoop.yarn.proto.ClientAMProtocol.StopRequestProto;
 import org.apache.hadoop.yarn.service.ClientAMProtocol;
 import org.apache.hadoop.yarn.service.ServiceMaster;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.client.params.AbstractClusterBuildingActionArgs;
 import org.apache.hadoop.yarn.service.client.params.ActionDependencyArgs;
 import org.apache.hadoop.yarn.service.client.params.ActionFlexArgs;
 import org.apache.hadoop.yarn.service.client.params.Arguments;
 import org.apache.hadoop.yarn.service.client.params.ClientArgs;
 import org.apache.hadoop.yarn.service.client.params.CommonArgs;
 import org.apache.hadoop.yarn.service.conf.SliderExitCodes;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
 import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
 import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
 import org.apache.hadoop.yarn.service.provider.ProviderUtils;
+import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.utils.ServiceRegistryUtils;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.util.Times;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.common.params.AbstractClusterBuildingActionArgs;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadClusterStateException;
-import org.apache.slider.core.exceptions.BadConfigException;
-import org.apache.slider.core.exceptions.SliderException;
-import org.apache.slider.core.exceptions.UsageException;
-import org.apache.slider.core.launch.ClasspathConstructor;
-import org.apache.slider.core.launch.JavaCommandLineBuilder;
-import org.apache.slider.core.registry.SliderRegistryUtils;
-import org.apache.slider.core.zk.ZKIntegration;
-import org.apache.slider.core.zk.ZookeeperUtils;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
+import org.apache.hadoop.yarn.service.exceptions.BadClusterStateException;
+import org.apache.hadoop.yarn.service.exceptions.BadConfigException;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.exceptions.UsageException;
+import org.apache.hadoop.yarn.service.containerlaunch.ClasspathConstructor;
+import org.apache.hadoop.yarn.service.containerlaunch.JavaCommandLineBuilder;
+import org

[48/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
deleted file mode 100644
index 8310530..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestLoadExampleAppJson.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.service.conf;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.service.api.records.Application;
-import org.apache.hadoop.yarn.service.utils.ServiceApiUtil;
-import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
-import org.junit.Assert;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import java.util.Arrays;
-import java.util.Collection;
-
-import static org.apache.hadoop.yarn.service.ServiceTestUtils.JSON_SER_DESER;
-import static org.easymock.EasyMock.*;
-
-/**
- * Test loading example resources.
- */
-@RunWith(value = Parameterized.class)
-public class TestLoadExampleAppJson extends Assert {
-  private String resource;
-
-  public TestLoadExampleAppJson(String resource) {
-this.resource = resource;
-  }
-
-  @Parameterized.Parameters
-  public static Collection<String[]> filenames() {
-String[][] stringArray = new String[ExampleAppJson
-.ALL_EXAMPLE_RESOURCES.size()][1];
-int i = 0;
-for (String s : ExampleAppJson.ALL_EXAMPLE_RESOURCES) {
-  stringArray[i++][0] = s;
-}
-return Arrays.asList(stringArray);
-  }
-
-  @Test
-  public void testLoadResource() throws Throwable {
-try {
-  Application application = JSON_SER_DESER.fromResource(resource);
-
-  SliderFileSystem sfs = createNiceMock(SliderFileSystem.class);
-  FileSystem mockFs = createNiceMock(FileSystem.class);
-  expect(sfs.getFileSystem()).andReturn(mockFs).anyTimes();
-  expect(sfs.buildClusterDirPath(anyObject())).andReturn(
-  new Path("cluster_dir_path")).anyTimes();
-  replay(sfs, mockFs);
-
-  ServiceApiUtil.validateAndResolveApplication(application, sfs,
-  new YarnConfiguration());
-} catch (Exception e) {
-  throw new Exception("exception loading " + resource + ":" + 
e.toString());
-}
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
deleted file mode 100644
index 98c78d3..000
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/test/java/org/apache/hadoop/yarn/service/conf/TestValidateServiceNames.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apa

[39/75] [abbrv] hadoop git commit: YARN-7050. Post cleanup after YARN-6903, removal of org.apache.slider package. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/394183c7/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java
new file mode 100644
index 000..e4eae20
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/containerlaunch/AbstractLauncher.java
@@ -0,0 +1,271 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.containerlaunch;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.ContainerRetryContext;
+import org.apache.hadoop.yarn.api.records.ContainerRetryPolicy;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.util.Records;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.utils.CoreFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import static org.apache.hadoop.yarn.service.provider.docker.DockerKeys.DEFAULT_DOCKER_NETWORK;
+
+/**
+ * Launcher of applications: base class
+ */
+public class AbstractLauncher {
+  private static final Logger log =
+LoggerFactory.getLogger(AbstractLauncher.class);
+  public static final String CLASSPATH = "CLASSPATH";
+  /**
+   * Filesystem to use for the launch
+   */
+  protected final CoreFileSystem coreFileSystem;
+  /**
+   * Env vars; set up at final launch stage
+   */
+  protected final Map<String, String> envVars = new HashMap<>();
+  protected final ContainerLaunchContext containerLaunchContext =
+Records.newRecord(ContainerLaunchContext.class);
+  protected final List<String> commands = new ArrayList<>(20);
+  protected final Map<String, LocalResource> localResources = new HashMap<>();
+  protected final Map<String, String> mountPaths = new HashMap<>();
+  private final Map<String, ByteBuffer> serviceData = new HashMap<>();
+  // security
+  protected final Credentials credentials;
+  protected boolean yarnDockerMode = false;
+  protected String dockerImage;
+  protected String dockerNetwork = DEFAULT_DOCKER_NETWORK;
+  protected String dockerHostname;
+  protected String runPrivilegedContainer;
+
+
+  /**
+   * Create instance.
+   * @param coreFileSystem filesystem
+   * @param credentials initial set of credentials -null is permitted
+   */
+  public AbstractLauncher(
+  CoreFileSystem coreFileSystem,
+  Credentials credentials) {
+this.coreFileSystem = coreFileSystem;
+this.credentials = credentials != null ? credentials: new Credentials();
+  }
+  
+  public void setYarnDockerMode(boolean yarnDockerMode){
+this.yarnDockerMode = yarnDockerMode;
+  }
+
+  /**
+   * Get the env vars to work on
+   * @return env vars
+   */
+  public Map<String, String> getEnv() {
+return envVars;
+  }
+
+  /**
+   * Get the launch commands.
+   * @return the live list of commands 
+   */
+  public List<String> getCommands() {
+return commands;
+  }
+
+  public void addLocalResource(String subPath, LocalResource resource) {
+localResources.put(subPath, resource);
+  }
+
+  public void addLocalResource(String subPath, LocalResource resource, String mountPath) {
+localResources.put(subPath, resource);
+mountPaths.put(subPath, mountPath);
+  }
+
+  /**
+   * Accessor to the crede
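Taken together, the launcher accumulates env vars, launch commands and local resources and later folds them into the ContainerLaunchContext. A rough usage sketch; the coreFileSystem and appDefResource objects, the environment value and the command line are assumptions, not values from this patch:

  // Sketch only: wiring a container launch through the accessors shown above.
  AbstractLauncher launcher = new AbstractLauncher(coreFileSystem, null);
  launcher.getEnv().put("JAVA_HOME", "/usr/lib/jvm/default-java");
  launcher.getCommands().add("./start_component.sh 1>stdout.txt 2>stderr.txt");
  launcher.addLocalResource("conf/app.json", appDefResource);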

[72/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
index 88f74ef..17f8c95 100644
--- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services-api/src/main/resources/definition/YARN-Simplified-V1-API-Layer-For-Services.yaml
@@ -22,7 +22,7 @@ info:
 Bringing a new service on YARN today is not a simple experience. The APIs 
of existing frameworks are either too low level (native YARN), require writing 
new code (for frameworks with programmatic APIs) or writing a complex spec (for 
declarative frameworks). In addition to building critical building blocks 
inside YARN (as part of other efforts at 
link:https://issues.apache.org/jira/browse/YARN-4692[YARN-4692]), there is a 
need for simplifying the user facing story for building services. Experience of 
projects like Apache Slider running real-life services like HBase, Storm, 
Accumulo, Solr etc, gives us some very good insights on how simplified APIs for 
services should look like.
 
 
-To this end, we should look at a new simple-services API layer backed by 
REST interfaces. This API can be used to create and manage the lifecycle of 
YARN services. Services here can range from simple single-component apps to 
complex multi-component assemblies needing orchestration.
+To this end, we should look at a new simple-services API layer backed by 
REST interfaces. This API can be used to create and manage the lifecycle of 
YARN services. Services here can range from simple single-component service to 
complex multi-component assemblies needing orchestration.
 
 
 We should also look at making this a unified REST based entry point for 
other important features like resource-profile management 
(link:https://issues.apache.org/jira/browse/YARN-3926[YARN-3926]), 
package-definitions' lifecycle-management and service-discovery 
(link:https://issues.apache.org/jira/browse/YARN-913[YARN-913]/link:https://issues.apache.org/jira/browse/YARN-4757[YARN-4757]).
 We also need to flesh out its relation to our present much lower level REST 
APIs (link:https://issues.apache.org/jira/browse/YARN-1695[YARN-1695]) in YARN 
for application-submission and management.
@@ -41,177 +41,177 @@ schemes:
   - http
   - https
 # will be prefixed to all paths
-basePath: /services/v1/
+basePath: /ws/v1/
 consumes:
   - application/json
 produces:
   - application/json
 paths:
-  /applications:
+  /services:
 get:
-  summary: List of applications/services running in the cluster
-  description: Get a list of all currently running applications (response 
includes a minimal projection of the application info). For more details do a 
GET on a specific application name.
+  summary: List of services running in the cluster
+  description: Get a list of all currently running services (response 
includes a minimal projection of the service info). For more details do a GET 
on a specific service name.
   responses:
 200:
-  description: An array of applications
+  description: An array of services
   schema:
 type: array
 items:
-  $ref: '#/definitions/Application'
+  $ref: '#/definitions/Service'
 default:
   description: Unexpected error
   schema:
-$ref: '#/definitions/ApplicationStatus'
+$ref: '#/definitions/ServiceStatus'
 post:
-  summary: Create an application/service
-  description: Create an application. The request JSON is an Application 
object with details required for creation. If the request is successful it 
returns 202 Accepted. A success of this API only confirms success in submission 
of the application creation request. There is no guarantee that the application 
will actually reach a RUNNING state. Resource availability and several other 
factors determines if the application will be deployed in the cluster. It is 
expected that clients would subsequently call the GET API to get details of the 
application and determine its state.
+  summary: Create a service
+  description: Create a service. The request JSON is a service object with 
details required for creation. If the request is successful it returns 202 
Accepted. A success of this API only confirm
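For orientation, the renamed list endpoint under the new basePath would be exercised roughly as below; apiHost and apiPort are placeholders for wherever the REST API server listens, and the POST counterpart would send a Service JSON object as described by the definitions referenced above:

  // Sketch only: plain java.net GET against the renamed /ws/v1/services path.
  static int listServices(String apiHost, int apiPort) throws IOException {
    URL url = new URL("http://" + apiHost + ":" + apiPort + "/ws/v1/services");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("GET");
    conn.setRequestProperty("Accept", "application/json");
    return conn.getResponseCode();   // 200 with a JSON array of Service objects on success
  }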

[73/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
YARN-7091. Rename application to service in yarn-native-services. Contributed 
by Jian He


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/db5888ea
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/db5888ea
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/db5888ea

Branch: refs/heads/yarn-native-services
Commit: db5888ea20a0765f5311d038afbbc78c218679bd
Parents: 963f2d9
Author: Billie Rinaldi 
Authored: Mon Aug 28 09:59:55 2017 -0700
Committer: Jian He 
Committed: Tue Aug 29 20:45:12 2017 -0700

--
 .../resources/assemblies/hadoop-yarn-dist.xml   |8 +-
 .../assemblies/hadoop-yarn-services-dist.xml|2 +-
 hadoop-project/pom.xml  |2 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn|   14 +-
 .../hadoop-yarn/conf/yarn-env.sh|2 +-
 .../hadoop-yarn-services-api/pom.xml|2 +-
 .../hadoop/yarn/service/webapp/ApiServer.java   |  274 +++
 .../yarn/service/webapp/ApiServerWebApp.java|  159 ++
 .../service/webapp/ApplicationApiService.java   |  275 ---
 .../service/webapp/ApplicationApiWebApp.java|  123 --
 ...RN-Simplified-V1-API-Layer-For-Services.yaml |  185 +-
 .../src/main/scripts/run_rest_service.sh|   28 -
 .../dev-support/findbugs-exclude.xml|   48 +
 .../conf/yarnservice-log4j.properties   |   62 +
 .../hadoop-yarn-services-core/pom.xml   |  408 +
 .../hadoop/yarn/service/ClientAMProtocol.java   |   40 +
 .../hadoop/yarn/service/ClientAMService.java|  132 ++
 .../yarn/service/ContainerFailureTracker.java   |   89 +
 .../hadoop/yarn/service/ServiceContext.java |   41 +
 .../hadoop/yarn/service/ServiceMaster.java  |  156 ++
 .../hadoop/yarn/service/ServiceMetrics.java |   98 +
 .../hadoop/yarn/service/ServiceScheduler.java   |  654 +++
 .../yarn/service/api/ServiceApiConstants.java   |   69 +
 .../yarn/service/api/records/Artifact.java  |  160 ++
 .../yarn/service/api/records/BaseResource.java  |   52 +
 .../yarn/service/api/records/Component.java |  412 +
 .../yarn/service/api/records/ConfigFile.java|  225 +++
 .../yarn/service/api/records/ConfigFormat.java  |   67 +
 .../yarn/service/api/records/Configuration.java |  225 +++
 .../yarn/service/api/records/Container.java |  297 +++
 .../service/api/records/ContainerState.java |   30 +
 .../hadoop/yarn/service/api/records/Error.java  |  129 ++
 .../service/api/records/PlacementPolicy.java|  102 ++
 .../service/api/records/ReadinessCheck.java |  175 ++
 .../yarn/service/api/records/Resource.java  |  159 ++
 .../yarn/service/api/records/Service.java   |  466 +
 .../yarn/service/api/records/ServiceState.java  |   33 +
 .../yarn/service/api/records/ServiceStatus.java |  148 ++
 .../yarn/service/client/ClientAMProxy.java  |   57 +
 .../hadoop/yarn/service/client/ServiceCLI.java  |  112 ++
 .../yarn/service/client/ServiceClient.java  |  892 +
 .../client/params/AbstractActionArgs.java   |  158 ++
 .../client/params/AbstractArgsDelegate.java |   28 +
 .../AbstractClusterBuildingActionArgs.java  |   58 +
 .../service/client/params/ActionBuildArgs.java  |   31 +
 .../service/client/params/ActionClientArgs.java |   71 +
 .../service/client/params/ActionCreateArgs.java |   33 +
 .../client/params/ActionDependencyArgs.java |   65 +
 .../client/params/ActionDestroyArgs.java|   37 +
 .../service/client/params/ActionExistsArgs.java |   49 +
 .../service/client/params/ActionFlexArgs.java   |   50 +
 .../service/client/params/ActionFreezeArgs.java |   56 +
 .../service/client/params/ActionHelpArgs.java   |   44 +
 .../service/client/params/ActionKDiagArgs.java  |   76 +
 .../service/client/params/ActionKeytabArgs.java |   76 +
 .../service/client/params/ActionListArgs.java   |   76 +
 .../client/params/ActionRegistryArgs.java   |  218 +++
 .../client/params/ActionResolveArgs.java|  153 ++
 .../client/params/ActionResourceArgs.java   |   70 +
 .../service/client/params/ActionStatusArgs.java |   51 +
 .../service/client/params/ActionThawArgs.java   |   67 +
 .../service/client/params/ActionTokensArgs.java |   78 +
 .../service/client/params/ActionUpdateArgs.java |   32 +
 .../yarn/service/client/params/ArgOps.java  |  156 ++
 .../yarn/service/client/params/Arguments.java   |  103 ++
 .../yarn/service/client/params/ClientArgs.java  |  252 +++
 .../yarn/service/client/params/CommonArgs.java  |  282 +++
 .../client/params/ComponentArgsDelegate.java|   52 +
 .../client/params/DontSplitArguments.java   |   34 +
 .../client/params/LaunchArgsAccessor.java   |   30 +
 .../client/params/LaunchArgsDelegate.java   |   51 +
 .../client/params/OptionArgsDelegate.java   |   66 +
 .../client/params/PathArgumentConverter.java|   34 +
 .../service/client/params/SliderAMArgs.j

[65/75] [abbrv] hadoop git commit: YARN-7091. Rename application to service in yarn-native-services. Contributed by Jian He

2017-08-29 Thread jianhe
http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
new file mode 100644
index 000..684f655
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/MonitorUtils.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.monitor.probe;
+
+import org.apache.hadoop.yarn.service.api.records.ReadinessCheck;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Formatter;
+import java.util.Locale;
+
+/**
+ * Various utils to work with the monitor
+ */
+public final class MonitorUtils {
+  protected static final Logger LOG = LoggerFactory.getLogger(MonitorUtils
+  .class);
+
+  private MonitorUtils() {
+  }
+
+  public static String toPlural(int val) {
+return val != 1 ? "s" : "";
+  }
+
+  /**
+   * Convert milliseconds to human time -the exact format is unspecified
+   * @param milliseconds a time in milliseconds
+   * @return a time that is converted to human intervals
+   */
+  public static String millisToHumanTime(long milliseconds) {
+StringBuilder sb = new StringBuilder();
+// Send all output to the Appendable object sb
+Formatter formatter = new Formatter(sb, Locale.US);
+
+long s = Math.abs(milliseconds / 1000);
+long m = Math.abs(milliseconds % 1000);
+if (milliseconds > 0) {
+  formatter.format("%d.%03ds", s, m);
+} else if (milliseconds == 0) {
+  formatter.format("0");
+} else {
+  formatter.format("-%d.%03ds", s, m);
+}
+return sb.toString();
+  }
+
+  public static Probe getProbe(ReadinessCheck readinessCheck) {
+if (readinessCheck == null) {
+  return null;
+}
+if (readinessCheck.getType() == null) {
+  return null;
+}
+try {
+  switch (readinessCheck.getType()) {
+  case HTTP:
+return HttpProbe.create(readinessCheck.getProps());
+  case PORT:
+return PortProbe.create(readinessCheck.getProps());
+  default:
+return null;
+  }
+} catch (Throwable t) {
+  throw new IllegalArgumentException("Error creating readiness check " +
+  t);
+}
+  }
+}
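For reference, a quick illustration of the formatting helpers above; this is pure arithmetic on the methods as written, with no assumptions beyond the class itself:

  // millisToHumanTime prints seconds with a three-digit millisecond fraction.
  String t = MonitorUtils.millisToHumanTime(61500L);   // "61.500s"
  String none = MonitorUtils.millisToHumanTime(0L);    // "0"
  String plural = "check" + MonitorUtils.toPlural(2);  // "checks"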

http://git-wip-us.apache.org/repos/asf/hadoop/blob/db5888ea/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
--
diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
new file mode 100644
index 000..aba5859
--- /dev/null
+++ 
b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/monitor/probe/PortProbe.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You m
