http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
new file mode 100644
index 0000000..d760536
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
@@ -0,0 +1,789 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+
+public class TimelineStoreTestUtils {
+
+  protected static final List<TimelineEvent> EMPTY_EVENTS =
+      Collections.emptyList();
+  protected static final Map<String, Object> EMPTY_MAP =
+      Collections.emptyMap();
+  protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS =
+      Collections.emptyMap();
+  protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES =
+      Collections.emptyMap();
+
+  protected TimelineStore store;
+  protected String entityId1;
+  protected String entityType1;
+  protected String entityId1b;
+  protected String entityId2;
+  protected String entityType2;
+  protected String entityId4;
+  protected String entityType4;
+  protected String entityId5;
+  protected String entityType5;
+  protected Map<String, Set<Object>> primaryFilters;
+  protected Map<String, Object> secondaryFilters;
+  protected Map<String, Object> allFilters;
+  protected Map<String, Object> otherInfo;
+  protected Map<String, Set<String>> relEntityMap;
+  protected Map<String, Set<String>> relEntityMap2;
+  protected NameValuePair userFilter;
+  protected NameValuePair numericFilter1;
+  protected NameValuePair numericFilter2;
+  protected NameValuePair numericFilter3;
+  protected Collection<NameValuePair> goodTestingFilters;
+  protected Collection<NameValuePair> badTestingFilters;
+  protected TimelineEvent ev1;
+  protected TimelineEvent ev2;
+  protected TimelineEvent ev3;
+  protected TimelineEvent ev4;
+  protected Map<String, Object> eventInfo;
+  protected List<TimelineEvent> events1;
+  protected List<TimelineEvent> events2;
+  protected long beforeTs;
+
+  /**
+   * Load test data into the given store
+   */
+  protected void loadTestData() throws IOException {
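+    // Capture a timestamp strictly before the first insert so insert-time
+    // ("fromTs") queries can distinguish pre-load from post-load state.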
+    beforeTs = System.currentTimeMillis() - 1;
+    TimelineEntities entities = new TimelineEntities();
+    Map<String, Set<Object>> primaryFilters =
+        new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    l2.add((long)Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    Map<String, Object> secondaryFilters = new HashMap<String, Object>();
+    secondaryFilters.put("startTime", 123456l);
+    secondaryFilters.put("status", "RUNNING");
+    Map<String, Object> otherInfo1 = new HashMap<String, Object>();
+    otherInfo1.put("info1", "val1");
+    otherInfo1.putAll(secondaryFilters);
+
+    String entityId1 = "id_1";
+    String entityType1 = "type_1";
+    String entityId1b = "id_2";
+    String entityId2 = "id_2";
+    String entityType2 = "type_2";
+    String entityId4 = "id_4";
+    String entityType4 = "type_4";
+    String entityId5 = "id_5";
+    String entityType5 = "type_5";
+
+    Map<String, Set<String>> relatedEntities =
+        new HashMap<String, Set<String>>();
+    relatedEntities.put(entityType2, Collections.singleton(entityId2));
+
+    TimelineEvent ev3 = createEvent(789l, "launch_event", null);
+    TimelineEvent ev4 = createEvent(-123l, "init_event", null);
+    List<TimelineEvent> events = new ArrayList<TimelineEvent>();
+    events.add(ev3);
+    events.add(ev4);
+    entities.setEntities(Collections.singletonList(createEntity(entityId2,
+        entityType2, null, events, null, null, null)));
+    TimelinePutResponse response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    TimelineEvent ev1 = createEvent(123l, "start_event", null);
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, 123l, Collections.singletonList(ev1),
+        relatedEntities, primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, null, Collections.singletonList(ev1), relatedEntities,
+        primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    Map<String, Object> eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
+    Map<String, Object> otherInfo2 = new HashMap<String, Object>();
+    otherInfo2.put("info2", "val2");
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, null, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, 789l, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
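+    // An entity posted with no events and no explicit start time cannot be
+    // assigned a start time, so the store must reject it.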
+    entities.setEntities(Collections.singletonList(createEntity(
+        "badentityid", "badentity", null, null, null, null, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(1, response.getErrors().size());
+    TimelinePutError error = response.getErrors().get(0);
+    assertEquals("badentityid", error.getEntityId());
+    assertEquals("badentity", error.getEntityType());
+    assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
+
+    relatedEntities.clear();
+    relatedEntities.put(entityType5, Collections.singleton(entityId5));
+    entities.setEntities(Collections.singletonList(createEntity(entityId4,
+        entityType4, 42l, null, relatedEntities, null, null)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+  }
+
+  /**
+   * Load verification data
+   */
+  protected void loadVerificationData() throws Exception {
+    userFilter = new NameValuePair("user", "username");
+    numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE);
+    numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l);
+    numericFilter3 = new NameValuePair("other", "123abc");
+    goodTestingFilters = new ArrayList<NameValuePair>();
+    goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
+    badTestingFilters = new ArrayList<NameValuePair>();
+    badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    badTestingFilters.add(new NameValuePair("status", "FINISHED"));
+
+    primaryFilters = new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    l2.add(Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    secondaryFilters = new HashMap<String, Object>();
+    secondaryFilters.put("startTime", 123456);
+    secondaryFilters.put("status", "RUNNING");
+    allFilters = new HashMap<String, Object>();
+    allFilters.putAll(secondaryFilters);
+    for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
+      for (Object o : pf.getValue()) {
+        allFilters.put(pf.getKey(), o);
+      }
+    }
+    otherInfo = new HashMap<String, Object>();
+    otherInfo.put("info1", "val1");
+    otherInfo.put("info2", "val2");
+    otherInfo.putAll(secondaryFilters);
+
+    entityId1 = "id_1";
+    entityType1 = "type_1";
+    entityId1b = "id_2";
+    entityId2 = "id_2";
+    entityType2 = "type_2";
+    entityId4 = "id_4";
+    entityType4 = "type_4";
+    entityId5 = "id_5";
+    entityType5 = "type_5";
+
+    ev1 = createEvent(123l, "start_event", null);
+
+    eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    ev2 = createEvent(456l, "end_event", eventInfo);
+    events1 = new ArrayList<TimelineEvent>();
+    events1.add(ev2);
+    events1.add(ev1);
+
+    relEntityMap =
+        new HashMap<String, Set<String>>();
+    Set<String> ids = new HashSet<String>();
+    ids.add(entityId1);
+    ids.add(entityId1b);
+    relEntityMap.put(entityType1, ids);
+
+    relEntityMap2 =
+        new HashMap<String, Set<String>>();
+    relEntityMap2.put(entityType4, Collections.singleton(entityId4));
+
+    ev3 = createEvent(789l, "launch_event", null);
+    ev4 = createEvent(-123l, "init_event", null);
+    events2 = new ArrayList<TimelineEvent>();
+    events2.add(ev3);
+    events2.add(ev4);
+  }
+
+  public void testGetSingleEntity() throws IOException {
+    // test getting entity info
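+    // Looking up an existing id under the wrong type should find nothing.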
+    verifyEntityInfo(null, null, null, null, null, null,
+        store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1b,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2,
+        entityType2, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4,
+        entityType4, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5,
+        entityType5, EnumSet.allOf(Field.class)));
+
+    // test getting single fields
+    verifyEntityInfo(entityId1, entityType1, events1, null, null, null,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)));
+
+    verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2),
+        null, null, null, store.getEntity(entityId1, entityType1,
+        EnumSet.of(Field.LAST_EVENT_ONLY)));
+
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
+        null));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null,
+        store.getEntity(entityId1, entityType1,
+            EnumSet.of(Field.PRIMARY_FILTERS)));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)));
+
+    verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null,
+        store.getEntity(entityId2, entityType2,
+            EnumSet.of(Field.RELATED_ENTITIES)));
+  }
+
+  protected List<TimelineEntity> getEntities(String entityType)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromId(String entityType,
+      String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromTs(String entityType,
+      long fromTs) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, String fromId)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromTsWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, long fromTs)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithWindow(String entityType,
+      Long windowEnd, String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilterAndWindow(
+      String entityType, Long windowEnd, String fromId,
+      NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesWithFilters(String entityType,
+      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, secondaryFilters, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntities(String entityType, Long limit,
+      Long windowStart, Long windowEnd, NameValuePair primaryFilter,
+      EnumSet<Field> fields) throws IOException {
+    return store.getEntities(entityType, limit, windowStart, windowEnd, null,
+        null, primaryFilter, null, fields).getEntities();
+  }
+
+  public void testGetEntities() throws IOException {
+    // test getting entities
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_0").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_3").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_6").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_0", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_3", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_6", userFilter).size());
+
+    List<TimelineEntity> entities = getEntities("type_1");
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_2");
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    entities = getEntities("type_1", 1l, null, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", null, 234l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 123l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, null, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_1", null, null, 123l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  public void testGetEntitiesWithFromId() throws IOException {
+    List<TimelineEntity> entities = getEntitiesFromId("type_1", entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesFromId("type_1", entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromId("type_2", "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromId("type_2", entityId2);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, null);
+    assertEquals(1, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2);
+    assertEquals(1, entities.size());
+
+    // same tests with primary filters
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l,
+        entityId1, userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a");
+    assertEquals(0, entities.size());
+  }
+
+  public void testGetEntitiesWithFromTs() throws IOException {
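+    // Everything in the store was inserted after beforeTs, so filtering on
+    // insert time at beforeTs should return nothing.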
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    long afterTs = System.currentTimeMillis();
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+    // check insert time is not overwritten
+    long beforeTs = this.beforeTs;
+    loadTestData();
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+  }
+
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    // test using primary filter
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_1",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_2",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_3",
+            new NameValuePair("none", "none")).size());
+
+    List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
+        userFilter);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_2", userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", 1l, null, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", null, 234l, null, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, null, 345l, userFilter, null);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    // test using secondary filter
+    List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
+        goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithFilters("type_1", userFilter, goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithFilters("type_1", null,
+        Collections.singleton(new NameValuePair("user", "none")));
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesWithFilters("type_1", null, badTestingFilters);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters);
+    assertEquals(0, entities.size());
+  }
+
+  public void testGetEvents() throws IOException {
+    // test getting entity timelines
+    SortedSet<String> sortedSet = new TreeSet<String>();
+    sortedSet.add(entityId1);
+    List<EventsOfOneEntity> timelines =
+        store.getEntityTimelines(entityType1, sortedSet, null, null,
+            null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+
+    sortedSet.add(entityId1b);
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        345l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        123l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 345l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 123l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, Collections.singleton("end_event")).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    sortedSet.add(entityId2);
+    timelines = store.getEntityTimelines(entityType2, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4);
+  }
+
+  /**
+   * Verify a single entity and its start time
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      Long startTime, TimelineEntity retrievedEntityInfo) {
+
+    verifyEntityInfo(entityId, entityType, events, relatedEntities,
+        primaryFilters, otherInfo, retrievedEntityInfo);
+    assertEquals(startTime, retrievedEntityInfo.getStartTime());
+  }
+
+  /**
+   * Verify a single entity
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      TimelineEntity retrievedEntityInfo) {
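+    // A null entityId means the caller expects the lookup to find nothing.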
+    if (entityId == null) {
+      assertNull(retrievedEntityInfo);
+      return;
+    }
+    assertEquals(entityId, retrievedEntityInfo.getEntityId());
+    assertEquals(entityType, retrievedEntityInfo.getEntityType());
+    if (events == null) {
+      assertNull(retrievedEntityInfo.getEvents());
+    } else {
+      assertEquals(events, retrievedEntityInfo.getEvents());
+    }
+    if (relatedEntities == null) {
+      assertNull(retrievedEntityInfo.getRelatedEntities());
+    } else {
+      assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
+    }
+    if (primaryFilters == null) {
+      assertNull(retrievedEntityInfo.getPrimaryFilters());
+    } else {
+      assertTrue(primaryFilters.equals(
+          retrievedEntityInfo.getPrimaryFilters()));
+    }
+    if (otherInfo == null) {
+      assertNull(retrievedEntityInfo.getOtherInfo());
+    } else {
+      assertTrue(otherInfo.equals(retrievedEntityInfo.getOtherInfo()));
+    }
+  }
+
+  /**
+   * Verify timeline events
+   */
+  private static void verifyEntityTimeline(
+      EventsOfOneEntity retrievedEvents, String entityId, String entityType,
+      TimelineEvent... actualEvents) {
+    assertEquals(entityId, retrievedEvents.getEntityId());
+    assertEquals(entityType, retrievedEvents.getEntityType());
+    assertEquals(actualEvents.length, retrievedEvents.getEvents().size());
+    for (int i = 0; i < actualEvents.length; i++) {
+      assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i));
+    }
+  }
+
+  /**
+   * Create a test entity
+   */
+  protected static TimelineEntity createEntity(String entityId, String entityType,
+      Long startTime, List<TimelineEvent> events,
+      Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters,
+      Map<String, Object> otherInfo) {
+    TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId(entityId);
+    entity.setEntityType(entityType);
+    entity.setStartTime(startTime);
+    entity.setEvents(events);
+    if (relatedEntities != null) {
+      for (Entry<String, Set<String>> e : relatedEntities.entrySet()) {
+        for (String v : e.getValue()) {
+          entity.addRelatedEntity(e.getKey(), v);
+        }
+      }
+    } else {
+      entity.setRelatedEntities(null);
+    }
+    entity.setPrimaryFilters(primaryFilters);
+    entity.setOtherInfo(otherInfo);
+    return entity;
+  }
+
+  /**
+   * Create a test event
+   */
+  private static TimelineEvent createEvent(long timestamp, String type, Map<String,
+      Object> info) {
+    TimelineEvent event = new TimelineEvent();
+    event.setTimestamp(timestamp);
+    event.setEventType(type);
+    event.setEventInfo(info);
+    return event;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
new file mode 100644
index 0000000..5f23f83
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebApp.java
@@ -0,0 +1,183 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.apache.hadoop.yarn.webapp.Params.TITLE;
+import static org.mockito.Mockito.mock;
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.api.ApplicationContext;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStoreTestUtils;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
+import org.apache.hadoop.yarn.util.StringHelper;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.test.WebAppTests;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+import javax.servlet.http.HttpServletResponse;
+
+public class TestAHSWebApp extends ApplicationHistoryStoreTestUtils {
+
+  public void setApplicationHistoryStore(ApplicationHistoryStore store) {
+    this.store = store;
+  }
+
+  @Before
+  public void setup() {
+    store = new MemoryApplicationHistoryStore();
+  }
+
+  @Test
+  public void testAppControllerIndex() throws Exception {
+    ApplicationHistoryManager ahManager = mock(ApplicationHistoryManager.class);
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationHistoryManager.class,
+          ahManager);
+    AHSController controller = injector.getInstance(AHSController.class);
+    controller.index();
+    Assert.assertEquals("Application History", controller.get(TITLE, 
"unknown"));
+  }
+
+  @Test
+  public void testView() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(5, 1, 1));
+    AHSView ahsViewInstance = injector.getInstance(AHSView.class);
+
+    ahsViewInstance.render();
+    //WebAppTests.flushOutput(injector);
+
+    ahsViewInstance.set(YarnWebParams.APP_STATE,
+      YarnApplicationState.FAILED.toString());
+    ahsViewInstance.render();
+    //WebAppTests.flushOutput(injector);
+
+    ahsViewInstance.set(YarnWebParams.APP_STATE, StringHelper.cjoin(
+      YarnApplicationState.FAILED.toString(), YarnApplicationState.KILLED));
+    ahsViewInstance.render();
+    //WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testAppPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 5, 1));
+    AppPage appPageInstance = injector.getInstance(AppPage.class);
+
+    appPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+
+    appPageInstance.set(YarnWebParams.APPLICATION_ID, ApplicationId
+      .newInstance(0, 1).toString());
+    appPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testAppAttemptPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 1, 5));
+    AppAttemptPage appAttemptPageInstance =
+        injector.getInstance(AppAttemptPage.class);
+
+    appAttemptPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+
+    appAttemptPageInstance.set(YarnWebParams.APPLICATION_ATTEMPT_ID,
+      ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1)
+        .toString());
+    appAttemptPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+  }
+
+  @Test
+  public void testContainerPage() throws Exception {
+    Injector injector =
+        WebAppTests.createMockInjector(ApplicationContext.class,
+          mockApplicationHistoryManager(1, 1, 1));
+    ContainerPage containerPageInstance =
+        injector.getInstance(ContainerPage.class);
+
+    containerPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+
+    containerPageInstance.set(
+      YarnWebParams.CONTAINER_ID,
+      ContainerId
+        .newInstance(
+          ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1),
+          1).toString());
+    containerPageInstance.render();
+    //WebAppTests.flushOutput(injector);
+  }
+
+  ApplicationHistoryManager mockApplicationHistoryManager(int numApps,
+      int numAppAttempts, int numContainers) throws Exception {
+    ApplicationHistoryManager ahManager =
+        new MockApplicationHistoryManagerImpl(store);
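+    // Write a complete lifecycle (start and finish records) for every
+    // requested application, attempt, and container.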
+    for (int i = 1; i <= numApps; ++i) {
+      ApplicationId appId = ApplicationId.newInstance(0, i);
+      writeApplicationStartData(appId);
+      for (int j = 1; j <= numAppAttempts; ++j) {
+        ApplicationAttemptId appAttemptId =
+            ApplicationAttemptId.newInstance(appId, j);
+        writeApplicationAttemptStartData(appAttemptId);
+        for (int k = 1; k <= numContainers; ++k) {
+          ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
+          writeContainerStartData(containerId);
+          writeContainerFinishData(containerId);
+        }
+        writeApplicationAttemptFinishData(appAttemptId);
+      }
+      writeApplicationFinishData(appId);
+    }
+    return ahManager;
+  }
+
+  class MockApplicationHistoryManagerImpl extends ApplicationHistoryManagerImpl {
+
+    public MockApplicationHistoryManagerImpl(ApplicationHistoryStore store) {
+      super();
+      init(new YarnConfiguration());
+      start();
+    }
+
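+    // Return the test's in-memory store instead of creating one from conf.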
+    @Override
+    protected ApplicationHistoryStore createApplicationHistoryStore(
+        Configuration conf) {
+      return store;
+    }
+  };
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
new file mode 100644
index 0000000..7ca5a03
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestAHSWebServices.java
@@ -0,0 +1,303 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import javax.ws.rs.core.MediaType;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.api.records.Priority;
+import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.server.api.ApplicationContext;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+public class TestAHSWebServices extends JerseyTest {
+
+  private static ApplicationHistoryManager ahManager;
+
+  private Injector injector = Guice.createInjector(new ServletModule() {
+
+    @Override
+    protected void configureServlets() {
+      bind(JAXBContextResolver.class);
+      bind(AHSWebServices.class);
+      bind(GenericExceptionHandler.class);
+      try {
+        ahManager = mockApplicationHistoryManager();
+      } catch (Exception e) {
+        Assert.fail();
+      }
+      bind(ApplicationContext.class).toInstance(ahManager);
+      serve("/*").with(GuiceContainer.class);
+    }
+  });
+
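+  // Servlet context listener that hands the container the injector
+  // configured above.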
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
+  private ApplicationHistoryManager mockApplicationHistoryManager()
+      throws Exception {
+    ApplicationHistoryStore store = new MemoryApplicationHistoryStore();
+    TestAHSWebApp testAHSWebApp = new TestAHSWebApp();
+    testAHSWebApp.setApplicationHistoryStore(store);
+    ApplicationHistoryManager ahManager =
+        testAHSWebApp.mockApplicationHistoryManager(5, 5, 5);
+    return ahManager;
+  }
+
+  public TestAHSWebServices() {
+    super(new WebAppDescriptor.Builder(
+      "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
+      .contextListenerClass(GuiceServletConfig.class)
+      .filterClass(com.google.inject.servlet.GuiceFilter.class)
+      .contextPath("jersey-guice-filter").servletPath("/").build());
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  @Test
+  public void testInvalidUri() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr =
+          r.path("ws").path("v1").path("applicationhistory").path("bogus")
+            .accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+
+      WebServicesTestUtils.checkStringMatch(
+        "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidUri2() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+        "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidAccept() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr =
+          r.path("ws").path("v1").path("applicationhistory")
+            .accept(MediaType.TEXT_PLAIN).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
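+      // The resource cannot be rendered as text/plain, which this service
+      // surfaces as a 500 rather than a 406.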
+      assertEquals(Status.INTERNAL_SERVER_ERROR,
+        response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+        "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testAppsQuery() throws Exception {
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .queryParam("state", YarnApplicationState.FINISHED.toString())
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject apps = json.getJSONObject("apps");
+    assertEquals("incorrect number of elements", 1, apps.length());
+    JSONArray array = apps.getJSONArray("app");
+    assertEquals("incorrect number of elements", 5, array.length());
+  }
+
+  @Test
+  public void testSingleApp() throws Exception {
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .path(appId.toString()).accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject app = json.getJSONObject("app");
+    assertEquals(appId.toString(), app.getString("appId"));
+    assertEquals(appId.toString(), app.get("name"));
+    assertEquals(appId.toString(), app.get("diagnosticsInfo"));
+    assertEquals("test queue", app.get("queue"));
+    assertEquals("test user", app.get("user"));
+    assertEquals("test type", app.get("type"));
+    assertEquals(FinalApplicationStatus.UNDEFINED.toString(),
+      app.get("finalAppStatus"));
+    assertEquals(YarnApplicationState.FINISHED.toString(), app.get("appState"));
+  }
+
+  @Test
+  public void testMultipleAttempts() throws Exception {
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .path(appId.toString()).path("appattempts")
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject appAttempts = json.getJSONObject("appAttempts");
+    assertEquals("incorrect number of elements", 1, appAttempts.length());
+    JSONArray array = appAttempts.getJSONArray("appAttempt");
+    assertEquals("incorrect number of elements", 5, array.length());
+  }
+
+  @Test
+  public void testSingleAttempt() throws Exception {
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .path(appId.toString()).path("appattempts")
+          .path(appAttemptId.toString()).accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject appAttempt = json.getJSONObject("appAttempt");
+    assertEquals(appAttemptId.toString(), appAttempt.getString("appAttemptId"));
+    assertEquals(appAttemptId.toString(), appAttempt.getString("host"));
+    assertEquals(appAttemptId.toString(),
+      appAttempt.getString("diagnosticsInfo"));
+    assertEquals("test tracking url", appAttempt.getString("trackingUrl"));
+    assertEquals(YarnApplicationAttemptState.FINISHED.toString(),
+      appAttempt.get("appAttemptState"));
+  }
+
+  @Test
+  public void testMultipleContainers() throws Exception {
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .path(appId.toString()).path("appattempts")
+          .path(appAttemptId.toString()).path("containers")
+          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject containers = json.getJSONObject("containers");
+    assertEquals("incorrect number of elements", 1, containers.length());
+    JSONArray array = containers.getJSONArray("container");
+    assertEquals("incorrect number of elements", 5, array.length());
+  }
+
+  @Test
+  public void testSingleContainer() throws Exception {
+    ApplicationId appId = ApplicationId.newInstance(0, 1);
+    ApplicationAttemptId appAttemptId =
+        ApplicationAttemptId.newInstance(appId, 1);
+    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
+    WebResource r = resource();
+    ClientResponse response =
+        r.path("ws").path("v1").path("applicationhistory").path("apps")
+          .path(appId.toString()).path("appattempts")
+          .path(appAttemptId.toString()).path("containers")
+          .path(containerId.toString()).accept(MediaType.APPLICATION_JSON)
+          .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    JSONObject container = json.getJSONObject("container");
+    assertEquals(containerId.toString(), container.getString("containerId"));
+    assertEquals(containerId.toString(), container.getString("diagnosticsInfo"));
+    assertEquals("0", container.getString("allocatedMB"));
+    assertEquals("0", container.getString("allocatedVCores"));
+    assertEquals(NodeId.newInstance("localhost", 0).toString(),
+      container.getString("assignedNodeId"));
+    assertEquals(Priority.newInstance(containerId.getId()).toString(),
+      container.getString("priority"));
+    Configuration conf = new YarnConfiguration();
+    assertEquals(WebAppUtils.getHttpSchemePrefix(conf) +
+        WebAppUtils.getAHSWebAppURLWithoutScheme(conf) +
+        "/applicationhistory/logs/localhost:0/container_0_0001_01_000001/" +
+        "container_0_0001_01_000001/test user",
+        container.getString("logUrl"));
+    assertEquals(ContainerState.COMPLETE.toString(),
+      container.getString("containerState"));
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
new file mode 100644
index 0000000..2b93190
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/TestTimelineWebServices.java
@@ -0,0 +1,391 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
+
+import static org.junit.Assert.assertEquals;
+
+import javax.ws.rs.core.MediaType;
+
+import junit.framework.Assert;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TestTimelineMetricStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineStore;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TestMemoryTimelineStore;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+
+public class TestTimelineWebServices extends JerseyTest {
+
+  private static TimelineStore store;
+  private static TimelineMetricStore metricStore;
+  private long beforeTime;
+
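+  // Test-scoped Guice module: binds the mocked timeline store and an
+  // in-memory metric store, then serves everything through GuiceContainer
+  // so JerseyTest exercises the real TimelineWebServices resource.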
+  private Injector injector = Guice.createInjector(new ServletModule() {
+
+    @Override
+    protected void configureServlets() {
+      bind(YarnJacksonJaxbJsonProvider.class);
+      bind(TimelineWebServices.class);
+      bind(GenericExceptionHandler.class);
+      try {
+        store = mockTimelineStore();
+        metricStore = new TestTimelineMetricStore();
+      } catch (Exception e) {
+        Assert.fail();
+      }
+      bind(TimelineStore.class).toInstance(store);
+      bind(TimelineMetricStore.class).toInstance(metricStore);
+      serve("/*").with(GuiceContainer.class);
+    }
+
+  });
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
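+  // Captures beforeTime just ahead of fixture setup so the fromTs tests
+  // can bracket the entities' insertion time, then returns a seeded
+  // in-memory store.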
+  private TimelineStore mockTimelineStore()
+      throws Exception {
+    beforeTime = System.currentTimeMillis() - 1;
+    TestMemoryTimelineStore store = new TestMemoryTimelineStore();
+    store.setup();
+    return store.getTimelineStore();
+  }
+
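+  // Boots the embedded test container with the Guice filter so each test
+  // can issue real HTTP requests against /ws/v1/timeline.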
+  public TestTimelineWebServices() {
+    super(new WebAppDescriptor.Builder(
+        "org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
+        .contextListenerClass(GuiceServletConfig.class)
+        .filterClass(com.google.inject.servlet.GuiceFilter.class)
+        .contextPath("jersey-guice-filter")
+        .servletPath("/")
+        .clientConfig(new DefaultClientConfig(YarnJacksonJaxbJsonProvider.class))
+        .build());
+  }
+
+  @Test
+  public void testAbout() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineWebServices.AboutInfo about =
+        response.getEntity(TimelineWebServices.AboutInfo.class);
+    Assert.assertNotNull(about);
+    Assert.assertEquals("Timeline API", about.getAbout());
+  }
+
+  private static void verifyEntities(TimelineEntities entities) {
+    Assert.assertNotNull(entities);
+    Assert.assertEquals(2, entities.getEntities().size());
+    TimelineEntity entity1 = entities.getEntities().get(0);
+    Assert.assertNotNull(entity1);
+    Assert.assertEquals("id_1", entity1.getEntityId());
+    Assert.assertEquals("type_1", entity1.getEntityType());
+    Assert.assertEquals(123L, entity1.getStartTime().longValue());
+    Assert.assertEquals(2, entity1.getEvents().size());
+    Assert.assertEquals(4, entity1.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity1.getOtherInfo().size());
+    TimelineEntity entity2 = entities.getEntities().get(1);
+    Assert.assertNotNull(entity2);
+    Assert.assertEquals("id_2", entity2.getEntityId());
+    Assert.assertEquals("type_1", entity2.getEntityType());
+    Assert.assertEquals(123L, entity2.getStartTime().longValue());
+    Assert.assertEquals(2, entity2.getEvents().size());
+    Assert.assertEquals(4, entity2.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity2.getOtherInfo().size());
+  }
+
+  private static void verifyMetrics(TimelineMetrics metrics) {
+    Assert.assertNotNull(metrics);
+    Assert.assertEquals("cpu_user", 
metrics.getMetrics().get(0).getMetricName());
+    Assert.assertEquals(3, 
metrics.getMetrics().get(0).getMetricValues().size());
+    Assert.assertEquals("mem_free", 
metrics.getMetrics().get(1).getMetricName());
+    Assert.assertEquals(3, 
metrics.getMetrics().get(1).getMetricValues().size());
+  }
+
+  @Test
+  public void testGetEntities() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testFromId() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromId", "id_2")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(1, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+
+    response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromId", "id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testFromTs() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromTs", Long.toString(beforeTime))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+
+    response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("fromTs", Long.toString(
+            System.currentTimeMillis()))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(2, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testPrimaryFilterString() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "user:username")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterInteger() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterLong() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter",
+            "long:" + Long.toString((long)Integer.MAX_VALUE + 1l))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testPrimaryFilterNumericString() {
+    // without quotes, 123abc is interpreted as the number 123,
+    // which finds no entities
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:123abc")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    assertEquals(0, response.getEntity(TimelineEntities.class).getEntities()
+        .size());
+  }
+
+  @Test
+  public void testPrimaryFilterNumericStringWithQuotes() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").queryParam("primaryFilter", "other:\"123abc\"")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testSecondaryFilters() {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1")
+        .queryParam("secondaryFilter",
+            "user:username,appname:" + Integer.toString(Integer.MAX_VALUE))
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyEntities(response.getEntity(TimelineEntities.class));
+  }
+
+  @Test
+  public void testGetEntity() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123L, entity.getStartTime().longValue());
+    Assert.assertEquals(2, entity.getEvents().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEntityFields1() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1").queryParam("fields", "events,otherinfo")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123L, entity.getStartTime().longValue());
+    Assert.assertEquals(2, entity.getEvents().size());
+    Assert.assertEquals(0, entity.getPrimaryFilters().size());
+    Assert.assertEquals(4, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEntityFields2() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("id_1").queryParam("fields", "lasteventonly," +
+            "primaryfilters,relatedentities")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEntity entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("id_1", entity.getEntityId());
+    Assert.assertEquals("type_1", entity.getEntityType());
+    Assert.assertEquals(123L, entity.getStartTime().longValue());
+    Assert.assertEquals(1, entity.getEvents().size());
+    Assert.assertEquals(4, entity.getPrimaryFilters().size());
+    Assert.assertEquals(0, entity.getOtherInfo().size());
+  }
+
+  @Test
+  public void testGetEvents() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .path("type_1").path("events")
+        .queryParam("entityId", "id_1")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelineEvents events = response.getEntity(TimelineEvents.class);
+    Assert.assertNotNull(events);
+    Assert.assertEquals(1, events.getAllEvents().size());
+    TimelineEvents.EventsOfOneEntity partEvents = events.getAllEvents().get(0);
+    Assert.assertEquals(2, partEvents.getEvents().size());
+    TimelineEvent event1 = partEvents.getEvents().get(0);
+    Assert.assertEquals(456L, event1.getTimestamp());
+    Assert.assertEquals("end_event", event1.getEventType());
+    Assert.assertEquals(1, event1.getEventInfo().size());
+    TimelineEvent event2 = partEvents.getEvents().get(1);
+    Assert.assertEquals(123L, event2.getTimestamp());
+    Assert.assertEquals("start_event", event2.getEventType());
+    Assert.assertEquals(0, event2.getEventInfo().size());
+  }
+
+  @Test
+  public void testPostEntities() throws Exception {
+    TimelineEntities entities = new TimelineEntities();
+    TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId("test id");
+    entity.setEntityType("test type");
+    entity.setStartTime(System.currentTimeMillis());
+    entities.addEntity(entity);
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+        .accept(MediaType.APPLICATION_JSON)
+        .type(MediaType.APPLICATION_JSON)
+        .post(ClientResponse.class, entities);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    TimelinePutResponse putResponse =
+        response.getEntity(TimelinePutResponse.class);
+    Assert.assertNotNull(putResponse);
+    Assert.assertEquals(0, putResponse.getErrors().size());
+    // verify the entity exists in the store
+    response = r.path("ws").path("v1").path("timeline")
+        .path("test type").path("test id")
+        .accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    entity = response.getEntity(TimelineEntity.class);
+    Assert.assertNotNull(entity);
+    Assert.assertEquals("test id", entity.getEntityId());
+    Assert.assertEquals("test type", entity.getEntityType());
+  }
+
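+  // The metrics endpoint is backed by TestTimelineMetricStore, which
+  // returns the fixed cpu_user and mem_free series checked in
+  // verifyMetrics() above.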
+  @Test
+  public void testGetMetrics() throws Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("timeline")
+      .path("metrics").queryParam("metricNames", "cpu_user")
+      .accept(MediaType.APPLICATION_JSON)
+      .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    verifyMetrics(response.getEntity(TimelineMetrics.class));
+  }
+}

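For reference, the endpoints exercised by the tests above can also be
queried directly once a timeline service instance is running. A minimal
client sketch follows; the localhost:8188 address, the class name, and a
locally running service are illustrative assumptions, not part of this
patch:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class TimelineQuerySketch {
      public static void main(String[] args) throws Exception {
        // Same resource path TestTimelineWebServices#testGetMetrics hits.
        URL url = new URL("http://localhost:8188/ws/v1/timeline/metrics"
            + "?metricNames=cpu_user");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream()))) {
          String line;
          while ((line = in.readLine()) != null) {
            System.out.println(line); // raw TimelineMetrics JSON
          }
        }
      }
    }
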
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/hbase-default.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/hbase-default.xml b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/hbase-default.xml
new file mode 100644
index 0000000..7502346
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/hbase-default.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+<!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+<configuration>
+  <property>
+    <name>hbase.defaults.for.version.skip</name>
+    <value>true</value>
+    <description>
+      Set to true to skip the 'hbase.defaults.for.version' check.
+      Setting this to true can be useful in contexts other than a
+      Maven-generated build, e.g. when running in an IDE. Set this
+      boolean to true to avoid seeing the RuntimeException complaint:
+      "hbase-default.xml file seems to be for an old version of HBase
+      (@@@VERSION@@@), this version is X.X.X-SNAPSHOT"
+    </description>
+  </property>
+</configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar
new file mode 100644
index 0000000..468409a
Binary files /dev/null and b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.jar differ

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom
new file mode 100644
index 0000000..83c7106
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/4.2.0/phoenix-core-tests-4.2.0.pom
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"
+    xmlns="http://maven.apache.org/POM/4.0.0"
+    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.phoenix</groupId>
+  <artifactId>phoenix-core-tests</artifactId>
+  <version>4.2.0</version>
+  <description>POM was created from install:install-file</description>
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml
new file mode 100644
index 0000000..53bbfc2
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/resources/lib/org/apache/phoenix/phoenix-core-tests/maven-metadata-local.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<metadata>
+  <groupId>org.apache.phoenix</groupId>
+  <artifactId>phoenix-core-tests</artifactId>
+  <versioning>
+    <release>4.2.0</release>
+    <versions>
+      <version>4.2.0</version>
+    </versions>
+    <lastUpdated>20141103224551</lastUpdated>
+  </versioning>
+</metadata>

http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-metrics/pom.xml
----------------------------------------------------------------------
diff --git a/ambari-metrics/pom.xml b/ambari-metrics/pom.xml
new file mode 100644
index 0000000..8549c26
--- /dev/null
+++ b/ambari-metrics/pom.xml
@@ -0,0 +1,180 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <groupId>org.apache.ambari</groupId>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>ambari-metrics</artifactId>
+  <version>0.1.0-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <modules>
+    <module>ambari-metrics-hadoop-sink</module>
+    <module>ambari-metrics-timelineservice</module>
+    <module>ambari-metrics-host-monitoring</module>
+  </modules>
+  <properties>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <python.ver>python &gt;= 2.6</python.ver>
+    <deb.python.ver>python (&gt;= 2.6)</deb.python.ver>
+    <!--TODO change to HDP URL-->
+    <hbase.tar>http://public-repo-1.hortonworks.com/HDP/centos6/2.x/GA/2.2.0.0/tars/hbase-0.98.4.2.2.0.0-2041-hadoop2.tar.gz</hbase.tar>
+    <hbase.folder>hbase-0.98.4.2.2.0.0-2041-hadoop2</hbase.folder>
+  </properties>
+  <repositories>
+    <repository>
+      <id>apache-hadoop</id>
+      <name>hdp</name>
+      <url>http://54.235.92.15/nexus/content/groups/public/</url>
+    </repository>
+  </repositories>
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <configuration>
+          <descriptors>
+            <descriptor>../ambari-project/src/main/assemblies/empty.xml</descriptor>
+          </descriptors>
+        </configuration>
+        <executions>
+          <execution>
+            <id>build-tarball</id>
+            <phase>none</phase>
+            <goals>
+              <goal>single</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.8</version>
+        <executions>
+          <execution>
+            <id>parse-version</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>parse-version</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>regex-property</id>
+            <goals>
+              <goal>regex-property</goal>
+            </goals>
+            <configuration>
+              <name>ambariVersion</name>
+              <value>${project.version}</value>
+              <regex>^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
+              <replacement>$1.$2.$3</replacement>
+              <failIfNoMatch>false</failIfNoMatch>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>3.0</version>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-clean-plugin</artifactId>
+        <configuration>
+          <filesets>
+            <fileset>
+              <directory>${basedir}</directory>
+              <followSymlinks>false</followSymlinks>
+              <includes>
+                <include>**/*.pyc</include>
+              </includes>
+            </fileset>
+          </filesets>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.8</version>
+        <executions>
+          <execution>
+            <id>parse-package-version</id>
+            <goals>
+              <goal>regex-property</goal>
+            </goals>
+            <configuration>
+              <name>package-version</name>
+              <value>${project.version}</value>
+              <regex>^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-).*</regex>
+              <replacement>$1.$2.$3</replacement>
+              <failIfNoMatch>true</failIfNoMatch>
+            </configuration>
+          </execution>
+          <execution>
+            <id>parse-package-release</id>
+            <goals>
+              <goal>regex-property</goal>
+            </goals>
+            <configuration>
+              <name>package-release</name>
+              <value>${project.version}</value>
+              <regex>^([0-9]+)\.([0-9]+)\.([0-9]+)(\.|-)(([0-9]+)|(SNAPSHOT)).*</regex>
+              <replacement>$5</replacement>
+              <failIfNoMatch>true</failIfNoMatch>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>rpm-maven-plugin</artifactId>
+        <version>2.0.1</version>
+        <executions>
+          <execution>
+            <!-- unbinds rpm creation from maven lifecycle -->
+            <phase>none</phase>
+            <goals>
+              <goal>attached-rpm</goal>
+            </goals>
+          </execution>
+        </executions>
+        <configuration>
+          <copyright>2012, Apache Software Foundation</copyright>
+          <group>Development</group>
+          <description>Maven Recipe: RPM Package.</description>
+          <version>${package-version}</version>
+          <release>${package-release}</release>
+          <mappings/>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+  <dependencies>
+  </dependencies>
+  <profiles>
+    <profile>
+      <id>metrics2-standalone</id>
+    </profile>
+  </profiles>
+
+
+</project>

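For reference, the two regex-property executions above derive
package-version and package-release from ${project.version}. A standalone
sketch of the same transformation with java.util.regex (class and variable
names here are illustrative only):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class VersionRegexSketch {
      public static void main(String[] args) {
        String projectVersion = "0.1.0-SNAPSHOT";
        // Mirrors parse-package-version: keep major.minor.patch.
        String version = projectVersion.replaceAll(
            "^([0-9]+)\\.([0-9]+)\\.([0-9]+)(\\.|-).*", "$1.$2.$3");
        // Mirrors parse-package-release: keep the build number or SNAPSHOT.
        Matcher m = Pattern.compile(
            "^([0-9]+)\\.([0-9]+)\\.([0-9]+)(\\.|-)(([0-9]+)|(SNAPSHOT)).*")
            .matcher(projectVersion);
        String release = m.matches() ? m.group(5) : projectVersion;
        System.out.println(version + " / " + release); // 0.1.0 / SNAPSHOT
      }
    }
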
http://git-wip-us.apache.org/repos/asf/ambari/blob/a52f8a55/ambari-server/src/main/java/org/apache/ambari/server/Role.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/Role.java b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
index 45a9ac5..4712992 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/Role.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/Role.java
@@ -107,6 +107,8 @@ public class Role {
   public static final Role KDC_SERVER = valueOf("KDC_SERVER");
   public static final Role KERBEROS_CLIENT = valueOf("KERBEROS_CLIENT");
   public static final Role KERBEROS_SERVICE_CHECK = valueOf("KERBEROS_SERVICE_CHECK");
+  public static final Role METRIC_COLLECTOR = valueOf("METRIC_COLLECTOR");
+  public static final Role METRIC_MONITOR = valueOf("METRIC_MONITOR");
 
   private String name = null;
   
