http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
new file mode 100644
index 0000000..a0572a2
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/data/TestMetric.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.data;
+
+import org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util.Json;
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.entry;
+import static org.junit.Assert.assertEquals;
+
+public class TestMetric {
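+  // Sample serialized Metric: four data points at timestamps 0, 5000, 10000 and 15000, all with value 5.35.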
+  private static final String SAMPLE_METRIC_IN_JSON = "{\n" +
+    "  \"instanceid\" : \"\",\n" +
+    "  \"hostname\" : \"localhost\",\n" +
+    "  \"metrics\" : {\n" +
+    "    \"0\" : \"5.35\",\n" +
+    "    \"5000\" : \"5.35\",\n" +
+    "    \"10000\" : \"5.35\",\n" +
+    "    \"15000\" : \"5.35\"\n" +
+    "  },\n" +
+    "  \"starttime\" : \"0\",\n" +
+    "  \"appid\" : \"HOST\",\n" +
+    "  \"metricname\" : \"disk_free\"\n" +
+    "}";
+
+  @Test
+  public void testSerializeToJson() throws IOException {
+    Metric diskOnHostMetric = new Metric(new ApplicationInstance("localhost", AppID.HOST, ""), "disk_free", 0);
+
+    long timestamp = 0;
+    double value = 5.35;
+
+    diskOnHostMetric.putMetric(timestamp, Double.toString(value));
+    diskOnHostMetric.putMetric(timestamp + 5000, Double.toString(value));
+    diskOnHostMetric.putMetric(timestamp + 10000, Double.toString(value));
+    diskOnHostMetric.putMetric(timestamp + 15000, Double.toString(value));
+
+    String expected = SAMPLE_METRIC_IN_JSON;
+    String s = new Json(true).serialize(diskOnHostMetric);
+
+    assertEquals("Json should match", expected, s);
+  }
+
+  @Test
+  public void testDeserializeObjectFromString() throws IOException {
+    String source = SAMPLE_METRIC_IN_JSON;
+
+    Metric m = new Json().deserialize(source, Metric.class);
+
+    assertEquals("localhost", m.getHostname());
+    assertEquals("HOST", m.getAppid());
+    assertEquals("", m.getInstanceid());
+    assertEquals("disk_free", m.getMetricname());
+    assertEquals("0", m.getStarttime());
+
+    assertThat(m.getMetrics()).isNotEmpty().hasSize(4).contains(
+      entry("0", "5.35"),
+      entry("5000", "5.35"),
+      entry("10000", "5.35"),
+      entry("15000", "5.35"));
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
new file mode 100644
index 0000000..4411be5
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestRestMetricsSender.java
@@ -0,0 +1,76 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+  .loadsimulator.net;
+
+import org.junit.Test;
+
+import java.io.IOException;
+
+import static org.easymock.EasyMock.*;
+import static org.junit.Assert.assertEquals;
+
+public class TestRestMetricsSender {
+
+  @Test
+  public void testPushMetrics() throws Exception {
+    final UrlService svcMock = createStrictMock(UrlService.class);
+    final String payload = "test";
+    final String expectedResponse = "mockResponse";
+
+    expect(svcMock.send(anyString())).andReturn(expectedResponse);
+    svcMock.disconnect();
+    expectLastCall();
+
+    replay(svcMock);
+
+    RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
+      @Override
+      protected UrlService getConnectedUrlService() throws IOException {
+        return svcMock;
+      }
+    };
+    String response = sender.pushMetrics(payload);
+
+    verify(svcMock);
+    assertEquals("", expectedResponse, response);
+  }
+
+  @Test
+  public void testPushMetricsFailed() throws Exception {
+    final UrlService svcMock = createStrictMock(UrlService.class);
+    final String payload = "test";
+    final String expectedResponse = "mockResponse";
+    RestMetricsSender sender = new RestMetricsSender("expectedHostName") {
+      @Override
+      protected UrlService getConnectedUrlService() throws IOException {
+        return svcMock;
+      }
+    };
+
+    expect(svcMock.send(anyString())).andThrow(new IOException());
+    svcMock.disconnect();
+    expectLastCall();
+
+    replay(svcMock);
+
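+    // pushMetrics is expected to handle the IOException internally; only the mock
+    // interactions are verified here, the response value is not asserted.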
+    String response = sender.pushMetrics(payload);
+
+    verify(svcMock);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
new file mode 100644
index 0000000..7e29ae3
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/net/TestStdOutMetricsSender.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.net;
+
+
+import org.junit.Test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+
+public class TestStdOutMetricsSender {
+
+  @Test
+  public void testPushMetrics() throws Exception {
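+    // Smoke test: redirect the sender's output into an in-memory stream; no assertions are made on the printed payload.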
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    PrintStream out = new PrintStream(baos);
+    StdOutMetricsSender sender = new StdOutMetricsSender("expectedHostName", out);
+    sender.pushMetrics("test");
+
+    System.out.println(baos.toString());
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
new file mode 100644
index 0000000..462aaf0
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestRandomMetricsProvider.java
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+
+public class TestRandomMetricsProvider {
+
+  @Test
+  public void testReturnSingle() {
+    double from = 5.25;
+    double to = 5.40;
+    RandomMetricsProvider provider = new RandomMetricsProvider(from, to);
+    double metric = provider.next();
+
+    assertTrue("Generated metric should be in range", from < metric && metric 
< to);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
new file mode 100644
index 0000000..dd513aa
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/loadsimulator/util/TestTimeStampProvider.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.loadsimulator.util;
+
+import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.assertEquals;
+
+public class TestTimeStampProvider {
+
+  @Test
+  public void testReturnSingle() {
+    long startTime = 1411663170112L;
+    int timeStep = 5000;
+    TimeStampProvider tm = new TimeStampProvider(startTime, timeStep, 0);
+
+    long tStamp = tm.next();
+
+    assertEquals("First generated timestamp should match starttime", 
startTime, tStamp);
+  }
+
+  @Test
+  public void testReturnTstampsForSendInterval() throws Exception {
+    long startTime = 0;
+    int collectInterval = 5;
+    int sendInterval = 30;
+    TimeStampProvider tsp = new TimeStampProvider(startTime, collectInterval, sendInterval);
+
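+    // With a collect interval of 5 and a send interval of 30, one batch should contain 30 / 5 = 6 timestamps.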
+    long[] timestamps = tsp.timestampsForNextInterval();
+
+    assertThat(timestamps)
+      .hasSize(6)
+      .containsOnly(0, 5, 10, 15, 20, 25);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
new file mode 100644
index 0000000..96b8a83
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractMiniHBaseClusterTest.java
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import com.google.common.collect.Maps;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.phoenix.hbase.index.write.IndexWriterUtils;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.assertj.core.api.Assertions.assertThat;
+
+public abstract class AbstractMiniHBaseClusterTest extends BaseTest {
+
+  protected static final long BATCH_SIZE = 3;
+
+  @BeforeClass
+  public static void doSetup() throws Exception {
+    Map<String, String> props = getDefaultProps();
+    props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(5000));
+    props.put(IndexWriterUtils.HTABLE_THREAD_KEY, Integer.toString(100));
+    // Make a small batch size to test multiple calls to reserve sequences
+    props.put(QueryServices.SEQUENCE_CACHE_SIZE_ATTRIB,
+      Long.toString(BATCH_SIZE));
+    // Must update config before starting server
+    setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
+  }
+
+  @AfterClass
+  public static void doTeardown() throws Exception {
+    dropNonSystemTables();
+  }
+
+  @After
+  public void cleanUpAfterTest() throws Exception {
+    deletePriorTables(HConstants.LATEST_TIMESTAMP, getUrl());
+  }
+
+  public static Map<String, String> getDefaultProps() {
+    Map<String, String> props = new HashMap<String, String>();
+    // Must update config before starting server
+    props.put(QueryServices.STATS_USE_CURRENT_TIME_ATTRIB,
+      Boolean.FALSE.toString());
+    props.put("java.security.krb5.realm", "");
+    props.put("java.security.krb5.kdc", "");
+    return props;
+  }
+
+  protected Connection getConnection(String url) throws SQLException {
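+    // Note: the url parameter is currently ignored; the connection is always opened against getUrl().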
+    Properties props = PropertiesUtil.deepCopy(TEST_PROPERTIES);
+    Connection conn = DriverManager.getConnection(getUrl(), props);
+    return conn;
+  }
+
+  /**
+   * A canary test. Will show if the infrastructure is set-up correctly.
+   */
+  @Test
+  public void testClusterOK() throws Exception {
+    Connection conn = getConnection(getUrl());
+    conn.setAutoCommit(true);
+
+    String sampleDDL = "CREATE TABLE TEST_METRICS " +
+      "(TEST_COLUMN VARCHAR " +
+      "CONSTRAINT pk PRIMARY KEY (TEST_COLUMN)) " +
+      "DATA_BLOCK_ENCODING='FAST_DIFF', IMMUTABLE_ROWS=true, " +
+      "TTL=86400, COMPRESSION='NONE' ";
+
+    Statement stmt = conn.createStatement();
+    stmt.executeUpdate(sampleDDL);
+    conn.commit();
+
+    ResultSet rs = stmt.executeQuery(
+      "SELECT COUNT(TEST_COLUMN) FROM TEST_METRICS");
+
+    rs.next();
+    long l = rs.getLong(1);
+    assertThat(l).isGreaterThanOrEqualTo(0);
+
+    stmt.execute("DROP TABLE TEST_METRICS");
+    conn.close();
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
new file mode 100644
index 0000000..1430478
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/AbstractPhoenixConnectionlessTest.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.phoenix.jdbc.PhoenixEmbeddedDriver;
+import org.apache.phoenix.jdbc.PhoenixTestDriver;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.util.PropertiesUtil;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.apache.phoenix.util.TestUtil;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import static org.apache.phoenix.util.PhoenixRuntime.TENANT_ID_ATTRIB;
+import static org.apache.phoenix.util.TestUtil.TEST_PROPERTIES;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+public abstract class AbstractPhoenixConnectionlessTest extends BaseTest {
+
+  protected static String getUrl() {
+    return TestUtil.PHOENIX_CONNECTIONLESS_JDBC_URL;
+  }
+
+  protected static String getUrl(String tenantId) {
+    return getUrl() + ';' + TENANT_ID_ATTRIB + '=' + tenantId;
+  }
+
+  protected static PhoenixTestDriver driver;
+
+  private static void startServer(String url) throws Exception {
+    assertNull(driver);
+    // only load the test driver if we are testing locally - for integration tests, we want to
+    // test on a wider scale
+    if (PhoenixEmbeddedDriver.isTestUrl(url)) {
+      driver = initDriver(ReadOnlyProps.EMPTY_PROPS);
+      assertTrue(DriverManager.getDriver(url) == driver);
+      driver.connect(url, PropertiesUtil.deepCopy(TEST_PROPERTIES));
+    }
+  }
+
+  protected static synchronized PhoenixTestDriver initDriver(ReadOnlyProps props) throws Exception {
+    if (driver == null) {
+      driver = new PhoenixTestDriver(props);
+      DriverManager.registerDriver(driver);
+    }
+    return driver;
+  }
+
+  private String connUrl;
+
+  @Before
+  public void setup() throws Exception {
+    connUrl = getUrl();
+    startServer(connUrl);
+  }
+
+  @Test
+  public void testStorageSystemInitialized() throws Exception {
+    String sampleDDL = "CREATE TABLE TEST_METRICS (TEST_COLUMN VARCHAR " +
+      "CONSTRAINT pk PRIMARY KEY (TEST_COLUMN)) 
DATA_BLOCK_ENCODING='FAST_DIFF', " +
+      "IMMUTABLE_ROWS=true, TTL=86400, COMPRESSION='SNAPPY'";
+
+    Connection conn = null;
+    PreparedStatement stmt = null;
+    try {
+      conn = DriverManager.getConnection(connUrl);
+      stmt = conn.prepareStatement(sampleDDL);
+      stmt.execute();
+      conn.commit();
+    } finally {
+      if (stmt != null) {
+        stmt.close();
+      }
+      if (conn != null) {
+        conn.close();
+      }
+    }
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    if (driver != null) {
+      try {
+        driver.close();
+      } finally {
+        PhoenixTestDriver phoenixTestDriver = driver;
+        driver = null;
+        DriverManager.deregisterDriver(phoenixTestDriver);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
new file mode 100644
index 0000000..5d8ba96
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestMetricHostAggregate.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics
+  .timeline;
+
+import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+public class TestMetricHostAggregate {
+
+  @Test
+  public void testCreateAggregate() throws Exception {
+    // given
+    MetricHostAggregate aggregate = createAggregate(3.0, 1.0, 2.0, 2);
+
+    //then
+    assertThat(aggregate.getSum()).isEqualTo(3.0);
+    assertThat(aggregate.getMin()).isEqualTo(1.0);
+    assertThat(aggregate.getMax()).isEqualTo(2.0);
+    assertThat(aggregate.getAvg()).isEqualTo(3.0 / 2);
+  }
+
+  @Test
+  public void testUpdateAggregates() throws Exception {
+    // given
+    MetricHostAggregate aggregate = createAggregate(3.0, 1.0, 2.0, 2);
+
+    //when
+    aggregate.updateAggregates(createAggregate(8.0, 0.5, 7.5, 2));
+    aggregate.updateAggregates(createAggregate(1.0, 1.0, 1.0, 1));
+
+    //then
+    assertThat(aggregate.getSum()).isEqualTo(12.0);
+    assertThat(aggregate.getMin()).isEqualTo(0.5);
+    assertThat(aggregate.getMax()).isEqualTo(7.5);
+    assertThat(aggregate.getAvg()).isEqualTo((3.0 + 8.0 + 1.0) / 5);
+  }
+
+  private MetricHostAggregate createAggregate
+    (double sum, double min, double max, int samplesCount) {
+    MetricHostAggregate aggregate = new MetricHostAggregate();
+    aggregate.setSum(sum);
+    aggregate.setMax(max);
+    aggregate.setMin(min);
+    aggregate.setDeviation(0.0);
+    aggregate.setNumberOfSamples(samplesCount);
+    return aggregate;
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
new file mode 100644
index 0000000..758f5a9
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestPhoenixTransactSQL.java
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Arrays;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.Condition;
+
+public class TestPhoenixTransactSQL {
+  @Test
+  public void testConditionClause() throws Exception {
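+    // A fully populated Condition should produce one IN placeholder per metric name plus host, app, instance and time-range predicates.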
+    Condition condition = new Condition(
+      Arrays.asList("cpu_user", "mem_free"), "h1", "a1", "i1",
+        1407959718L, 1407959918L, null, false);
+
+    String preparedClause = condition.getConditionClause();
+    String expectedClause = "METRIC_NAME IN (?, ?) AND HOSTNAME = ? AND " +
+      "APP_ID = ? AND INSTANCE_ID = ? AND SERVER_TIME >= ? AND SERVER_TIME < 
?";
+
+    Assert.assertNotNull(preparedClause);
+    Assert.assertEquals(expectedClause, preparedClause);
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
new file mode 100644
index 0000000..c893314
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/metrics/timeline/TestTimelineMetricStore.java
@@ -0,0 +1,81 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline;
+
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetric;
+import org.apache.hadoop.metrics2.sink.timeline.TimelineMetrics;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import java.io.IOException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+public class TestTimelineMetricStore implements TimelineMetricStore {
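+  // Test stub: getTimelineMetrics ignores its arguments and returns two hard-coded metrics
+  // (cpu_user and mem_free); the other methods return null or an empty response.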
+  @Override
+  public TimelineMetrics getTimelineMetrics(List<String> metricNames,
+      String hostname, String applicationId, String instanceId, Long startTime,
+      Long endTime, Integer limit, boolean groupedByHost) throws SQLException,
+    IOException {
+    TimelineMetrics timelineMetrics = new TimelineMetrics();
+    List<TimelineMetric> metricList = new ArrayList<TimelineMetric>();
+    timelineMetrics.setMetrics(metricList);
+    TimelineMetric metric1 = new TimelineMetric();
+    TimelineMetric metric2 = new TimelineMetric();
+    metricList.add(metric1);
+    metricList.add(metric2);
+    metric1.setMetricName("cpu_user");
+    metric1.setAppId("1");
+    metric1.setInstanceId(null);
+    metric1.setHostName("c6401");
+    metric1.setStartTime(1407949812L);
+    metric1.setMetricValues(new HashMap<Long, Double>() {{
+      put(1407949812L, 1.0d);
+      put(1407949912L, 1.8d);
+      put(1407950002L, 0.7d);
+    }});
+
+    metric2.setMetricName("mem_free");
+    metric2.setAppId("2");
+    metric2.setInstanceId("3");
+    metric2.setHostName("c6401");
+    metric2.setStartTime(1407949812L);
+    metric2.setMetricValues(new HashMap<Long, Double>() {{
+      put(1407949812L, 2.5d);
+      put(1407949912L, 3.0d);
+      put(1407950002L, 0.9d);
+    }});
+
+    return timelineMetrics;
+  }
+
+  @Override
+  public TimelineMetric getTimelineMetric(String metricName, String hostname,
+      String applicationId, String instanceId, Long startTime, Long endTime,
+      Integer limit) throws SQLException, IOException {
+
+    return null;
+  }
+
+  @Override
+  public TimelinePutResponse putMetrics(TimelineMetrics metrics)
+      throws SQLException, IOException {
+
+    return new TimelinePutResponse();
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
new file mode 100644
index 0000000..d684a27
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestGenericObjectMapper.java
@@ -0,0 +1,102 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.WritableComparator;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestGenericObjectMapper {
+
+  @Test
+  public void testEncoding() {
+    testEncoding(Long.MAX_VALUE);
+    testEncoding(Long.MIN_VALUE);
+    testEncoding(0l);
+    testEncoding(128l);
+    testEncoding(256l);
+    testEncoding(512l);
+    testEncoding(-256l);
+  }
+
+  private static void testEncoding(long l) {
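+    // Round-trip the value through the reverse-ordered encoding, then check that
+    // byte-wise comparison of the encodings inverts the numeric order of adjacent longs.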
+    byte[] b = GenericObjectMapper.writeReverseOrderedLong(l);
+    assertEquals("error decoding", l,
+        GenericObjectMapper.readReverseOrderedLong(b, 0));
+    byte[] buf = new byte[16];
+    System.arraycopy(b, 0, buf, 5, 8);
+    assertEquals("error decoding at offset", l,
+        GenericObjectMapper.readReverseOrderedLong(buf, 5));
+    if (l > Long.MIN_VALUE) {
+      byte[] a = GenericObjectMapper.writeReverseOrderedLong(l-1);
+      assertEquals("error preserving ordering", 1,
+          WritableComparator.compareBytes(a, 0, a.length, b, 0, b.length));
+    }
+    if (l < Long.MAX_VALUE) {
+      byte[] c = GenericObjectMapper.writeReverseOrderedLong(l+1);
+      assertEquals("error preserving ordering", 1,
+          WritableComparator.compareBytes(b, 0, b.length, c, 0, c.length));
+    }
+  }
+
+  private static void verify(Object o) throws IOException {
+    assertEquals(o, GenericObjectMapper.read(GenericObjectMapper.write(o)));
+  }
+
+  @Test
+  public void testValueTypes() throws IOException {
+    verify(Integer.MAX_VALUE);
+    verify(Integer.MIN_VALUE);
+    assertEquals(Integer.MAX_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MAX_VALUE)));
+    assertEquals(Integer.MIN_VALUE, GenericObjectMapper.read(
+        GenericObjectMapper.write((long) Integer.MIN_VALUE)));
+    verify((long)Integer.MAX_VALUE + 1l);
+    verify((long)Integer.MIN_VALUE - 1l);
+
+    verify(Long.MAX_VALUE);
+    verify(Long.MIN_VALUE);
+
+    assertEquals(42, GenericObjectMapper.read(GenericObjectMapper.write(42l)));
+    verify(42);
+    verify(1.23);
+    verify("abc");
+    verify(true);
+    List<String> list = new ArrayList<String>();
+    list.add("123");
+    list.add("abc");
+    verify(list);
+    Map<String,String> map = new HashMap<String,String>();
+    map.put("k1","v1");
+    map.put("k2","v2");
+    verify(map);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
new file mode 100644
index 0000000..9b27309
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestLeveldbTimelineStore.java
@@ -0,0 +1,253 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.iq80.leveldb.DBIterator;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.GenericObjectMapper.writeReverseOrderedLong;
+import static org.junit.Assert.assertEquals;
+
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class TestLeveldbTimelineStore extends TimelineStoreTestUtils {
+  private FileContext fsContext;
+  private File fsPath;
+
+  @Before
+  public void setup() throws Exception {
+    fsContext = FileContext.getLocalFSFileContext();
+    Configuration conf = new Configuration();
+    fsPath = new File("target", this.getClass().getSimpleName() +
+        "-tmpDir").getAbsoluteFile();
+    fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+    conf.set(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH,
+        fsPath.getAbsolutePath());
+    conf.setBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, false);
+    store = new LeveldbTimelineStore();
+    store.init(conf);
+    store.start();
+    loadTestData();
+    loadVerificationData();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    store.stop();
+    fsContext.delete(new Path(fsPath.getAbsolutePath()), true);
+  }
+
+  @Test
+  public void testGetSingleEntity() throws IOException {
+    super.testGetSingleEntity();
+    ((LeveldbTimelineStore)store).clearStartTimeCache();
+    super.testGetSingleEntity();
+    loadTestData();
+  }
+
+  @Test
+  public void testGetEntities() throws IOException {
+    super.testGetEntities();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromId() throws IOException {
+    super.testGetEntitiesWithFromId();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromTs() throws IOException {
+    super.testGetEntitiesWithFromTs();
+  }
+
+  @Test
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    super.testGetEntitiesWithPrimaryFilters();
+  }
+
+  @Test
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    super.testGetEntitiesWithSecondaryFilters();
+  }
+
+  @Test
+  public void testGetEvents() throws IOException {
+    super.testGetEvents();
+  }
+
+  @Test
+  public void testCacheSizes() {
+    Configuration conf = new Configuration();
+    assertEquals(10000, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
+    assertEquals(10000, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
+    conf.setInt(
+        YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_READ_CACHE_SIZE,
+        10001);
+    assertEquals(10001, LeveldbTimelineStore.getStartTimeReadCacheSize(conf));
+    conf = new Configuration();
+    conf.setInt(
+        YarnConfiguration.TIMELINE_SERVICE_LEVELDB_START_TIME_WRITE_CACHE_SIZE,
+        10002);
+    assertEquals(10002, LeveldbTimelineStore.getStartTimeWriteCacheSize(conf));
+  }
+
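+  // Helper that drives LeveldbTimelineStore.deleteNextEntity with fresh DB iterators
+  // and ensures the iterators are closed afterwards.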
+  private boolean deleteNextEntity(String entityType, byte[] ts)
+      throws IOException, InterruptedException {
+    DBIterator iterator = null;
+    DBIterator pfIterator = null;
+    try {
+      iterator = ((LeveldbTimelineStore)store).getDbIterator(false);
+      pfIterator = ((LeveldbTimelineStore)store).getDbIterator(false);
+      return ((LeveldbTimelineStore)store).deleteNextEntity(entityType, ts,
+          iterator, pfIterator, false);
+    } finally {
+      IOUtils.cleanup(null, iterator, pfIterator);
+    }
+  }
+
+  @Test
+  public void testGetEntityTypes() throws IOException {
+    List<String> entityTypes = ((LeveldbTimelineStore)store).getEntityTypes();
+    assertEquals(4, entityTypes.size());
+    assertEquals(entityType1, entityTypes.get(0));
+    assertEquals(entityType2, entityTypes.get(1));
+    assertEquals(entityType4, entityTypes.get(2));
+    assertEquals(entityType5, entityTypes.get(3));
+  }
+
+  @Test
+  public void testDeleteEntities() throws IOException, InterruptedException {
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+
+    assertEquals(false, deleteNextEntity(entityType1,
+        writeReverseOrderedLong(122l)));
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+
+    assertEquals(true, deleteNextEntity(entityType1,
+        writeReverseOrderedLong(123l)));
+    List<TimelineEntity> entities = getEntities("type_2");
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, Collections.singletonMap(
+        entityType1, Collections.singleton(entityId1b)), EMPTY_PRIMARY_FILTERS,
+        EMPTY_MAP, entities.get(0));
+    entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    ((LeveldbTimelineStore)store).discardOldEntities(-123l);
+    assertEquals(1, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(3, ((LeveldbTimelineStore)store).getEntityTypes().size());
+
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+  @Test
+  public void testDeleteEntitiesPrimaryFilters()
+      throws IOException, InterruptedException {
+    Map<String, Set<Object>> primaryFilter =
+        Collections.singletonMap("user", Collections.singleton(
+            (Object) "otheruser"));
+    TimelineEntities atsEntities = new TimelineEntities();
+    atsEntities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, 789l, Collections.singletonList(ev2), null, primaryFilter,
+        null)));
+    TimelinePutResponse response = store.put(atsEntities);
+    assertEquals(0, response.getErrors().size());
+
+    NameValuePair pfPair = new NameValuePair("user", "otheruser");
+    List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
+        pfPair);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, Collections.singletonList(ev2),
+        EMPTY_REL_ENTITIES, primaryFilter, EMPTY_MAP, entities.get(0));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", userFilter);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    ((LeveldbTimelineStore)store).discardOldEntities(-123l);
+    assertEquals(1, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, ((LeveldbTimelineStore)store).getEntityTypes().size());
+
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", pfPair).size());
+    assertEquals(0, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+  @Test
+  public void testFromTsWithDeletion()
+      throws IOException, InterruptedException {
+    long l = System.currentTimeMillis();
+    assertEquals(2, getEntitiesFromTs("type_1", l).size());
+    assertEquals(1, getEntitiesFromTs("type_2", l).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    ((LeveldbTimelineStore)store).discardOldEntities(123l);
+    assertEquals(0, getEntitiesFromTs("type_1", l).size());
+    assertEquals(0, getEntitiesFromTs("type_2", l).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    assertEquals(0, getEntities("type_1").size());
+    assertEquals(0, getEntities("type_2").size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    loadTestData();
+    assertEquals(0, getEntitiesFromTs("type_1", l).size());
+    assertEquals(0, getEntitiesFromTs("type_2", l).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        l).size());
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
new file mode 100644
index 0000000..415de53
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TestMemoryTimelineStore.java
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class TestMemoryTimelineStore extends TimelineStoreTestUtils {
+
+  @Before
+  public void setup() throws Exception {
+    store = new MemoryTimelineStore();
+    store.init(new YarnConfiguration());
+    store.start();
+    loadTestData();
+    loadVerificationData();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    store.stop();
+  }
+
+  public TimelineStore getTimelineStore() {
+    return store;
+  }
+
+  @Test
+  public void testGetSingleEntity() throws IOException {
+    super.testGetSingleEntity();
+  }
+
+  @Test
+  public void testGetEntities() throws IOException {
+    super.testGetEntities();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromId() throws IOException {
+    super.testGetEntitiesWithFromId();
+  }
+
+  @Test
+  public void testGetEntitiesWithFromTs() throws IOException {
+    super.testGetEntitiesWithFromTs();
+  }
+
+  @Test
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    super.testGetEntitiesWithPrimaryFilters();
+  }
+
+  @Test
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    super.testGetEntitiesWithSecondaryFilters();
+  }
+
+  @Test
+  public void testGetEvents() throws IOException {
+    super.testGetEvents();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/c20904e4/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
----------------------------------------------------------------------
diff --git a/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
new file mode 100644
index 0000000..d760536
--- /dev/null
+++ b/ambari-metrics/ambari-metrics-timelineservice/src/test/java/org/apache/hadoop/yarn/server/applicationhistoryservice/timeline/TimelineStoreTestUtils.java
@@ -0,0 +1,789 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.applicationhistoryservice.timeline;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntities;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvent;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse;
+import org.apache.hadoop.yarn.api.records.timeline.TimelineEvents.EventsOfOneEntity;
+import org.apache.hadoop.yarn.api.records.timeline.TimelinePutResponse.TimelinePutError;
+import org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.TimelineReader.Field;
+
+public class TimelineStoreTestUtils {
+
+  protected static final List<TimelineEvent> EMPTY_EVENTS =
+      Collections.emptyList();
+  protected static final Map<String, Object> EMPTY_MAP =
+      Collections.emptyMap();
+  protected static final Map<String, Set<Object>> EMPTY_PRIMARY_FILTERS =
+      Collections.emptyMap();
+  protected static final Map<String, Set<String>> EMPTY_REL_ENTITIES =
+      Collections.emptyMap();
+
+  protected TimelineStore store;
+  protected String entityId1;
+  protected String entityType1;
+  protected String entityId1b;
+  protected String entityId2;
+  protected String entityType2;
+  protected String entityId4;
+  protected String entityType4;
+  protected String entityId5;
+  protected String entityType5;
+  protected Map<String, Set<Object>> primaryFilters;
+  protected Map<String, Object> secondaryFilters;
+  protected Map<String, Object> allFilters;
+  protected Map<String, Object> otherInfo;
+  protected Map<String, Set<String>> relEntityMap;
+  protected Map<String, Set<String>> relEntityMap2;
+  protected NameValuePair userFilter;
+  protected NameValuePair numericFilter1;
+  protected NameValuePair numericFilter2;
+  protected NameValuePair numericFilter3;
+  protected Collection<NameValuePair> goodTestingFilters;
+  protected Collection<NameValuePair> badTestingFilters;
+  protected TimelineEvent ev1;
+  protected TimelineEvent ev2;
+  protected TimelineEvent ev3;
+  protected TimelineEvent ev4;
+  protected Map<String, Object> eventInfo;
+  protected List<TimelineEvent> events1;
+  protected List<TimelineEvent> events2;
+  protected long beforeTs;
+
+  /**
+   * Load test data into the store under test.
+   */
+  protected void loadTestData() throws IOException {
+    beforeTs = System.currentTimeMillis()-1;
+    TimelineEntities entities = new TimelineEntities();
+    Map<String, Set<Object>> primaryFilters =
+        new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    l2.add((long)Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    Map<String, Object> secondaryFilters = new HashMap<String, Object>();
+    secondaryFilters.put("startTime", 123456l);
+    secondaryFilters.put("status", "RUNNING");
+    Map<String, Object> otherInfo1 = new HashMap<String, Object>();
+    otherInfo1.put("info1", "val1");
+    otherInfo1.putAll(secondaryFilters);
+
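+    // entityId1b and entityId2 deliberately share the id "id_2"; the store
+    // distinguishes them by entity type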
+    String entityId1 = "id_1";
+    String entityType1 = "type_1";
+    String entityId1b = "id_2";
+    String entityId2 = "id_2";
+    String entityType2 = "type_2";
+    String entityId4 = "id_4";
+    String entityType4 = "type_4";
+    String entityId5 = "id_5";
+    String entityType5 = "type_5";
+
+    Map<String, Set<String>> relatedEntities =
+        new HashMap<String, Set<String>>();
+    relatedEntities.put(entityType2, Collections.singleton(entityId2));
+
+    TimelineEvent ev3 = createEvent(789l, "launch_event", null);
+    TimelineEvent ev4 = createEvent(-123l, "init_event", null);
+    List<TimelineEvent> events = new ArrayList<TimelineEvent>();
+    events.add(ev3);
+    events.add(ev4);
+    entities.setEntities(Collections.singletonList(createEntity(entityId2,
+        entityType2, null, events, null, null, null)));
+    TimelinePutResponse response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    TimelineEvent ev1 = createEvent(123l, "start_event", null);
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, 123l, Collections.singletonList(ev1),
+        relatedEntities, primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, null, Collections.singletonList(ev1), relatedEntities,
+        primaryFilters, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
+    Map<String, Object> eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    TimelineEvent ev2 = createEvent(456l, "end_event", eventInfo);
+    Map<String, Object> otherInfo2 = new HashMap<String, Object>();
+    otherInfo2.put("info2", "val2");
+    entities.setEntities(Collections.singletonList(createEntity(entityId1,
+        entityType1, null, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+    entities.setEntities(Collections.singletonList(createEntity(entityId1b,
+        entityType1, 789l, Collections.singletonList(ev2), null,
+        primaryFilters, otherInfo2)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+
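+    // an entity with neither a start time nor events to derive one from must
+    // be rejected with a NO_START_TIME error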
+    entities.setEntities(Collections.singletonList(createEntity(
+        "badentityid", "badentity", null, null, null, null, otherInfo1)));
+    response = store.put(entities);
+    assertEquals(1, response.getErrors().size());
+    TimelinePutError error = response.getErrors().get(0);
+    assertEquals("badentityid", error.getEntityId());
+    assertEquals("badentity", error.getEntityType());
+    assertEquals(TimelinePutError.NO_START_TIME, error.getErrorCode());
+
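+    // entity 4 relates to entity 5, which is never put directly and should
+    // exist only through this relationship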
+    relatedEntities.clear();
+    relatedEntities.put(entityType5, Collections.singleton(entityId5));
+    entities.setEntities(Collections.singletonList(createEntity(entityId4,
+        entityType4, 42l, null, relatedEntities, null, null)));
+    response = store.put(entities);
+    assertEquals(0, response.getErrors().size());
+  }
+
+  /**
+   * Build the expected values that the verification methods below assert
+   * against.
+   */
+  protected void loadVerificationData() throws Exception {
+    userFilter = new NameValuePair("user", "username");
+    numericFilter1 = new NameValuePair("appname", Integer.MAX_VALUE);
+    numericFilter2 = new NameValuePair("long", (long)Integer.MAX_VALUE + 1l);
+    numericFilter3 = new NameValuePair("other", "123abc");
+    goodTestingFilters = new ArrayList<NameValuePair>();
+    goodTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    goodTestingFilters.add(new NameValuePair("status", "RUNNING"));
+    badTestingFilters = new ArrayList<NameValuePair>();
+    badTestingFilters.add(new NameValuePair("appname", Integer.MAX_VALUE));
+    badTestingFilters.add(new NameValuePair("status", "FINISHED"));
+
+    primaryFilters = new HashMap<String, Set<Object>>();
+    Set<Object> l1 = new HashSet<Object>();
+    l1.add("username");
+    Set<Object> l2 = new HashSet<Object>();
+    l2.add(Integer.MAX_VALUE);
+    Set<Object> l3 = new HashSet<Object>();
+    l3.add("123abc");
+    Set<Object> l4 = new HashSet<Object>();
+    l4.add((long)Integer.MAX_VALUE + 1l);
+    primaryFilters.put("user", l1);
+    primaryFilters.put("appname", l2);
+    primaryFilters.put("other", l3);
+    primaryFilters.put("long", l4);
+    secondaryFilters = new HashMap<String, Object>();
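+    // loadTestData stored startTime as a long; the stores are expected to
+    // return numeric values that fit in an int as Integer, hence the int here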
+    secondaryFilters.put("startTime", 123456);
+    secondaryFilters.put("status", "RUNNING");
+    allFilters = new HashMap<String, Object>();
+    allFilters.putAll(secondaryFilters);
+    for (Entry<String, Set<Object>> pf : primaryFilters.entrySet()) {
+      for (Object o : pf.getValue()) {
+        allFilters.put(pf.getKey(), o);
+      }
+    }
+    otherInfo = new HashMap<String, Object>();
+    otherInfo.put("info1", "val1");
+    otherInfo.put("info2", "val2");
+    otherInfo.putAll(secondaryFilters);
+
+    entityId1 = "id_1";
+    entityType1 = "type_1";
+    entityId1b = "id_2";
+    entityId2 = "id_2";
+    entityType2 = "type_2";
+    entityId4 = "id_4";
+    entityType4 = "type_4";
+    entityId5 = "id_5";
+    entityType5 = "type_5";
+
+    ev1 = createEvent(123l, "start_event", null);
+
+    eventInfo = new HashMap<String, Object>();
+    eventInfo.put("event info 1", "val1");
+    ev2 = createEvent(456l, "end_event", eventInfo);
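+    // expected event lists are ordered newest-first, matching retrieval order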
+    events1 = new ArrayList<TimelineEvent>();
+    events1.add(ev2);
+    events1.add(ev1);
+
+    relEntityMap =
+        new HashMap<String, Set<String>>();
+    Set<String> ids = new HashSet<String>();
+    ids.add(entityId1);
+    ids.add(entityId1b);
+    relEntityMap.put(entityType1, ids);
+
+    relEntityMap2 =
+        new HashMap<String, Set<String>>();
+    relEntityMap2.put(entityType4, Collections.singleton(entityId4));
+
+    ev3 = createEvent(789l, "launch_event", null);
+    ev4 = createEvent(-123l, "init_event", null);
+    events2 = new ArrayList<TimelineEvent>();
+    events2.add(ev3);
+    events2.add(ev4);
+  }
+
+  public void testGetSingleEntity() throws IOException {
+    // test getting entity info
+    verifyEntityInfo(null, null, null, null, null, null,
+        store.getEntity("id_1", "type_2", EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, 123l, store.getEntity(entityId1b,
+        entityType1, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, -123l, store.getEntity(entityId2,
+        entityType2, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId4, entityType4, EMPTY_EVENTS, EMPTY_REL_ENTITIES,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId4,
+        entityType4, EnumSet.allOf(Field.class)));
+
+    verifyEntityInfo(entityId5, entityType5, EMPTY_EVENTS, relEntityMap2,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, 42l, store.getEntity(entityId5,
+        entityType5, EnumSet.allOf(Field.class)));
+
+    // test getting single fields
+    verifyEntityInfo(entityId1, entityType1, events1, null, null, null,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.EVENTS)));
+
+    verifyEntityInfo(entityId1, entityType1, Collections.singletonList(ev2),
+        null, null, null, store.getEntity(entityId1, entityType1,
+        EnumSet.of(Field.LAST_EVENT_ONLY)));
+
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, store.getEntity(entityId1b, entityType1,
+        null));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, primaryFilters, null,
+        store.getEntity(entityId1, entityType1,
+            EnumSet.of(Field.PRIMARY_FILTERS)));
+
+    verifyEntityInfo(entityId1, entityType1, null, null, null, otherInfo,
+        store.getEntity(entityId1, entityType1, EnumSet.of(Field.OTHER_INFO)));
+
+    verifyEntityInfo(entityId2, entityType2, null, relEntityMap, null, null,
+        store.getEntity(entityId2, entityType2,
+            EnumSet.of(Field.RELATED_ENTITIES)));
+  }
+
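+  // The query helpers below exercise store.getEntities() with different
+  // argument combinations, passing null for every parameter they do not use.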
+  protected List<TimelineEntity> getEntities(String entityType)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromId(String entityType,
+      String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromTs(String entityType,
+      long fromTs) throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, String fromId)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromTsWithPrimaryFilter(
+      String entityType, NameValuePair primaryFilter, long fromTs)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, fromTs,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithWindow(String entityType,
+      Long windowEnd, String fromId) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        null, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesFromIdWithPrimaryFilterAndWindow(
+      String entityType, Long windowEnd, String fromId,
+      NameValuePair primaryFilter) throws IOException {
+    return store.getEntities(entityType, null, null, windowEnd, fromId, null,
+        primaryFilter, null, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntitiesWithFilters(String entityType,
+      NameValuePair primaryFilter, Collection<NameValuePair> secondaryFilters)
+      throws IOException {
+    return store.getEntities(entityType, null, null, null, null, null,
+        primaryFilter, secondaryFilters, null).getEntities();
+  }
+
+  protected List<TimelineEntity> getEntities(String entityType, Long limit,
+      Long windowStart, Long windowEnd, NameValuePair primaryFilter,
+      EnumSet<Field> fields) throws IOException {
+    return store.getEntities(entityType, limit, windowStart, windowEnd, null,
+        null, primaryFilter, null, fields).getEntities();
+  }
+
+  public void testGetEntities() throws IOException {
+    // test getting entities
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_0").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_3").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntities("type_6").size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_0", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_3", userFilter).size());
+    assertEquals("nonzero entities size for nonexistent type", 0,
+        getEntitiesWithPrimaryFilter("type_6", userFilter).size());
+
+    List<TimelineEntity> entities = getEntities("type_1");
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_2");
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    entities = getEntities("type_1", 1l, null, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
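+    // windowStart is an exclusive lower bound and windowEnd an inclusive
+    // upper bound on entity start times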
+    entities = getEntities("type_1", null, 234l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 123l, null, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, null, 345l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntities("type_1", null, null, 123l, null,
+        EnumSet.allOf(Field.class));
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  public void testGetEntitiesWithFromId() throws IOException {
+    List<TimelineEntity> entities = getEntitiesFromId("type_1", entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesFromId("type_1", entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithWindow("type_1", 0l, entityId1);
+    assertEquals(0, entities.size());
+
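+    // a fromId that does not exist in the store yields no results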
+    entities = getEntitiesFromId("type_2", "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromId("type_2", entityId2);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId2, entityType2, events2, relEntityMap,
+        EMPTY_PRIMARY_FILTERS, EMPTY_MAP, entities.get(0));
+
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", -456l, "a");
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, null);
+    assertEquals(1, entities.size());
+
+    entities = getEntitiesFromIdWithWindow("type_2", 0l, entityId2);
+    assertEquals(1, entities.size());
+
+    // same tests with primary filters
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_1", userFilter,
+        entityId1b);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntitiesFromIdWithPrimaryFilterAndWindow("type_1", 0l,
+        entityId1, userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesFromIdWithPrimaryFilter("type_2", userFilter, "a");
+    assertEquals(0, entities.size());
+  }
+
+  public void testGetEntitiesWithFromTs() throws IOException {
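+    // all entities were inserted after beforeTs, so a fromTs of beforeTs
+    // matches nothing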
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    long afterTs = System.currentTimeMillis();
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+    assertEquals(2, getEntities("type_1").size());
+    assertEquals(1, getEntities("type_2").size());
+    assertEquals(2, getEntitiesWithPrimaryFilter("type_1", userFilter).size());
+    // check insert time is not overwritten
+    long beforeTs = this.beforeTs;
+    loadTestData();
+    assertEquals(0, getEntitiesFromTs("type_1", beforeTs).size());
+    assertEquals(0, getEntitiesFromTs("type_2", beforeTs).size());
+    assertEquals(0, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        beforeTs).size());
+    assertEquals(2, getEntitiesFromTs("type_1", afterTs).size());
+    assertEquals(1, getEntitiesFromTs("type_2", afterTs).size());
+    assertEquals(2, getEntitiesFromTsWithPrimaryFilter("type_1", userFilter,
+        afterTs).size());
+  }
+
+  public void testGetEntitiesWithPrimaryFilters() throws IOException {
+    // test using primary filter
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_1",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_2",
+            new NameValuePair("none", "none")).size());
+    assertEquals("nonzero entities size for primary filter", 0,
+        getEntitiesWithPrimaryFilter("type_3",
+            new NameValuePair("none", "none")).size());
+
+    List<TimelineEntity> entities = getEntitiesWithPrimaryFilter("type_1",
+        userFilter);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter1);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter2);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_1", numericFilter3);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithPrimaryFilter("type_2", userFilter);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", 1l, null, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", 1l, 0l, null, userFilter, null);
+    assertEquals(1, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+
+    entities = getEntities("type_1", null, 234l, null, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, 234l, 345l, userFilter, null);
+    assertEquals(0, entities.size());
+
+    entities = getEntities("type_1", null, null, 345l, userFilter, null);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+  }
+
+  public void testGetEntitiesWithSecondaryFilters() throws IOException {
+    // test using secondary filter
+    List<TimelineEntity> entities = getEntitiesWithFilters("type_1", null,
+        goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithFilters("type_1", userFilter, 
goodTestingFilters);
+    assertEquals(2, entities.size());
+    verifyEntityInfo(entityId1, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(0));
+    verifyEntityInfo(entityId1b, entityType1, events1, EMPTY_REL_ENTITIES,
+        primaryFilters, otherInfo, entities.get(1));
+
+    entities = getEntitiesWithFilters("type_1", null,
+        Collections.singleton(new NameValuePair("user", "none")));
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesWithFilters("type_1", null, badTestingFilters);
+    assertEquals(0, entities.size());
+
+    entities = getEntitiesWithFilters("type_1", userFilter, badTestingFilters);
+    assertEquals(0, entities.size());
+  }
+
+  public void testGetEvents() throws IOException {
+    // test getting entity timelines
+    SortedSet<String> sortedSet = new TreeSet<String>();
+    sortedSet.add(entityId1);
+    List<EventsOfOneEntity> timelines =
+        store.getEntityTimelines(entityType1, sortedSet, null, null,
+            null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+
+    sortedSet.add(entityId1b);
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2, ev1);
+
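+    // a limit of 1 keeps only the most recent event for each entity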
+    timelines = store.getEntityTimelines(entityType1, sortedSet, 1l,
+        null, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        345l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        123l, null, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 345l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, 123l, null).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev1);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev1);
+
+    timelines = store.getEntityTimelines(entityType1, sortedSet, null,
+        null, null, Collections.singleton("end_event")).getAllEvents();
+    assertEquals(2, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId1, entityType1, ev2);
+    verifyEntityTimeline(timelines.get(1), entityId1b, entityType1, ev2);
+
+    sortedSet.add(entityId2);
+    timelines = store.getEntityTimelines(entityType2, sortedSet, null,
+        null, null, null).getAllEvents();
+    assertEquals(1, timelines.size());
+    verifyEntityTimeline(timelines.get(0), entityId2, entityType2, ev3, ev4);
+  }
+
+  /**
+   * Verify a single entity and its start time
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      Long startTime, TimelineEntity retrievedEntityInfo) {
+
+    verifyEntityInfo(entityId, entityType, events, relatedEntities,
+        primaryFilters, otherInfo, retrievedEntityInfo);
+    assertEquals(startTime, retrievedEntityInfo.getStartTime());
+  }
+
+  /**
+   * Verify a single entity
+   */
+  protected static void verifyEntityInfo(String entityId, String entityType,
+      List<TimelineEvent> events, Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters, Map<String, Object> otherInfo,
+      TimelineEntity retrievedEntityInfo) {
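+    // a null expected entityId means the entity should not be found at all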
+    if (entityId == null) {
+      assertNull(retrievedEntityInfo);
+      return;
+    }
+    assertEquals(entityId, retrievedEntityInfo.getEntityId());
+    assertEquals(entityType, retrievedEntityInfo.getEntityType());
+    if (events == null) {
+      assertNull(retrievedEntityInfo.getEvents());
+    } else {
+      assertEquals(events, retrievedEntityInfo.getEvents());
+    }
+    if (relatedEntities == null) {
+      assertNull(retrievedEntityInfo.getRelatedEntities());
+    } else {
+      assertEquals(relatedEntities, retrievedEntityInfo.getRelatedEntities());
+    }
+    if (primaryFilters == null) {
+      assertNull(retrievedEntityInfo.getPrimaryFilters());
+    } else {
+      assertEquals(primaryFilters, retrievedEntityInfo.getPrimaryFilters());
+    }
+    if (otherInfo == null) {
+      assertNull(retrievedEntityInfo.getOtherInfo());
+    } else {
+      assertEquals(otherInfo, retrievedEntityInfo.getOtherInfo());
+    }
+  }
+
+  /**
+   * Verify timeline events
+   */
+  private static void verifyEntityTimeline(
+      EventsOfOneEntity retrievedEvents, String entityId, String entityType,
+      TimelineEvent... actualEvents) {
+    assertEquals(entityId, retrievedEvents.getEntityId());
+    assertEquals(entityType, retrievedEvents.getEntityType());
+    assertEquals(actualEvents.length, retrievedEvents.getEvents().size());
+    for (int i = 0; i < actualEvents.length; i++) {
+      assertEquals(actualEvents[i], retrievedEvents.getEvents().get(i));
+    }
+  }
+
+  /**
+   * Create a test entity
+   */
+  protected static TimelineEntity createEntity(String entityId, String entityType,
+      Long startTime, List<TimelineEvent> events,
+      Map<String, Set<String>> relatedEntities,
+      Map<String, Set<Object>> primaryFilters,
+      Map<String, Object> otherInfo) {
+    TimelineEntity entity = new TimelineEntity();
+    entity.setEntityId(entityId);
+    entity.setEntityType(entityType);
+    entity.setStartTime(startTime);
+    entity.setEvents(events);
+    if (relatedEntities != null) {
+      for (Entry<String, Set<String>> e : relatedEntities.entrySet()) {
+        for (String v : e.getValue()) {
+          entity.addRelatedEntity(e.getKey(), v);
+        }
+      }
+    } else {
+      entity.setRelatedEntities(null);
+    }
+    entity.setPrimaryFilters(primaryFilters);
+    entity.setOtherInfo(otherInfo);
+    return entity;
+  }
+
+  /**
+   * Create a test event
+   */
+  private static TimelineEvent createEvent(long timestamp, String type,
+      Map<String, Object> info) {
+    TimelineEvent event = new TimelineEvent();
+    event.setTimestamp(timestamp);
+    event.setEventType(type);
+    event.setEventInfo(info);
+    return event;
+  }
+
+}
