http://git-wip-us.apache.org/repos/asf/ambari/blob/4a4644b8/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProviderTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProviderTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProviderTest.java
new file mode 100644
index 0000000..982dd4b
--- /dev/null
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/controller/internal/AlertTargetResourceProviderTest.java
@@ -0,0 +1,290 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.controller.internal;
+
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createStrictMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.expectLastCall;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.resetToStrict;
+import static org.easymock.EasyMock.verify;
+import static org.junit.Assert.assertEquals;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.ambari.server.controller.AmbariManagementController;
+import org.apache.ambari.server.controller.spi.Predicate;
+import org.apache.ambari.server.controller.spi.Request;
+import org.apache.ambari.server.controller.spi.Resource;
+import org.apache.ambari.server.controller.utilities.PredicateBuilder;
+import org.apache.ambari.server.controller.utilities.PropertyHelper;
+import org.apache.ambari.server.metadata.ActionMetadata;
+import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.AlertDispatchDAO;
+import org.apache.ambari.server.orm.entities.AlertTargetEntity;
+import org.apache.ambari.server.state.Cluster;
+import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.alert.TargetType;
+import org.easymock.Capture;
+import org.easymock.EasyMock;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Binder;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.Module;
+import com.google.inject.util.Modules;
+
+/**
+ * {@link AlertTargetResourceProvider} tests.
+ */
+public class AlertTargetResourceProviderTest {
+
+  private static final Long ALERT_TARGET_ID = Long.valueOf(28);
+  private static final String ALERT_TARGET_NAME = "The Administrators";
+  private static final String ALERT_TARGET_DESC = "Admins and Others";
+  private static final String ALERT_TARGET_TYPE = TargetType.EMAIL.name();
+
+  private AlertDispatchDAO m_dao;
+  private Injector m_injector;
+
+  @Before
+  public void before() {
+    m_dao = createStrictMock(AlertDispatchDAO.class);
+
+    m_injector = Guice.createInjector(Modules.override(
+        new InMemoryDefaultTestModule()).with(new MockModule()));
+
+    AlertTargetResourceProvider.init(m_injector);
+  }
+
+  /**
+   * @throws Exception
+   */
+  @Test
+  public void testGetResourcesNoPredicate() throws Exception {
+    Request request = PropertyHelper.getReadRequest(
+        AlertTargetResourceProvider.ALERT_TARGET_DESCRIPTION,
+        AlertTargetResourceProvider.ALERT_TARGET_ID,
+        AlertTargetResourceProvider.ALERT_TARGET_NAME,
+        AlertTargetResourceProvider.ALERT_TARGET_NOTIFICATION_TYPE);
+
+    expect(m_dao.findAllTargets()).andReturn(getMockEntities());
+    replay(m_dao);
+
+    AmbariManagementController amc = 
createMock(AmbariManagementController.class);
+    AlertTargetResourceProvider provider = createProvider(amc);
+    Set<Resource> results = provider.getResources(request, null);
+
+    assertEquals(1, results.size());
+
+    Resource r = results.iterator().next();
+    Assert.assertEquals(ALERT_TARGET_NAME,
+        r.getPropertyValue(AlertTargetResourceProvider.ALERT_TARGET_NAME));
+
+    verify(m_dao);
+  }
+
+  /**
+   * @throws Exception
+   */
+  @Test
+  public void testGetSingleResource() throws Exception {
+    Request request = PropertyHelper.getReadRequest(
+        AlertTargetResourceProvider.ALERT_TARGET_DESCRIPTION,
+        AlertTargetResourceProvider.ALERT_TARGET_ID,
+        AlertTargetResourceProvider.ALERT_TARGET_NAME,
+        AlertTargetResourceProvider.ALERT_TARGET_NOTIFICATION_TYPE);
+
+    AmbariManagementController amc = 
createMock(AmbariManagementController.class);
+
+    Predicate predicate = new PredicateBuilder().property(
+        AlertTargetResourceProvider.ALERT_TARGET_ID).equals(
+        ALERT_TARGET_ID.toString()).toPredicate();
+
+    expect(m_dao.findTargetById(ALERT_TARGET_ID.longValue())).andReturn(
+        getMockEntities().get(0));
+
+    replay(amc, m_dao);
+
+    AlertTargetResourceProvider provider = createProvider(amc);
+    Set<Resource> results = provider.getResources(request, predicate);
+
+    assertEquals(1, results.size());
+
+    Resource r = results.iterator().next();
+    Assert.assertEquals(ALERT_TARGET_ID,
+        r.getPropertyValue(AlertTargetResourceProvider.ALERT_TARGET_ID));
+
+    Assert.assertEquals(ALERT_TARGET_NAME,
+        r.getPropertyValue(AlertTargetResourceProvider.ALERT_TARGET_NAME));
+
+    verify(amc, m_dao);
+  }
+
+  /**
+   * @throws Exception
+   */
+  @Test
+  public void testCreateResources() throws Exception {
+    AmbariManagementController amc = 
createMock(AmbariManagementController.class);
+    Capture<List<AlertTargetEntity>> listCapture = new 
Capture<List<AlertTargetEntity>>();
+
+    m_dao.createTargets(capture(listCapture));
+    expectLastCall();
+
+    replay(amc, m_dao);
+
+    AlertTargetResourceProvider provider = createProvider(amc);
+    Map<String, Object> requestProps = new HashMap<String, Object>();
+    requestProps.put(AlertTargetResourceProvider.ALERT_TARGET_NAME,
+        ALERT_TARGET_NAME);
+
+    requestProps.put(AlertTargetResourceProvider.ALERT_TARGET_DESCRIPTION,
+        ALERT_TARGET_DESC);
+
+    requestProps.put(
+        AlertTargetResourceProvider.ALERT_TARGET_NOTIFICATION_TYPE,
+        ALERT_TARGET_TYPE);
+
+    Request request = 
PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+    provider.createResources(request);
+
+    Assert.assertTrue(listCapture.hasCaptured());
+    AlertTargetEntity entity = listCapture.getValue().get(0);
+    Assert.assertNotNull(entity);
+
+    Assert.assertEquals(ALERT_TARGET_NAME, entity.getTargetName());
+    Assert.assertEquals(ALERT_TARGET_DESC, entity.getDescription());
+    Assert.assertEquals(ALERT_TARGET_TYPE, entity.getNotificationType());
+
+    verify(amc, m_dao);
+  }
+
+  /**
+   * @throws Exception
+   */
+  @Test
+  public void testUpdateResources() throws Exception {
+  }
+
+  /**
+   * @throws Exception
+   */
+  @Test
+  public void testDeleteResources() throws Exception {
+    AmbariManagementController amc = 
createMock(AmbariManagementController.class);
+    Capture<AlertTargetEntity> entityCapture = new 
Capture<AlertTargetEntity>();
+    Capture<List<AlertTargetEntity>> listCapture = new 
Capture<List<AlertTargetEntity>>();
+
+    m_dao.createTargets(capture(listCapture));
+    expectLastCall();
+
+    replay(amc, m_dao);
+
+    AlertTargetResourceProvider provider = createProvider(amc);
+
+    Map<String, Object> requestProps = new HashMap<String, Object>();
+    requestProps.put(AlertTargetResourceProvider.ALERT_TARGET_NAME,
+        ALERT_TARGET_NAME);
+
+    requestProps.put(AlertTargetResourceProvider.ALERT_TARGET_DESCRIPTION,
+        ALERT_TARGET_DESC);
+
+    requestProps.put(
+        AlertTargetResourceProvider.ALERT_TARGET_NOTIFICATION_TYPE,
+        ALERT_TARGET_TYPE);
+
+    Request request = 
PropertyHelper.getCreateRequest(Collections.singleton(requestProps), null);
+    provider.createResources(request);
+
+    Assert.assertTrue(listCapture.hasCaptured());
+    AlertTargetEntity entity = listCapture.getValue().get(0);
+    Assert.assertNotNull(entity);
+
+    Predicate p = new PredicateBuilder().property(
+        AlertTargetResourceProvider.ALERT_TARGET_ID).equals(
+        ALERT_TARGET_ID.toString()).toPredicate();
+
+    // everything is mocked, there is no DB
+    entity.setTargetId(ALERT_TARGET_ID);
+
+    resetToStrict(m_dao);
+    
expect(m_dao.findTargetById(ALERT_TARGET_ID.longValue())).andReturn(entity).anyTimes();
+    m_dao.remove(capture(entityCapture));
+    expectLastCall();
+    replay(m_dao);
+
+    provider.deleteResources(p);
+
+    AlertTargetEntity entity1 = entityCapture.getValue();
+    Assert.assertEquals(ALERT_TARGET_ID, entity1.getTargetId());
+
+    verify(amc, m_dao);
+  }
+
+  /**
+   * @param amc
+   * @return
+   */
+  private AlertTargetResourceProvider createProvider(
+      AmbariManagementController amc) {
+    return new AlertTargetResourceProvider(
+        PropertyHelper.getPropertyIds(Resource.Type.AlertTarget),
+        PropertyHelper.getKeyPropertyIds(Resource.Type.AlertTarget), amc);
+  }
+
+  /**
+   * @return
+   */
+  private List<AlertTargetEntity> getMockEntities() throws Exception {
+    AlertTargetEntity entity = new AlertTargetEntity();
+    entity.setTargetId(ALERT_TARGET_ID);
+    entity.setDescription(ALERT_TARGET_DESC);
+    entity.setTargetName(ALERT_TARGET_NAME);
+    entity.setNotificationType(TargetType.EMAIL.name());
+    return Arrays.asList(entity);
+  }
+
+  /**
+  *
+  */
+  private class MockModule implements Module {
+    /**
+    *
+    */
+    @Override
+    public void configure(Binder binder) {
+      binder.bind(AlertDispatchDAO.class).toInstance(m_dao);
+      binder.bind(Clusters.class).toInstance(
+          EasyMock.createNiceMock(Clusters.class));
+      binder.bind(Cluster.class).toInstance(
+          EasyMock.createNiceMock(Cluster.class));
+      binder.bind(ActionMetadata.class);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/4a4644b8/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
index c0bdaa2..d7c8c54 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostTest.java
@@ -105,12 +105,12 @@ public class ServiceComponentHostTest {
     metaInfo.init();
     clusters.mapHostToCluster("h1","C1");
   }
-  
+
   private void setOsFamily(Host host, String osFamily, String osVersion) {
     Map<String, String> hostAttributes = new HashMap<String, String>(2);
     hostAttributes.put("os_family", osFamily);
     hostAttributes.put("os_release_version", osVersion);
-    
+
     host.setHostAttributes(hostAttributes);
   }
 
@@ -118,15 +118,16 @@ public class ServiceComponentHostTest {
   public void teardown() {
     injector.getInstance(PersistService.class).stop();
   }
-  
+
   private ServiceComponentHost createNewServiceComponentHost(
       String svc,
       String svcComponent,
       String hostName, boolean isClient) throws AmbariException{
     Cluster c = clusters.getCluster("C1");
-    
+
     return createNewServiceComponentHost(c, svc, svcComponent, hostName);
   }
+
   private ServiceComponentHost createNewServiceComponentHost(
       Cluster c,
       String svc,
@@ -156,18 +157,22 @@ public class ServiceComponentHostTest {
 
     ServiceComponentHost impl = serviceComponentHostFactory.createNew(
         sc, hostName);
+
     impl.persist();
-    Assert.assertEquals(State.INIT,
-        impl.getState());
-    Assert.assertEquals(State.INIT,
-        impl.getDesiredState());
+
+    Assert.assertEquals(State.INIT, impl.getState());
+    Assert.assertEquals(State.INIT, impl.getDesiredState());
     Assert.assertEquals(c.getClusterName(), impl.getClusterName());
     Assert.assertEquals(c.getClusterId(), impl.getClusterId());
     Assert.assertEquals(s.getName(), impl.getServiceName());
     Assert.assertEquals(sc.getName(), impl.getServiceComponentName());
     Assert.assertEquals(hostName, impl.getHostName());
+
+    Assert.assertNotNull(c.getServiceComponentHosts(hostName));
+
     Assert.assertFalse(
         impl.getDesiredStackVersion().getStackId().isEmpty());
+
     Assert.assertTrue(impl.getStackVersion().getStackId().isEmpty());
 
     return impl;
@@ -264,7 +269,7 @@ public class ServiceComponentHostTest {
 
     ServiceComponentHostEvent installEvent2 = createEvent(impl, ++timestamp,
         startEventType);
-   
+
     boolean exceptionThrown = false;
     LOG.info("Transitioning from " + impl.getState() + " " + 
installEvent2.getType());
     try {
@@ -503,7 +508,7 @@ public class ServiceComponentHostTest {
     Assert.assertEquals("HDP-1.1.0",
         sch.getDesiredStackVersion().getStackId());
   }
-  
+
   @Test
   public void testActualConfigs() throws Exception {
     ServiceComponentHost sch =
@@ -520,25 +525,25 @@ public class ServiceComponentHostTest {
 
     configGroup.persist();
     cluster.addConfigGroup(configGroup);
-    
+
     Map<String, Map<String,String>> actual =
         new HashMap<String, Map<String, String>>() {{
           put("global", new HashMap<String,String>() {{ put("tag", 
"version1"); }});
           put("core-site", new HashMap<String,String>() {{ put("tag", 
"version1");
             put(configGroup.getId().toString(), "version2"); }});
         }};
-        
+
     sch.updateActualConfigs(actual);
-    
+
     Map<String, HostConfig> confirm = sch.getActualConfigs();
-    
+
     Assert.assertEquals(2, confirm.size());
     Assert.assertTrue(confirm.containsKey("global"));
     Assert.assertTrue(confirm.containsKey("core-site"));
     Assert.assertEquals(1, 
confirm.get("core-site").getConfigGroupOverrides().size());
 
   }
-  
+
   @Test
   public void testConvertToResponse() throws AmbariException {
     ServiceComponentHost sch =
@@ -555,7 +560,7 @@ public class ServiceComponentHostTest {
     Assert.assertEquals(State.INSTALLED.toString(), r.getDesiredState());
     Assert.assertEquals(State.INSTALLING.toString(), r.getLiveState());
     Assert.assertEquals("HDP-1.0.0", r.getStackVersion());
-    
+
     Assert.assertFalse(r.isStaleConfig());
 
     // TODO check configs
@@ -615,62 +620,61 @@ public class ServiceComponentHostTest {
         impl.getState());
   }
 
-   @Test
-   public void TestDisableInVariousStates() throws AmbariException,
-           InvalidStateTransitionException {
-       ServiceComponentHost sch =
-               createNewServiceComponentHost("HDFS", "DATANODE", "h1", false);
-       ServiceComponentHostImpl impl =  (ServiceComponentHostImpl) sch;
-
-       // Test valid states in which host component can be disabled
-       long timestamp = 0;
-       HashSet<State> validStates = new HashSet<State>();
-       validStates.add(State.INSTALLED);
-       validStates.add(State.INSTALL_FAILED);
-       validStates.add(State.UNKNOWN);
-       validStates.add(State.DISABLED);
-       for (State state : validStates)
-       {
-         sch.setState(state);
-         ServiceComponentHostEvent disableEvent = createEvent(
-                   impl, ++timestamp, 
ServiceComponentHostEventType.HOST_SVCCOMP_DISABLE);
-         impl.handleEvent(disableEvent);
-         // TODO: At present operation timestamps are not getting updated.
-         Assert.assertEquals(-1, impl.getLastOpStartTime());
-         Assert.assertEquals(-1, impl.getLastOpLastUpdateTime());
-         Assert.assertEquals(-1, impl.getLastOpEndTime());
-         Assert.assertEquals(State.DISABLED, impl.getState());
-       }
-
-       // Test invalid states in which host component cannot be disabled
-       HashSet<State> invalidStates = new HashSet<State>();
-       invalidStates.add(State.INIT);
-       invalidStates.add(State.INSTALLING);
-       invalidStates.add(State.STARTING);
-       invalidStates.add(State.STARTED);
-       invalidStates.add(State.STOPPING);
-       invalidStates.add(State.UNINSTALLING);
-       invalidStates.add(State.UNINSTALLED);
-       invalidStates.add(State.UPGRADING);
-
-       for(State state : invalidStates)
-       {
-           sch.setState(state);
-           ServiceComponentHostEvent disableEvent = createEvent(
-                   impl, ++timestamp, 
ServiceComponentHostEventType.HOST_SVCCOMP_DISABLE);
-           boolean exceptionThrown = false;
-           try {
-               impl.handleEvent(disableEvent);
-           } catch (Exception e) {
-               exceptionThrown = true;
-           }
-           Assert.assertTrue("Exception not thrown on invalid event", 
exceptionThrown);
-           // TODO: At present operation timestamps are not getting updated.
-           Assert.assertEquals(-1, impl.getLastOpStartTime());
-           Assert.assertEquals(-1, impl.getLastOpLastUpdateTime());
-           Assert.assertEquals(-1, impl.getLastOpEndTime());
-       }
-   }
+  @Test
+  public void testDisableInVariousStates() throws AmbariException,
+      InvalidStateTransitionException {
+    ServiceComponentHost sch = createNewServiceComponentHost("HDFS",
+        "DATANODE", "h1", false);
+    ServiceComponentHostImpl impl = (ServiceComponentHostImpl) sch;
+
+    // Test valid states in which host component can be disabled
+    long timestamp = 0;
+    HashSet<State> validStates = new HashSet<State>();
+    validStates.add(State.INSTALLED);
+    validStates.add(State.INSTALL_FAILED);
+    validStates.add(State.UNKNOWN);
+    validStates.add(State.DISABLED);
+    for (State state : validStates) {
+      sch.setState(state);
+      ServiceComponentHostEvent disableEvent = createEvent(impl, ++timestamp,
+          ServiceComponentHostEventType.HOST_SVCCOMP_DISABLE);
+      impl.handleEvent(disableEvent);
+      // TODO: At present operation timestamps are not getting updated.
+      Assert.assertEquals(-1, impl.getLastOpStartTime());
+      Assert.assertEquals(-1, impl.getLastOpLastUpdateTime());
+      Assert.assertEquals(-1, impl.getLastOpEndTime());
+      Assert.assertEquals(State.DISABLED, impl.getState());
+    }
+
+    // Test invalid states in which host component cannot be disabled
+    HashSet<State> invalidStates = new HashSet<State>();
+    invalidStates.add(State.INIT);
+    invalidStates.add(State.INSTALLING);
+    invalidStates.add(State.STARTING);
+    invalidStates.add(State.STARTED);
+    invalidStates.add(State.STOPPING);
+    invalidStates.add(State.UNINSTALLING);
+    invalidStates.add(State.UNINSTALLED);
+    invalidStates.add(State.UPGRADING);
+
+    for (State state : invalidStates) {
+      sch.setState(state);
+      ServiceComponentHostEvent disableEvent = createEvent(impl, ++timestamp,
+          ServiceComponentHostEventType.HOST_SVCCOMP_DISABLE);
+      boolean exceptionThrown = false;
+      try {
+        impl.handleEvent(disableEvent);
+      } catch (Exception e) {
+        exceptionThrown = true;
+      }
+      Assert.assertTrue("Exception not thrown on invalid event",
+          exceptionThrown);
+      // TODO: At present operation timestamps are not getting updated.
+      Assert.assertEquals(-1, impl.getLastOpStartTime());
+      Assert.assertEquals(-1, impl.getLastOpLastUpdateTime());
+      Assert.assertEquals(-1, impl.getLastOpEndTime());
+    }
+  }
 
   @Test
   public void testCanBeRemoved() throws Exception{
@@ -688,13 +692,13 @@ public class ServiceComponentHostTest {
       }
     }
   }
-  
+
   @Test
   public void testStaleConfigs() throws Exception {
     String stackVersion="HDP-2.0.6";
     String clusterName = "c2";
     String hostName = "h3";
-    
+
     clusters.addCluster(clusterName);
     clusters.addHost(hostName);
     setOsFamily(clusters.getHost(hostName), "redhat", "5.9");
@@ -702,14 +706,14 @@ public class ServiceComponentHostTest {
     clusters.getCluster(clusterName).setDesiredStackVersion(
         new StackId(stackVersion));
     metaInfo.init();
-    clusters.mapHostToCluster(hostName, clusterName);    
-    
+    clusters.mapHostToCluster(hostName, clusterName);
+
     Cluster cluster = clusters.getCluster(clusterName);
-    
+
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", 
"NAMENODE", hostName);
     ServiceComponentHost sch2 = createNewServiceComponentHost(cluster, "HDFS", 
"DATANODE", hostName);
     ServiceComponentHost sch3 = createNewServiceComponentHost(cluster, 
"MAPREDUCE2", "HISTORYSERVER", hostName);
-    
+
     sch1.setDesiredState(State.INSTALLED);
     sch1.setState(State.INSTALLING);
     sch1.setStackVersion(new StackId(stackVersion));
@@ -717,10 +721,10 @@ public class ServiceComponentHostTest {
     sch2.setDesiredState(State.INSTALLED);
     sch2.setState(State.INSTALLING);
     sch2.setStackVersion(new StackId(stackVersion));
-    
+
     sch3.setDesiredState(State.INSTALLED);
     sch3.setState(State.INSTALLING);
-    sch3.setStackVersion(new StackId(stackVersion));    
+    sch3.setStackVersion(new StackId(stackVersion));
 
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
@@ -735,7 +739,7 @@ public class ServiceComponentHostTest {
     Map<String, Map<String, String>> actual = new HashMap<String, Map<String, 
String>>() {{
       put("global", new HashMap<String,String>() {{ put("tag", "version1"); 
}});
     }};
-    
+
     sch1.updateActualConfigs(actual);
     sch2.updateActualConfigs(actual);
     sch3.updateActualConfigs(actual);
@@ -746,7 +750,7 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS does not define type 'foo', so changes do not count to stale
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
-    
+
     makeConfig(cluster, "hdfs-site", "version1",
         new HashMap<String,String>() {{ put("a", "b"); }}, new HashMap<String, 
Map<String,String>>());
 
@@ -755,7 +759,7 @@ public class ServiceComponentHostTest {
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
 
     actual.put("hdfs-site", new HashMap<String, String>() {{ put ("tag", 
"version1"); }});
-    
+
     sch1.updateActualConfigs(actual);
     // previous value from cache
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
@@ -765,7 +769,7 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS/hdfs-site up to date, only for sch1
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
-    
+
     sch2.updateActualConfigs(actual);
     // previous value from cache
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
@@ -775,15 +779,15 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS/hdfs-site up to date for both
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
-    
+
     makeConfig(cluster, "hdfs-site", "version2",
-        new HashMap<String, String>() {{ put("dfs.journalnode.http-address", 
"http://foo"); }}, 
+        new HashMap<String, String>() {{ put("dfs.journalnode.http-address", 
"http://foo"); }},
         new HashMap<String, Map<String,String>>());
 
     // HDP-x/HDFS/hdfs-site updated to changed property
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
-    
+
     actual.get("hdfs-site").put("tag", "version2");
     sch1.updateActualConfigs(actual);
     sch2.updateActualConfigs(actual);
@@ -794,13 +798,13 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS/hdfs-site updated to changed property
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
-    
+
     // make a host override
     final Host host = 
clusters.getHostsForCluster(cluster.getClusterName()).get(hostName);
     Assert.assertNotNull(host);
-    
+
     final Config c = configFactory.createNew(cluster, "hdfs-site",
-        new HashMap<String, String>() {{ put("dfs.journalnode.http-address", 
"http://goo"); }}, 
+        new HashMap<String, String>() {{ put("dfs.journalnode.http-address", 
"http://goo"); }},
         new HashMap<String, Map<String,String>>());
     c.setTag("version3");
     c.persist();
@@ -811,11 +815,11 @@ public class ServiceComponentHostTest {
       new HashMap<String, Host>() {{ put("h3", host); }});
     configGroup.persist();
     cluster.addConfigGroup(configGroup);
-    
+
     // HDP-x/HDFS/hdfs-site updated host to changed property
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
-    
+
     actual.get("hdfs-site").put(configGroup.getId().toString(), "version3");
     sch2.updateActualConfigs(actual);
     // previous value from cache
@@ -826,7 +830,7 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS/hdfs-site updated host to changed property
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
-    
+
     sch1.updateActualConfigs(actual);
     // previous value from cache
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
@@ -836,7 +840,7 @@ public class ServiceComponentHostTest {
     // HDP-x/HDFS/hdfs-site updated host to changed property
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
-    
+
     // change 'global' property only affecting global/HDFS
     makeConfig(cluster, "global", "version2",
       new HashMap<String,String>() {{
@@ -844,7 +848,7 @@ public class ServiceComponentHostTest {
         put("dfs_namenode_name_dir", "/foo3"); // HDFS only
         put("mapred_log_dir_prefix", "/foo2"); // MR2 only
       }}, new HashMap<String, Map<String,String>>());
-    
+
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch3.convertToResponse().isStaleConfig());
@@ -867,7 +871,7 @@ public class ServiceComponentHostTest {
     sch1.updateActualConfigs(actual);
 
     final Config c1 = configFactory.createNew(cluster, "core-site",
-      new HashMap<String, String>() {{ put("fs.trash.interval", "400"); }}, 
+      new HashMap<String, String>() {{ put("fs.trash.interval", "400"); }},
       new HashMap<String, Map<String,String>>());
     c1.setTag("version2");
     c1.persist();
@@ -912,13 +916,13 @@ public class ServiceComponentHostTest {
     sch3.setRestartRequired(false);
     Assert.assertFalse(sch3.convertToResponse().isStaleConfig());
   }
-  
+
   @Test
   public void testStaleConfigsAttributes() throws Exception {
     String stackVersion="HDP-2.0.6";
     String clusterName = "c2";
     String hostName = "h3";
-    
+
     clusters.addCluster(clusterName);
     clusters.addHost(hostName);
     setOsFamily(clusters.getHost(hostName), "redhat", "5.9");
@@ -926,14 +930,14 @@ public class ServiceComponentHostTest {
     clusters.getCluster(clusterName).setDesiredStackVersion(
         new StackId(stackVersion));
     metaInfo.init();
-    clusters.mapHostToCluster(hostName, clusterName);    
-    
+    clusters.mapHostToCluster(hostName, clusterName);
+
     Cluster cluster = clusters.getCluster(clusterName);
-    
+
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", 
"NAMENODE", hostName);
     ServiceComponentHost sch2 = createNewServiceComponentHost(cluster, "HDFS", 
"DATANODE", hostName);
     ServiceComponentHost sch3 = createNewServiceComponentHost(cluster, 
"MAPREDUCE2", "HISTORYSERVER", hostName);
-    
+
     sch1.setDesiredState(State.INSTALLED);
     sch1.setState(State.INSTALLING);
     sch1.setStackVersion(new StackId(stackVersion));
@@ -941,10 +945,10 @@ public class ServiceComponentHostTest {
     sch2.setDesiredState(State.INSTALLED);
     sch2.setState(State.INSTALLING);
     sch2.setStackVersion(new StackId(stackVersion));
-    
+
     sch3.setDesiredState(State.INSTALLED);
     sch3.setState(State.INSTALLING);
-    sch3.setStackVersion(new StackId(stackVersion));    
+    sch3.setStackVersion(new StackId(stackVersion));
 
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch2.convertToResponse().isStaleConfig());
@@ -963,7 +967,7 @@ public class ServiceComponentHostTest {
       put("global", new HashMap<String,String>() {{ put("tag", "version1"); 
}});
       put("hdfs-site", new HashMap<String,String>() {{ put("tag", "version1"); 
}});
     }};
-    
+
     sch1.updateActualConfigs(actual);
     sch2.updateActualConfigs(actual);
     sch3.updateActualConfigs(actual);
@@ -972,7 +976,7 @@ public class ServiceComponentHostTest {
       new HashMap<String,String>() {{ put("a", "c"); }},new HashMap<String, 
Map<String,String>>(){{
        put("final", new HashMap<String, String>(){{
          put("a", "true");
-       }}); 
+       }});
       }});
     // HDP-x/HDFS does not define type 'foo', so changes do not count to stale
     Assert.assertFalse(sch1.convertToResponse().isStaleConfig());
@@ -985,7 +989,7 @@ public class ServiceComponentHostTest {
     sch3.setRestartRequired(false);
     sch3.updateActualConfigs(actual);
     Assert.assertFalse(sch3.convertToResponse().isStaleConfig());
-    
+
     // Now add config-attributes
     Map<String, Map<String, String>> c1PropAttributes = new HashMap<String, 
Map<String,String>>();
     c1PropAttributes.put("final", new HashMap<String, String>());
@@ -1000,7 +1004,7 @@ public class ServiceComponentHostTest {
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch3.convertToResponse().isStaleConfig());
-    
+
     // Now change config-attributes
     Map<String, Map<String, String>> c2PropAttributes = new HashMap<String, 
Map<String,String>>();
     c2PropAttributes.put("final", new HashMap<String, String>());
@@ -1015,7 +1019,7 @@ public class ServiceComponentHostTest {
     Assert.assertTrue(sch1.convertToResponse().isStaleConfig());
     Assert.assertTrue(sch2.convertToResponse().isStaleConfig());
     Assert.assertFalse(sch3.convertToResponse().isStaleConfig());
-    
+
     // Now change config-attributes
     makeConfig(cluster, "hdfs-site", "version4",
         new HashMap<String,String>() {{
@@ -1043,13 +1047,13 @@ public class ServiceComponentHostTest {
     cluster.addConfig(config);
     cluster.addDesiredConfig("user", config);
   }
-  
+
   @Test
   public void testMaintenance() throws Exception {
     String stackVersion="HDP-2.0.6";
     String clusterName = "c2";
     String hostName = "h3";
-    
+
     clusters.addCluster(clusterName);
     clusters.addHost(hostName);
     setOsFamily(clusters.getHost(hostName), "redhat", "5.9");
@@ -1057,10 +1061,10 @@ public class ServiceComponentHostTest {
     clusters.getCluster(clusterName).setDesiredStackVersion(
         new StackId(stackVersion));
     metaInfo.init();
-    clusters.mapHostToCluster(hostName, clusterName);    
-    
+    clusters.mapHostToCluster(hostName, clusterName);
+
     Cluster cluster = clusters.getCluster(clusterName);
-    
+
     ServiceComponentHost sch1 = createNewServiceComponentHost(cluster, "HDFS", 
"NAMENODE", hostName);
     ServiceComponentHost sch2 = createNewServiceComponentHost(cluster, "HDFS", 
"DATANODE", hostName);
     ServiceComponentHost sch3 = createNewServiceComponentHost(cluster, 
"MAPREDUCE2", "HISTORYSERVER", hostName);
@@ -1070,20 +1074,20 @@ public class ServiceComponentHostTest {
     pk.setComponentName(sch1.getServiceComponentName());
     pk.setServiceName(sch1.getServiceName());
     pk.setHostName(hostName);
-    
+
     HostComponentDesiredStateDAO dao = 
injector.getInstance(HostComponentDesiredStateDAO.class);
     HostComponentDesiredStateEntity entity = dao.findByPK(pk);
     Assert.assertEquals(MaintenanceState.OFF, entity.getMaintenanceState());
     Assert.assertEquals(MaintenanceState.OFF, sch1.getMaintenanceState());
-    
+
     sch1.setMaintenanceState(MaintenanceState.ON);
     Assert.assertEquals(MaintenanceState.ON, sch1.getMaintenanceState());
-    
+
     entity = dao.findByPK(pk);
     Assert.assertEquals(MaintenanceState.ON, entity.getMaintenanceState());
 
   }
 
-  
-  
+
+
 }

Reply via email to