Repository: ambari
Updated Branches:
  refs/heads/branch-1.7.0 5fbaece3d -> df0f496a7


AMBARI-7442 ServiceCheck cannot be run if there is only one stack definition (adenisso via jaoki)
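
Editor's note on the change: StackExtensionHelper previously registered service check actions only inside the loop that merges a stack with its parent, so a stack with no parent (a single stack definition) never had its checks registered and ServiceCheck could not run. This patch moves the registration to the point where the target stack's services are populated, so every service that declares a <commandScript> gets a check action regardless of inheritance. A minimal sketch of how the registered action can then be looked up, mirroring the new test case below (the injector/ActionMetadata wiring is assumed from StackExtensionHelperTest):

    // Sketch only, not part of the patch: after the single stack's services
    // have been populated, the service check action is resolvable by name.
    ActionMetadata actionMetadata = injector.getInstance(ActionMetadata.class);
    String hdfsCheck = actionMetadata.getServiceCheckAction("HDFS");
    // expected to be "HDFS_SERVICE_CHECK", as asserted in the new test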


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/df0f496a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/df0f496a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/df0f496a

Branch: refs/heads/branch-1.7.0
Commit: df0f496a71e5cc47d0e550eb027d5e3c0191d768
Parents: 5fbaece
Author: Jun Aoki <ja...@apache.org>
Authored: Mon Oct 6 11:52:59 2014 -0700
Committer: Jun Aoki <ja...@apache.org>
Committed: Mon Oct 6 11:52:59 2014 -0700

----------------------------------------------------------------------
 .../server/api/util/StackExtensionHelper.java   |  14 +-
 .../api/util/StackExtensionHelperTest.java      |  27 +++
 .../single_stack/ABC/1.0.0/metainfo.xml         |  22 +++
 .../ABC/1.0.0/services/HDFS/metainfo.xml        | 193 +++++++++++++++++++
 4 files changed, 251 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/df0f496a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
index 1c3f9a7..35ae9a7 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/api/util/StackExtensionHelper.java
@@ -490,11 +490,6 @@ public class StackExtensionHelper {
           }
         }
         
-        // add action for service check
-        if(serviceInfo.getCommandScript() != null) {
-          actionMetadata.addServiceCheckAction(serviceInfo.getName());
-        }
-        
       }
       parentStack = currentStackInfo;
     }
@@ -536,6 +531,7 @@ public class StackExtensionHelper {
       ParserConfigurationException, SAXException,
       XPathExpressionException, IOException, JAXBException {
     List<ServiceInfo> services = new ArrayList<ServiceInfo>();
+
     File servicesFolder = new File(stackRoot.getAbsolutePath() + File
       .separator + stackInfo.getName() + File.separator + stackInfo.getVersion()
       + File.separator + AmbariMetaInfo.SERVICES_FOLDER_NAME);
@@ -602,6 +598,14 @@ public class StackExtensionHelper {
     }
 
     stackInfo.getServices().addAll(services);
+
+    // add service check actions from the target stack
+    for(ServiceInfo serviceInfo : stackInfo.getServices()) {
+      if(serviceInfo.getCommandScript() != null) {
+        actionMetadata.addServiceCheckAction(serviceInfo.getName());
+      }
+    }
+
   }
 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/df0f496a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
index 7262dfb..84ba6aa 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/api/util/StackExtensionHelperTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.server.api.util;
 import com.google.inject.AbstractModule;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.metadata.ActionMetadata;
 import org.apache.ambari.server.state.*;
@@ -32,6 +33,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.namespace.QName;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.xpath.XPathExpressionException;
+
 import java.io.File;
 import java.io.IOException;
 import java.util.*;
@@ -353,6 +355,31 @@ public class StackExtensionHelperTest {
     Map<String, String> supports = config.get(keyword);
     supports.put(attributeName, value);    
   }
+
+  /**
+   * This test ensures the service status check is added into the action metadata when
+   * the stack has no parent and is the only stack in the stack family
+   * @throws Exception
+   */
+  @Test
+  public void testGetServiceInfoFromSingleStack() throws Exception {
+    File stackRoot = new File("./src/test/resources/single_stack".replace("/", File.separator));
+    StackExtensionHelper helper = new StackExtensionHelper(injector, stackRoot);
+    helper.fillInfo();
+    List<StackInfo> stackInfoList = helper.getAllAvailableStacks();
+    assertEquals(1, stackInfoList.size());
+
+    List<ServiceInfo> serviceInfoList = helper.getAllApplicableServices(stackInfoList.get(0));
+    for(ServiceInfo serviceInfo: serviceInfoList) {
+      if ("HDFS".equalsIgnoreCase(serviceInfo.getName())) {
+        ActionMetadata actionMetadata = injector.getInstance(ActionMetadata.class);
+        String hdfsStatusCheckCmd = actionMetadata.getServiceCheckAction("HDFS");
+        assertEquals("HDFS_SERVICE_CHECK", hdfsStatusCheckCmd);
+        break;
+      }
+    }
+  }
+
   @Test
   public void testPopulateConfigTypes() throws XPathExpressionException, ParserConfigurationException, SAXException, IOException, JAXBException {
     StackExtensionHelper helper = getStackExtensionHelper();

http://git-wip-us.apache.org/repos/asf/ambari/blob/df0f496a/ambari-server/src/test/resources/single_stack/ABC/1.0.0/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/single_stack/ABC/1.0.0/metainfo.xml b/ambari-server/src/test/resources/single_stack/ABC/1.0.0/metainfo.xml
new file mode 100644
index 0000000..c00cf4d
--- /dev/null
+++ b/ambari-server/src/test/resources/single_stack/ABC/1.0.0/metainfo.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+    <versions>
+         <active>true</active>
+    </versions>
+</metainfo>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/df0f496a/ambari-server/src/test/resources/single_stack/ABC/1.0.0/services/HDFS/metainfo.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/resources/single_stack/ABC/1.0.0/services/HDFS/metainfo.xml b/ambari-server/src/test/resources/single_stack/ABC/1.0.0/services/HDFS/metainfo.xml
new file mode 100644
index 0000000..c4aa8fb
--- /dev/null
+++ b/ambari-server/src/test/resources/single_stack/ABC/1.0.0/services/HDFS/metainfo.xml
@@ -0,0 +1,193 @@
+<?xml version="1.0"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<metainfo>
+  <schemaVersion>2.0</schemaVersion>
+  <services>
+    <service>
+      <name>HDFS</name>
+      <comment>Apache Hadoop Distributed File System</comment>
+      <version>2.1.0.2.0.5.0</version>
+
+      <components>
+        <component>
+          <name>NAMENODE</name>
+          <category>MASTER</category>
+          <cardinality>1</cardinality>
+          <commandScript>
+            <script>scripts/namenode.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+          <customCommands>
+            <customCommand>
+              <name>DECOMMISSION</name>
+              <commandScript>
+                <script>scripts/namenode.py</script>
+                <scriptType>PYTHON</scriptType>
+                <timeout>600</timeout>
+              </commandScript>
+            </customCommand>
+            <customCommand>
+              <name>REBALANCEHDFS</name>
+              <background>true</background>
+              <commandScript>
+                <script>scripts/namenode.py</script>
+                <scriptType>PYTHON</scriptType>
+              </commandScript>
+            </customCommand>
+
+          </customCommands>
+        </component>
+
+        <component>
+          <name>DATANODE</name>
+          <category>SLAVE</category>
+          <cardinality>1+</cardinality>
+          <commandScript>
+            <script>scripts/datanode.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
+          <name>SECONDARY_NAMENODE</name>
+          <!-- TODO:  cardinality is conditional on HA usage -->
+          <cardinality>1</cardinality>
+          <category>MASTER</category>
+          <commandScript>
+            <script>scripts/snamenode.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
+          <name>HDFS_CLIENT</name>
+          <category>CLIENT</category>
+          <cardinality>0+</cardinality>
+          <commandScript>
+            <script>scripts/hdfs_client.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
+          <name>JOURNALNODE</name>
+          <category>SLAVE</category>
+          <cardinality>0+</cardinality>
+          <commandScript>
+            <script>scripts/journalnode.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+
+        <component>
+          <name>ZKFC</name>
+          <category>SLAVE</category>
+          <!-- TODO: cardinality is conditional on HA topology -->
+          <cardinality>0+</cardinality>
+          <commandScript>
+            <script>scripts/zkfc_slave.py</script>
+            <scriptType>PYTHON</scriptType>
+            <timeout>600</timeout>
+          </commandScript>
+        </component>
+      </components>
+
+      <osSpecifics>
+        <osSpecific>
+          <osFamily>any</osFamily>
+          <packages>
+            <package>
+              <name>hadoop</name>
+            </package>
+            <package>
+              <name>hadoop-lzo</name>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>redhat5,redhat6,suse11</osFamily>
+          <packages>
+            <package>
+              <name>snappy</name>
+            </package>
+            <package>
+              <name>snappy-devel</name>
+            </package>
+            <package>
+              <name>lzo</name>
+            </package>
+            <package>
+              <name>hadoop-lzo-native</name>
+            </package>
+            <package>
+              <name>hadoop-libhdfs</name>
+            </package>
+            <package>
+              <name>ambari-log4j</name>
+            </package>
+          </packages>
+        </osSpecific>
+
+        <osSpecific>
+          <osFamily>ubuntu12</osFamily>
+          <packages>
+            <package>
+              <name>libsnappy1</name>
+            </package>
+            <package>
+              <name>libsnappy-dev</name>
+            </package>
+            <package>
+              <name>liblzo2-2</name>
+            </package>
+            <package>
+              <name>hadoop-hdfs</name>
+            </package>
+            <package>
+              <name>libhdfs0</name>
+            </package>
+            <package>
+              <name>libhdfs0-dev</name>
+            </package>
+          </packages>
+        </osSpecific>
+
+      </osSpecifics>
+
+      <commandScript>
+        <script>scripts/service_check.py</script>
+        <scriptType>PYTHON</scriptType>
+        <timeout>300</timeout>
+      </commandScript>
+
+      <configuration-dependencies>
+        <config-type>core-site</config-type>
+        <config-type>global</config-type>
+        <config-type>hdfs-site</config-type>
+        <config-type>hadoop-policy</config-type>
+        <config-type>hdfs-log4j</config-type>
+      </configuration-dependencies>
+    </service>
+  </services>
+</metainfo>
\ No newline at end of file
