Author: asavu
Date: Sat Oct 22 17:05:07 2011
New Revision: 1187730

URL: http://svn.apache.org/viewvc?rev=1187730&view=rev
Log:
WHIRR-342. hadoop/hbase configuration & active roles on a node (kve via asavu)

Added:
    
whirr/trunk/core/src/main/java/org/apache/whirr/service/jclouds/CreateFileStatement.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/AbstractHadoopServiceTest.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSeparateNodesServiceTest.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSingleNodeServiceTest.java
    
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-separate.properties
    
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-single.properties
    
whirr/trunk/services/hbase/src/test/java/org/apache/whirr/service/hbase/integration/HBase090SingleNodeServiceTest.java
    
whirr/trunk/services/hbase/src/test/resources/whirr-hbase-0.90-singlenode-test.properties
Modified:
    whirr/trunk/CHANGES.txt
    
whirr/trunk/services/cdh/src/main/resources/functions/configure_cdh_hadoop.sh
    whirr/trunk/services/cdh/src/main/resources/functions/install_cdh_hadoop.sh
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopCluster.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopClusterActionHandler.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopConfigurationConverter.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopDataNodeClusterActionHandler.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopJobTrackerClusterActionHandler.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopNameNodeClusterActionHandler.java
    
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopTaskTrackerClusterActionHandler.java
    whirr/trunk/services/hadoop/src/main/resources/functions/configure_hadoop.sh
    whirr/trunk/services/hadoop/src/main/resources/functions/install_hadoop.sh
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceController.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceTest.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopBenchmarkSuite.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTeraSortBenchmark.java
    
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTestDFSIOBenchmark.java

Modified: whirr/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/whirr/trunk/CHANGES.txt?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- whirr/trunk/CHANGES.txt (original)
+++ whirr/trunk/CHANGES.txt Sat Oct 22 17:05:07 2011
@@ -44,6 +44,8 @@ Trunk (unreleased changes)
 
     WHIRR-397. Automatic template selection is too restrictive (asavu) 
 
+    WHIRR-342. hadoop/hbase configuration & active roles on a node (kve via 
asavu)
+
   BUG FIXES
 
     WHIRR-377. Fix broken CLI logging config. (asavu via tomwhite)

Added: 
whirr/trunk/core/src/main/java/org/apache/whirr/service/jclouds/CreateFileStatement.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/core/src/main/java/org/apache/whirr/service/jclouds/CreateFileStatement.java?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/core/src/main/java/org/apache/whirr/service/jclouds/CreateFileStatement.java
 (added)
+++ 
whirr/trunk/core/src/main/java/org/apache/whirr/service/jclouds/CreateFileStatement.java
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.whirr.service.jclouds;
+
+import java.util.Collections;
+import java.util.List;
+
+import org.jclouds.scriptbuilder.domain.OsFamily;
+import org.jclouds.scriptbuilder.domain.Statement;
+import org.jclouds.scriptbuilder.domain.Statements;
+
+public class CreateFileStatement implements Statement {
+  
+  private String path;
+  private List<String> lines;
+  
+  public CreateFileStatement(String path, List<String> lines) {
+     this.path = path;
+     this.lines = lines;
+  }
+
+  @Override
+  public Iterable<String> functionDependencies(OsFamily osFamily) {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public String render(OsFamily osFamily) {
+    StringBuilder builder = new StringBuilder();
+    builder.append(Statements.rm(path).render(osFamily));
+    builder.append(Statements.appendFile(path, lines).render(osFamily));
+    
+    return builder.toString();
+  }
+
+}

Modified: 
whirr/trunk/services/cdh/src/main/resources/functions/configure_cdh_hadoop.sh
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/cdh/src/main/resources/functions/configure_cdh_hadoop.sh?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/cdh/src/main/resources/functions/configure_cdh_hadoop.sh 
(original)
+++ 
whirr/trunk/services/cdh/src/main/resources/functions/configure_cdh_hadoop.sh 
Sat Oct 22 17:05:07 2011
@@ -18,6 +18,11 @@ function configure_cdh_hadoop() {
   local OPTIND
   local OPTARG
   
+  if [ "$CONFIGURE_HADOOP_DONE" == "1" ]; then
+    echo "Hadoop is already configured."
+    return;
+  fi
+  
   ROLES=$1
   shift
   
@@ -93,6 +98,9 @@ EOF
       ;;
     esac
   done
+  
+    CONFIGURE_HADOOP_DONE=1
+  
 }
 
 function start_namenode() {

Modified: 
whirr/trunk/services/cdh/src/main/resources/functions/install_cdh_hadoop.sh
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/cdh/src/main/resources/functions/install_cdh_hadoop.sh?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- whirr/trunk/services/cdh/src/main/resources/functions/install_cdh_hadoop.sh 
(original)
+++ whirr/trunk/services/cdh/src/main/resources/functions/install_cdh_hadoop.sh 
Sat Oct 22 17:05:07 2011
@@ -57,4 +57,6 @@ function install_cdh_hadoop() {
     cp -r /etc/$HADOOP/conf.empty $HADOOP_CONF_DIR
     alternatives --install /etc/$HADOOP/conf $HADOOP-conf $HADOOP_CONF_DIR 90
   fi
+  
+  INSTALL_HADOOP_DONE=1
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopCluster.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopCluster.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopCluster.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopCluster.java
 Sat Oct 22 17:05:07 2011
@@ -25,9 +25,30 @@ import org.apache.whirr.Cluster;
 import org.apache.whirr.RolePredicates;
 
 public class HadoopCluster {
+  
+  public static final int NAMENODE_PORT = 8020;
+  public static final int NAMENODE_WEB_UI_PORT = 50070;
+  public static final int JOBTRACKER_PORT = 8021;
+  public static final int JOBTRACKER_WEB_UI_PORT = 50030;
+  
   public static InetAddress getNamenodePublicAddress(Cluster cluster) throws 
IOException {
     return cluster.getInstanceMatching(
         RolePredicates.role(HadoopNameNodeClusterActionHandler.ROLE))
         .getPublicAddress();
   }
+  public static InetAddress getNamenodePrivateAddress(Cluster cluster) throws 
IOException {
+    return cluster.getInstanceMatching(
+        RolePredicates.role(HadoopNameNodeClusterActionHandler.ROLE))
+        .getPrivateAddress();
+  }
+  public static InetAddress getJobTrackerPublicAddress(Cluster cluster) throws 
IOException {
+    return cluster.getInstanceMatching(
+        RolePredicates.role(HadoopJobTrackerClusterActionHandler.ROLE))
+        .getPublicAddress();
+  }
+  public static InetAddress getJobTrackerPrivateAddress(Cluster cluster) 
throws IOException {
+    return cluster.getInstanceMatching(
+        RolePredicates.role(HadoopJobTrackerClusterActionHandler.ROLE))
+        .getPrivateAddress();
+  }
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopClusterActionHandler.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopClusterActionHandler.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopClusterActionHandler.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopClusterActionHandler.java
 Sat Oct 22 17:05:07 2011
@@ -18,17 +18,30 @@
 
 package org.apache.whirr.service.hadoop;
 
+import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildCommon;
+import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHdfs;
+import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildMapReduce;
+import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHadoopEnv;
 import static org.jclouds.scriptbuilder.domain.Statements.call;
 
+import com.google.common.base.Joiner;
+
 import java.io.IOException;
 
 import org.apache.commons.configuration.Configuration;
+import org.apache.commons.configuration.ConfigurationException;
+import org.apache.whirr.Cluster;
 import org.apache.whirr.ClusterSpec;
 import org.apache.whirr.service.ClusterActionEvent;
 import org.apache.whirr.service.ClusterActionHandlerSupport;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public abstract class HadoopClusterActionHandler extends 
ClusterActionHandlerSupport {
 
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopClusterActionHandler.class);
+
   /**
    * Returns a composite configuration that is made up from the global
    * configuration coming from the Whirr core with a hadoop defaults
@@ -63,4 +76,38 @@ public abstract class HadoopClusterActio
     addStatement(event, call(getInstallFunction(conf),
         "-u", tarball));
   }
+  
+  @Override
+  protected void beforeConfigure(ClusterActionEvent event)
+      throws IOException, InterruptedException {
+    ClusterSpec clusterSpec = event.getClusterSpec();
+    Cluster cluster = event.getCluster();
+    
+    doBeforeConfigure(event);
+
+    createHadoopConfigFiles(event, clusterSpec, cluster);
+    
+    addStatement(event, call(
+      getConfigureFunction(getConfiguration(clusterSpec)),
+      Joiner.on(",").join(event.getInstanceTemplate().getRoles()),
+      "-c", clusterSpec.getProvider())
+    );
+  }
+
+  protected void doBeforeConfigure(ClusterActionEvent event) throws 
IOException {};
+
+  private void createHadoopConfigFiles(ClusterActionEvent event,
+      ClusterSpec clusterSpec, Cluster cluster) throws IOException {
+    try {
+      event.getStatementBuilder().addStatements(
+        buildCommon("/tmp/core-site.xml", clusterSpec, cluster),
+        buildHdfs("/tmp/hdfs-site.xml", clusterSpec, cluster),
+        buildMapReduce("/tmp/mapred-site.xml", clusterSpec, cluster),
+        buildHadoopEnv("/tmp/hadoop-env.sh", clusterSpec, cluster)
+      );
+    } catch (ConfigurationException e) {
+      throw new IOException(e);
+    }
+  }
+
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopConfigurationConverter.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopConfigurationConverter.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopConfigurationConverter.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopConfigurationConverter.java
 Sat Oct 22 17:05:07 2011
@@ -27,8 +27,8 @@ import java.util.List;
 import org.apache.commons.configuration.AbstractConfiguration;
 import org.apache.commons.configuration.Configuration;
 import org.apache.commons.lang.StringUtils;
+import org.apache.whirr.service.jclouds.CreateFileStatement;
 import org.jclouds.scriptbuilder.domain.Statement;
-import org.jclouds.scriptbuilder.domain.Statements;
 
 /**
  * Helper class to convert between Hadoop configuration representations.
@@ -66,7 +66,7 @@ public class HadoopConfigurationConverte
   
   public static Statement asCreateXmlConfigurationFileStatement(String path, 
       Configuration hadoopConfig) {
-    return Statements.appendFile(path, asXmlConfigurationLines(hadoopConfig));
+    return new CreateFileStatement(path, 
asXmlConfigurationLines(hadoopConfig));
   }
 
   @VisibleForTesting
@@ -93,7 +93,7 @@ public class HadoopConfigurationConverte
   
   public static Statement asCreateEnvironmentVariablesFileStatement(String 
path, 
       Configuration config) {
-    return Statements.appendFile(path, asEnvironmentVariablesLines(config));
+    return new CreateFileStatement(path, asEnvironmentVariablesLines(config));
   }
   
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopDataNodeClusterActionHandler.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopDataNodeClusterActionHandler.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopDataNodeClusterActionHandler.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopDataNodeClusterActionHandler.java
 Sat Oct 22 17:05:07 2011
@@ -18,50 +18,38 @@
 
 package org.apache.whirr.service.hadoop;
 
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildCommon;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHdfs;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildMapReduce;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHadoopEnv;
-import static org.jclouds.scriptbuilder.domain.Statements.call;
-
 import java.io.IOException;
 
-import org.apache.commons.configuration.ConfigurationException;
 import org.apache.whirr.Cluster;
 import org.apache.whirr.ClusterSpec;
 import org.apache.whirr.service.ClusterActionEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class HadoopDataNodeClusterActionHandler extends 
HadoopClusterActionHandler {
 
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopDataNodeClusterActionHandler.class);
+
   public static final String ROLE = "hadoop-datanode";
   
   @Override
   public String getRole() {
     return ROLE;
   }
-  
+
   @Override
-  protected void beforeConfigure(ClusterActionEvent event)
-      throws IOException, InterruptedException {
+  protected void afterConfigure(ClusterActionEvent event) throws IOException,
+      InterruptedException {
     ClusterSpec clusterSpec = event.getClusterSpec();
     Cluster cluster = event.getCluster();
     
-    try {
-      event.getStatementBuilder().addStatements(
-        buildCommon("/tmp/core-site.xml", clusterSpec, cluster),
-        buildHdfs("/tmp/hdfs-site.xml", clusterSpec, cluster),
-        buildMapReduce("/tmp/mapred-site.xml", clusterSpec, cluster),
-        buildHadoopEnv("/tmp/hadoop-env.sh", clusterSpec, cluster)
-      );
-    } catch (ConfigurationException e) {
-      throw new IOException(e);
-    }
-
-    addStatement(event, call(
-      getConfigureFunction(getConfiguration(clusterSpec)),
-      "hadoop-datanode,hadoop-tasktracker",
-      "-c", clusterSpec.getProvider())
-    );
+    // TODO: wait for TTs to come up (done in test for the moment)
+    
+    LOG.info("Completed configuration of {} role {}", 
clusterSpec.getClusterName(), getRole());
+
+    // TODO: List data nodes + url to their WEB UI?
   }
   
+  
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopJobTrackerClusterActionHandler.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopJobTrackerClusterActionHandler.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopJobTrackerClusterActionHandler.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopJobTrackerClusterActionHandler.java
 Sat Oct 22 17:05:07 2011
@@ -18,15 +18,60 @@
 
 package org.apache.whirr.service.hadoop;
 
-import org.apache.whirr.service.ClusterActionHandlerSupport;
+import static org.apache.whirr.RolePredicates.role;
 
-// Currently the jobtracker is started by HadoopNameNodeClusterActionHandler
-public class HadoopJobTrackerClusterActionHandler extends 
ClusterActionHandlerSupport {
+import java.io.IOException;
+import java.net.InetAddress;
 
+import org.apache.whirr.Cluster;
+import org.apache.whirr.Cluster.Instance;
+import org.apache.whirr.ClusterSpec;
+import org.apache.whirr.service.ClusterActionEvent;
+import org.apache.whirr.service.FirewallManager.Rule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class HadoopJobTrackerClusterActionHandler extends 
HadoopNameNodeClusterActionHandler {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopJobTrackerClusterActionHandler.class);
+    
   public static final String ROLE = "hadoop-jobtracker";
   
   @Override
   public String getRole() {
     return ROLE;
   }
+  
+  @Override
+  protected void doBeforeConfigure(ClusterActionEvent event) throws 
IOException {
+    Cluster cluster = event.getCluster();
+    
+    Instance jobtracker = cluster.getInstanceMatching(role(ROLE));
+    event.getFirewallManager().addRules(
+        Rule.create()
+          .destination(jobtracker)
+          .ports(HadoopCluster.JOBTRACKER_WEB_UI_PORT),
+        Rule.create()
+          
.source(HadoopCluster.getNamenodePublicAddress(cluster).getHostAddress())
+          .destination(jobtracker)
+          .ports(HadoopCluster.JOBTRACKER_PORT)
+    );
+    
+  }
+  
+  @Override
+  protected void afterConfigure(ClusterActionEvent event) throws IOException {
+    ClusterSpec clusterSpec = event.getClusterSpec();
+    Cluster cluster = event.getCluster();
+    
+    LOG.info("Completed configuration of {} role {}", 
clusterSpec.getClusterName(), getRole());
+
+    InetAddress jobtrackerPublicAddress = 
HadoopCluster.getJobTrackerPublicAddress(cluster);
+
+    LOG.info("Jobtracker web UI available at http://{}:{}",
+      jobtrackerPublicAddress.getHostName(), 
HadoopCluster.JOBTRACKER_WEB_UI_PORT);
+
+  }
+  
 }

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopNameNodeClusterActionHandler.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopNameNodeClusterActionHandler.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopNameNodeClusterActionHandler.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopNameNodeClusterActionHandler.java
 Sat Oct 22 17:05:07 2011
@@ -19,11 +19,6 @@
 package org.apache.whirr.service.hadoop;
 
 import static org.apache.whirr.RolePredicates.role;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildCommon;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHadoopEnv;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildHdfs;
-import static 
org.apache.whirr.service.hadoop.HadoopConfigurationBuilder.buildMapReduce;
-import static org.jclouds.scriptbuilder.domain.Statements.call;
 
 import com.google.common.base.Charsets;
 import com.google.common.base.Joiner;
@@ -35,7 +30,6 @@ import java.net.InetAddress;
 import java.util.Map.Entry;
 import java.util.Properties;
 
-import org.apache.commons.configuration.ConfigurationException;
 import org.apache.whirr.Cluster;
 import org.apache.whirr.Cluster.Instance;
 import org.apache.whirr.ClusterSpec;
@@ -51,48 +45,26 @@ public class HadoopNameNodeClusterAction
   
   public static final String ROLE = "hadoop-namenode";
   
-  public static final int NAMENODE_PORT = 8020;
-  public static final int JOBTRACKER_PORT = 8021;
-  public static final int NAMENODE_WEB_UI_PORT = 50070;
-  public static final int JOBTRACKER_WEB_UI_PORT = 50030;
-    
   @Override
   public String getRole() {
     return ROLE;
   }
   
   @Override
-  protected void beforeConfigure(ClusterActionEvent event) throws IOException, 
InterruptedException {
-    ClusterSpec clusterSpec = event.getClusterSpec();
+  protected void doBeforeConfigure(ClusterActionEvent event) throws 
IOException {
     Cluster cluster = event.getCluster();
     
     Instance namenode = cluster.getInstanceMatching(role(ROLE));
     event.getFirewallManager().addRules(
         Rule.create()
           .destination(namenode)
-          .ports(NAMENODE_WEB_UI_PORT, JOBTRACKER_WEB_UI_PORT),
+          .ports(HadoopCluster.NAMENODE_WEB_UI_PORT),
         Rule.create()
           .source(namenode.getPublicAddress().getHostAddress())
           .destination(namenode)
-          .ports(NAMENODE_PORT, JOBTRACKER_PORT)
+          .ports(HadoopCluster.NAMENODE_PORT, HadoopCluster.JOBTRACKER_PORT)
     );
     
-    try {
-      event.getStatementBuilder().addStatements(
-        buildCommon("/tmp/core-site.xml", clusterSpec, cluster),
-        buildHdfs("/tmp/hdfs-site.xml", clusterSpec, cluster),
-        buildMapReduce("/tmp/mapred-site.xml", clusterSpec, cluster),
-        buildHadoopEnv("/tmp/hadoop-env.sh", clusterSpec, cluster)
-      );
-    } catch (ConfigurationException e) {
-      throw new IOException(e);
-    }
-    
-    addStatement(event, call(
-      getConfigureFunction(getConfiguration(clusterSpec)),
-      "hadoop-namenode,hadoop-jobtracker",
-      "-c", clusterSpec.getProvider())
-    );
   }
   
   @Override
@@ -102,15 +74,13 @@ public class HadoopNameNodeClusterAction
     
     // TODO: wait for TTs to come up (done in test for the moment)
     
-    LOG.info("Completed configuration of {}", clusterSpec.getClusterName());
-    Instance instance = cluster.getInstanceMatching(role(ROLE));
-    InetAddress namenodePublicAddress = instance.getPublicAddress();
-    InetAddress jobtrackerPublicAddress = namenodePublicAddress;
+    LOG.info("Completed configuration of {} role {}", 
clusterSpec.getClusterName(), getRole());
+    InetAddress namenodePublicAddress = 
HadoopCluster.getNamenodePublicAddress(cluster);
+    InetAddress jobtrackerPublicAddress = 
HadoopCluster.getJobTrackerPublicAddress(cluster);
 
     LOG.info("Namenode web UI available at http://{}:{}",
-      namenodePublicAddress.getHostName(), NAMENODE_WEB_UI_PORT);
-    LOG.info("Jobtracker web UI available at http://{}:{}",
-      jobtrackerPublicAddress.getHostName(), JOBTRACKER_WEB_UI_PORT);
+      namenodePublicAddress.getHostName(), HadoopCluster.NAMENODE_WEB_UI_PORT);
+
     Properties config = createClientSideProperties(clusterSpec, 
namenodePublicAddress, jobtrackerPublicAddress);
     createClientSideHadoopSiteFile(clusterSpec, config);
     createProxyScript(clusterSpec, cluster);

Modified: 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopTaskTrackerClusterActionHandler.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopTaskTrackerClusterActionHandler.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopTaskTrackerClusterActionHandler.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/main/java/org/apache/whirr/service/hadoop/HadoopTaskTrackerClusterActionHandler.java
 Sat Oct 22 17:05:07 2011
@@ -18,15 +18,59 @@
 
 package org.apache.whirr.service.hadoop;
 
-import org.apache.whirr.service.ClusterActionHandlerSupport;
+import static org.apache.whirr.RolePredicates.role;
 
-// Currently the tasktracker is started by HadoopDataNodeClusterActionHandler
-public class HadoopTaskTrackerClusterActionHandler extends 
ClusterActionHandlerSupport {
+import java.io.IOException;
 
+import org.apache.whirr.Cluster;
+import org.apache.whirr.Cluster.Instance;
+import org.apache.whirr.ClusterSpec;
+import org.apache.whirr.service.ClusterActionEvent;
+import org.apache.whirr.service.FirewallManager.Rule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+
+public class HadoopTaskTrackerClusterActionHandler extends 
HadoopClusterActionHandler {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(HadoopTaskTrackerClusterActionHandler.class);
+    
   public static final String ROLE = "hadoop-tasktracker";
   
   @Override
   public String getRole() {
     return ROLE;
   }
+
+  @Override
+  protected void doBeforeConfigure(ClusterActionEvent event) throws 
IOException {
+    Cluster cluster = event.getCluster();
+    
+    Instance jobtracker = cluster.getInstanceMatching(role(ROLE));
+    event.getFirewallManager().addRules(
+        Rule.create()
+          .destination(jobtracker)
+          .ports(HadoopCluster.JOBTRACKER_WEB_UI_PORT),
+        Rule.create()
+          
.source(HadoopCluster.getNamenodePublicAddress(cluster).getHostAddress())
+          .destination(jobtracker)
+          .ports(HadoopCluster.JOBTRACKER_PORT)
+    );
+    
+  }
+
+  @Override
+  protected void afterConfigure(ClusterActionEvent event) throws IOException,
+      InterruptedException {
+    ClusterSpec clusterSpec = event.getClusterSpec();
+    
+    // TODO: wait for TTs to come up (done in test for the moment)
+    
+    LOG.info("Completed configuration of {} role {}", 
clusterSpec.getClusterName(), getRole());
+
+    // TODO: List task trackers + url to their WEB UI?
+  }
+  
 }

Modified: 
whirr/trunk/services/hadoop/src/main/resources/functions/configure_hadoop.sh
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/resources/functions/configure_hadoop.sh?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/main/resources/functions/configure_hadoop.sh 
(original)
+++ 
whirr/trunk/services/hadoop/src/main/resources/functions/configure_hadoop.sh 
Sat Oct 22 17:05:07 2011
@@ -17,7 +17,12 @@
 function configure_hadoop() {
   local OPTIND
   local OPTARG
-  
+
+  if [ "$CONFIGURE_HADOOP_DONE" == "1" ]; then
+    echo "Hadoop is already configured."
+    return;
+  fi
+
   ROLES=$1
   shift
   
@@ -84,6 +89,8 @@ function configure_hadoop() {
     esac
   done
 
+  CONFIGURE_HADOOP_DONE=1
+
 }
 
 function start_namenode() {
@@ -119,5 +126,6 @@ function start_hadoop_daemon() {
     AS_HADOOP="/sbin/runuser -s /bin/bash - hadoop -c"
   fi
   $AS_HADOOP "$HADOOP_HOME/bin/hadoop-daemon.sh start $1"
+  
 }
 

Modified: 
whirr/trunk/services/hadoop/src/main/resources/functions/install_hadoop.sh
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/main/resources/functions/install_hadoop.sh?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- whirr/trunk/services/hadoop/src/main/resources/functions/install_hadoop.sh 
(original)
+++ whirr/trunk/services/hadoop/src/main/resources/functions/install_hadoop.sh 
Sat Oct 22 17:05:07 2011
@@ -25,6 +25,11 @@ function update_repo() {
 function install_hadoop() {
   local OPTIND
   local OPTARG
+
+  if [ "$INSTALL_HADOOP_DONE" == "1" ]; then
+    echo "Hadoop is already installed."
+    return;
+  fi
   
   HADOOP_TAR_URL=
   while getopts "u:" OPTION; do
@@ -48,5 +53,7 @@ function install_hadoop() {
 
   echo "export HADOOP_HOME=$HADOOP_HOME" >> ~root/.bashrc
   echo 'export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$PATH' >> ~root/.bashrc
+  
+  INSTALL_HADOOP_DONE=1
 }
 

Added: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/AbstractHadoopServiceTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/AbstractHadoopServiceTest.java?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/AbstractHadoopServiceTest.java
 (added)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/AbstractHadoopServiceTest.java
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,103 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.whirr.service.hadoop.integration;
+
+import static junit.framework.Assert.assertEquals;
+import static junit.framework.Assert.assertNull;
+import static org.hamcrest.core.Is.is;
+import static org.junit.Assert.assertThat;
+
+import java.io.BufferedReader;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.FileOutputFormat;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.lib.LongSumReducer;
+import org.apache.hadoop.mapred.lib.TokenCountMapper;
+import org.junit.AfterClass;
+import org.junit.Test;
+
+/**
+ * Subclasses should implement @BeforeClass static void setUp()
+ * to set the controller.
+ */
+public abstract class AbstractHadoopServiceTest {
+
+  protected static HadoopServiceController controller;
+  
+  @AfterClass
+  public static void tearDown() throws Exception {
+    controller.shutdown();
+  }
+
+  public AbstractHadoopServiceTest() {
+    super();
+  }
+
+  @Test
+  public void test() throws Exception {
+    Configuration conf = controller.getConfiguration();
+    JobConf job = new JobConf(conf, HadoopServiceTest.class);
+  
+    FileSystem fs = FileSystem.get(conf);
+    
+    OutputStream os = fs.create(new Path("input"));
+    Writer wr = new OutputStreamWriter(os);
+    wr.write("b a\n");
+    wr.close();
+    
+    job.setMapperClass(TokenCountMapper.class);
+    job.setReducerClass(LongSumReducer.class);
+    job.setOutputKeyClass(Text.class);
+    job.setOutputValueClass(LongWritable.class);
+    FileInputFormat.setInputPaths(job, new Path("input"));
+    FileOutputFormat.setOutputPath(job, new Path("output"));
+    
+    JobClient.runJob(job);
+  
+    FSDataInputStream in = fs.open(new Path("output/part-00000"));
+    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
+    assertEquals("a\t1", reader.readLine());
+    assertEquals("b\t1", reader.readLine());
+    assertNull(reader.readLine());
+    reader.close();
+    
+  }
+
+  @Test
+  public void testExistsTemporaryFolderAndHiveWarehouse() throws Exception {
+    Configuration conf = controller.getConfiguration();
+    FileSystem fs = FileSystem.get(conf);
+  
+    assertThat(fs.exists(new Path("/tmp")), is(true));
+    assertThat(fs.exists(new Path("/user/hive/warehouse")), is(true));
+  }
+
+}

Added: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSeparateNodesServiceTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSeparateNodesServiceTest.java?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSeparateNodesServiceTest.java
 (added)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSeparateNodesServiceTest.java
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.whirr.service.hadoop.integration;
+
+import org.junit.BeforeClass;
+
+public class HadoopSeparateNodesServiceTest extends AbstractHadoopServiceTest {
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    controller = 
HadoopServiceController.getInstance("whirr-hadoop-test-separate.properties");
+    controller.ensureClusterRunning();
+  }
+
+}

Modified: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceController.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceController.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceController.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceController.java
 Sat Oct 22 17:05:07 2011
@@ -19,9 +19,12 @@
 package org.apache.whirr.service.hadoop.integration;
 
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Map.Entry;
 
 import org.apache.commons.configuration.CompositeConfiguration;
+import org.apache.commons.configuration.ConfigurationException;
 import org.apache.commons.configuration.PropertiesConfiguration;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -42,20 +45,27 @@ public class HadoopServiceController {
   private static final Logger LOG =
     LoggerFactory.getLogger(HadoopServiceController.class);
 
-  private static final HadoopServiceController INSTANCE =
-    new HadoopServiceController();
+  private static final Map<String, HadoopServiceController> INSTANCES =
+    new HashMap<String, HadoopServiceController>();
   
-  public static HadoopServiceController getInstance() {
-    return INSTANCE;
+  public static HadoopServiceController getInstance(String config) throws 
ConfigurationException {
+    if (!INSTANCES.containsKey(config)) {
+      PropertiesConfiguration configuration = new 
PropertiesConfiguration(config);
+      INSTANCES.put(config, new HadoopServiceController(configuration));
+    }
+    return INSTANCES.get(config);
   }
   
+  private org.apache.commons.configuration.Configuration configuration;
+
   private boolean running;
   private ClusterSpec clusterSpec;
   private ClusterController controller;
   private HadoopProxy proxy;
   private Cluster cluster;
   
-  private HadoopServiceController() {
+  private 
HadoopServiceController(org.apache.commons.configuration.Configuration 
configuration) {
+    this.configuration = configuration;
   }
   
   public synchronized boolean ensureClusterRunning() throws Exception {
@@ -74,7 +84,7 @@ public class HadoopServiceController {
     if (System.getProperty("config") != null) {
       config.addConfiguration(new 
PropertiesConfiguration(System.getProperty("config")));
     }
-    config.addConfiguration(new 
PropertiesConfiguration("whirr-hadoop-test.properties"));
+    config.addConfiguration(configuration);
     clusterSpec = ClusterSpec.withTemporaryKeys(config);
     controller = new ClusterController();
     

Modified: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceTest.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceTest.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopServiceTest.java
 Sat Oct 22 17:05:07 2011
@@ -18,86 +18,14 @@
 
 package org.apache.whirr.service.hadoop.integration;
 
-import static junit.framework.Assert.assertEquals;
-import static junit.framework.Assert.assertNull;
-import static org.hamcrest.core.Is.is;
-import static org.junit.Assert.assertThat;
-
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.FileInputFormat;
-import org.apache.hadoop.mapred.FileOutputFormat;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.lib.LongSumReducer;
-import org.apache.hadoop.mapred.lib.TokenCountMapper;
-import org.junit.AfterClass;
 import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class HadoopServiceTest {
 
-  private static HadoopServiceController controller =
-    HadoopServiceController.getInstance();
+public class HadoopServiceTest extends AbstractHadoopServiceTest {
 
-        
   @BeforeClass
   public static void setUp() throws Exception {
+    controller = 
HadoopServiceController.getInstance("whirr-hadoop-test.properties");
     controller.ensureClusterRunning();
   }
-        
-  @AfterClass
-  public static void tearDown() throws Exception {
-    controller.shutdown();
-  }
-  
-  @Test
-  public void test() throws Exception {
-    Configuration conf = controller.getConfiguration();
-    JobConf job = new JobConf(conf, HadoopServiceTest.class);
-
-    FileSystem fs = FileSystem.get(conf);
-    
-    OutputStream os = fs.create(new Path("input"));
-    Writer wr = new OutputStreamWriter(os);
-    wr.write("b a\n");
-    wr.close();
-    
-    job.setMapperClass(TokenCountMapper.class);
-    job.setReducerClass(LongSumReducer.class);
-    job.setOutputKeyClass(Text.class);
-    job.setOutputValueClass(LongWritable.class);
-    FileInputFormat.setInputPaths(job, new Path("input"));
-    FileOutputFormat.setOutputPath(job, new Path("output"));
-    
-    JobClient.runJob(job);
-
-    FSDataInputStream in = fs.open(new Path("output/part-00000"));
-    BufferedReader reader = new BufferedReader(new InputStreamReader(in));
-    assertEquals("a\t1", reader.readLine());
-    assertEquals("b\t1", reader.readLine());
-    assertNull(reader.readLine());
-    reader.close();
-    
-  }
-
-  @Test
-  public void testExistsTemporaryFolderAndHiveWarehouse() throws Exception {
-    Configuration conf = controller.getConfiguration();
-    FileSystem fs = FileSystem.get(conf);
-
-    assertThat(fs.exists(new Path("/tmp")), is(true));
-    assertThat(fs.exists(new Path("/user/hive/warehouse")), is(true));
-  }
 
 }

Added: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSingleNodeServiceTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSingleNodeServiceTest.java?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSingleNodeServiceTest.java
 (added)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/HadoopSingleNodeServiceTest.java
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.whirr.service.hadoop.integration;
+
+import org.junit.BeforeClass;
+
+public class HadoopSingleNodeServiceTest extends AbstractHadoopServiceTest {
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    controller = 
HadoopServiceController.getInstance("whirr-hadoop-test-single.properties");
+    controller.ensureClusterRunning();
+  }
+
+}

Modified: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopBenchmarkSuite.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopBenchmarkSuite.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopBenchmarkSuite.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopBenchmarkSuite.java
 Sat Oct 22 17:05:07 2011
@@ -30,11 +30,11 @@ import org.junit.runners.Suite;
 })
 public class HadoopBenchmarkSuite {
   
-  private static HadoopServiceController controller =
-    HadoopServiceController.getInstance();
+  private static HadoopServiceController controller;
   
   @BeforeClass
   public static void setUp() throws Exception {
+    controller = HadoopServiceController.getInstance("whirr-hadoop-test.properties");
     controller.ensureClusterRunning();
   }
   

Modified: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTeraSortBenchmark.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTeraSortBenchmark.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTeraSortBenchmark.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTeraSortBenchmark.java
 Sat Oct 22 17:05:07 2011
@@ -47,12 +47,11 @@ public class HadoopServiceTeraSortBenchm
     LoggerFactory.getLogger(HadoopServiceTeraSortBenchmark.class);
   
   private static boolean shutdownClusterOnTearDown;
-  private static HadoopServiceController controller =
-    HadoopServiceController.getInstance();
-
+  private static HadoopServiceController controller;
   
   @BeforeClass
   public static void setUp() throws Exception {
+    controller = 
HadoopServiceController.getInstance("whirr-hadoop-test.properties");
     shutdownClusterOnTearDown = controller.ensureClusterRunning();
   }
   

Modified: 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTestDFSIOBenchmark.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTestDFSIOBenchmark.java?rev=1187730&r1=1187729&r2=1187730&view=diff
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTestDFSIOBenchmark.java
 (original)
+++ 
whirr/trunk/services/hadoop/src/test/java/org/apache/whirr/service/hadoop/integration/benchmark/HadoopServiceTestDFSIOBenchmark.java
 Sat Oct 22 17:05:07 2011
@@ -31,12 +31,11 @@ public class HadoopServiceTestDFSIOBench
     LoggerFactory.getLogger(HadoopServiceTestDFSIOBenchmark.class);
 
   private static boolean shutdownClusterOnTearDown;
-  private static HadoopServiceController controller =
-    HadoopServiceController.getInstance();
-
+  private static HadoopServiceController controller;
   
   @BeforeClass
   public static void setUp() throws Exception {
+    controller = HadoopServiceController.getInstance("whirr-hadoop-test.properties");
     shutdownClusterOnTearDown = controller.ensureClusterRunning();
   }
   

Added: 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-separate.properties
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-separate.properties?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-separate.properties
 (added)
+++ 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-separate.properties
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+whirr.cluster-name=hadoopclustertest-separate
+whirr.instance-templates=1 hadoop-jobtracker,1 hadoop-namenode,1 
hadoop-datanode,1 hadoop-tasktracker
+whirr.provider=${sys:whirr.test.provider}
+whirr.identity=${sys:whirr.test.identity}
+whirr.credential=${sys:whirr.test.credential}
+whirr.aws-ec2-spot-price=0.34

Added: 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-single.properties
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-single.properties?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-single.properties
 (added)
+++ 
whirr/trunk/services/hadoop/src/test/resources/whirr-hadoop-test-single.properties
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,23 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+whirr.cluster-name=hadoopclustertest-single
+whirr.instance-templates=1 
hadoop-jobtracker+hadoop-namenode+hadoop-datanode+hadoop-tasktracker
+whirr.provider=${sys:whirr.test.provider}
+whirr.identity=${sys:whirr.test.identity}
+whirr.credential=${sys:whirr.test.credential}
+whirr.aws-ec2-spot-price=0.34

Added: 
whirr/trunk/services/hbase/src/test/java/org/apache/whirr/service/hbase/integration/HBase090SingleNodeServiceTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hbase/src/test/java/org/apache/whirr/service/hbase/integration/HBase090SingleNodeServiceTest.java?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hbase/src/test/java/org/apache/whirr/service/hbase/integration/HBase090SingleNodeServiceTest.java
 (added)
+++ 
whirr/trunk/services/hbase/src/test/java/org/apache/whirr/service/hbase/integration/HBase090SingleNodeServiceTest.java
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,29 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.whirr.service.hbase.integration;
+
+import org.junit.BeforeClass;
+
+public class HBase090SingleNodeServiceTest extends HBaseServiceTest {
+  @BeforeClass
+  public static void setUp() throws Exception {
+    controller = 
HBaseServiceController.getInstance("whirr-hbase-0.90-singlenode-test.properties");
+    controller.ensureClusterRunning();
+  }
+}

Added: 
whirr/trunk/services/hbase/src/test/resources/whirr-hbase-0.90-singlenode-test.properties
URL: 
http://svn.apache.org/viewvc/whirr/trunk/services/hbase/src/test/resources/whirr-hbase-0.90-singlenode-test.properties?rev=1187730&view=auto
==============================================================================
--- 
whirr/trunk/services/hbase/src/test/resources/whirr-hbase-0.90-singlenode-test.properties
 (added)
+++ 
whirr/trunk/services/hbase/src/test/resources/whirr-hbase-0.90-singlenode-test.properties
 Sat Oct 22 17:05:07 2011
@@ -0,0 +1,26 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+whirr.cluster-name=hbaseclustertest
+whirr.instance-templates=1 
hadoop-namenode+hadoop-jobtracker+zookeeper+hbase-master+hbase-thriftserver,1 hadoop-datanode+hadoop-tasktracker+hbase-regionserver
+
+whirr.provider=${sys:whirr.test.provider}
+whirr.identity=${sys:whirr.test.identity}
+whirr.credential=${sys:whirr.test.credential}
+
+whirr.hbase.tarball.url=http://apache.cu.be/hbase/hbase-0.90.3/hbase-0.90.3.tar.gz
+whirr.hadoop.tarball.url=http://archive.cloudera.com/cdh/3/hadoop-0.20.2-cdh3u0.tar.gz


Reply via email to