Author: asavu
Date: Mon Nov  7 10:05:09 2011
New Revision: 1198689

URL: http://svn.apache.org/viewvc?rev=1198689&view=rev
Log:
WHIRR-398. Implement the execution of scripts on DestroyClusterAction (David 
Alves via asavu)

Modified:
    whirr/trunk/CHANGES.txt
    whirr/trunk/core/src/main/java/org/apache/whirr/ClusterController.java
    whirr/trunk/core/src/main/java/org/apache/whirr/actions/DestroyClusterAction.java
    whirr/trunk/core/src/main/java/org/apache/whirr/service/DryRunModule.java
    whirr/trunk/core/src/test/java/org/apache/whirr/service/DryRunModuleTest.java

Modified: whirr/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/whirr/trunk/CHANGES.txt?rev=1198689&r1=1198688&r2=1198689&view=diff
==============================================================================
--- whirr/trunk/CHANGES.txt (original)
+++ whirr/trunk/CHANGES.txt Mon Nov  7 10:05:09 2011
@@ -56,6 +56,9 @@ Trunk (unreleased changes)
 
     WHIRR-423. Refactor StartupProcess.cleanupFailedNodes (Adrian Cole via 
asavu)
 
+    WHIRR-398. Implement the execution of scripts on DestroyClusterAction 
+    (David Alves via asavu)
+
   BUG FIXES
 
     WHIRR-377. Fix broken CLI logging config. (asavu via tomwhite)

Modified: whirr/trunk/core/src/main/java/org/apache/whirr/ClusterController.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/core/src/main/java/org/apache/whirr/ClusterController.java?rev=1198689&r1=1198688&r2=1198689&view=diff
==============================================================================
--- whirr/trunk/core/src/main/java/org/apache/whirr/ClusterController.java 
(original)
+++ whirr/trunk/core/src/main/java/org/apache/whirr/ClusterController.java Mon 
Nov  7 10:05:09 2011
@@ -116,15 +116,22 @@ public class ClusterController {
 
   /**
    * Stop the cluster and destroy all resources associated with it.
-   *
-   * @throws IOException if there is a problem while stopping the cluster. The
-   * cluster may or may not have been stopped.
-   * @throws InterruptedException if the thread is interrupted.
+   * 
+   * @throws IOException
+   *           if there is a problem while stopping the cluster. The cluster may
+   *           or may not have been stopped.
+   * @throws InterruptedException
+   *           if the thread is interrupted.
    */
   public void destroyCluster(ClusterSpec clusterSpec) throws IOException,
       InterruptedException {
-    DestroyClusterAction destroyer = new DestroyClusterAction(getCompute());
-    destroyer.execute(clusterSpec, null);
+
+    ClusterStateStore store = getClusterStateStore(clusterSpec);
+    Cluster cluster = store.load();
+
+    DestroyClusterAction destroyer = new DestroyClusterAction(getCompute(),
+        HandlerMapFactory.create());
+    destroyer.execute(clusterSpec, cluster);
 
     getClusterStateStore(clusterSpec).destroy();
   }

Modified: 
whirr/trunk/core/src/main/java/org/apache/whirr/actions/DestroyClusterAction.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/core/src/main/java/org/apache/whirr/actions/DestroyClusterAction.java?rev=1198689&r1=1198688&r2=1198689&view=diff
==============================================================================
--- 
whirr/trunk/core/src/main/java/org/apache/whirr/actions/DestroyClusterAction.java
 (original)
+++ 
whirr/trunk/core/src/main/java/org/apache/whirr/actions/DestroyClusterAction.java
 Mon Nov  7 10:05:09 2011
@@ -18,32 +18,57 @@
 
 package org.apache.whirr.actions;
 
+import static org.apache.whirr.RolePredicates.onlyRolesIn;
+import static org.jclouds.compute.options.RunScriptOptions.Builder.overrideCredentialsWith;
 import static org.jclouds.compute.predicates.NodePredicates.inGroup;
 
 import java.io.IOException;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+
+import javax.annotation.Nullable;
 
 import org.apache.whirr.Cluster;
-import org.apache.whirr.ClusterAction;
+import org.apache.whirr.Cluster.Instance;
 import org.apache.whirr.ClusterSpec;
+import org.apache.whirr.InstanceTemplate;
+import org.apache.whirr.service.ClusterActionEvent;
 import org.apache.whirr.service.ClusterActionHandler;
+import org.apache.whirr.service.jclouds.StatementBuilder;
 import org.jclouds.compute.ComputeService;
 import org.jclouds.compute.ComputeServiceContext;
+import org.jclouds.compute.domain.ExecResponse;
+import org.jclouds.domain.Credentials;
+import org.jclouds.scriptbuilder.domain.OsFamily;
+import org.jclouds.scriptbuilder.domain.Statement;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Function;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Sets;
 
 /**
- * A {@link ClusterAction} for tearing down a running cluster and freeing up
- * all its resources.
+ * A {@link org.apache.whirr.ClusterAction} for tearing down a running cluster
+ * and freeing up all its resources.
  */
-public class DestroyClusterAction extends ClusterAction {
+public class DestroyClusterAction extends ScriptBasedClusterAction {
+
+  private static final Logger LOG = LoggerFactory
+      .getLogger(DestroyClusterAction.class);
 
-  private static final Logger LOG =
-    LoggerFactory.getLogger(DestroyClusterAction.class);
-  
-  public DestroyClusterAction(Function<ClusterSpec, ComputeServiceContext> getCompute) {
-    super(getCompute);
+  public DestroyClusterAction(
+      final Function<ClusterSpec, ComputeServiceContext> getCompute,
+      final Map<String, ClusterActionHandler> handlerMap) {
+    super(getCompute, handlerMap);
   }
 
   @Override
@@ -52,13 +77,95 @@ public class DestroyClusterAction extend
   }
 
   @Override
-  public Cluster execute(ClusterSpec clusterSpec, Cluster cluster)
+  protected void doAction(Map<InstanceTemplate, ClusterActionEvent> eventMap)
       throws IOException, InterruptedException {
+
+    final ExecutorService executorService = Executors.newCachedThreadPool();
+    final Collection<Future<ExecResponse>> futures = Sets.newHashSet();
+
+    ClusterSpec clusterSpec = eventMap.values().iterator().next()
+        .getClusterSpec();
+
+    for (Entry<InstanceTemplate, ClusterActionEvent> entry : eventMap
+        .entrySet()) {
+
+      Cluster cluster = entry.getValue().getCluster();
+
+      StatementBuilder statementBuilder = entry.getValue()
+          .getStatementBuilder();
+
+      ComputeServiceContext computeServiceContext = getCompute().apply(
+          clusterSpec);
+      final ComputeService computeService = computeServiceContext
+          .getComputeService();
+
+      final Credentials credentials = new Credentials(
+          clusterSpec.getClusterUser(), clusterSpec.getPrivateKey());
+
+      Set<Instance> instances = cluster.getInstancesMatching(onlyRolesIn(entry
+          .getKey().getRoles()));
+
+      String instanceIds = Joiner.on(", ").join(
+          Iterables.transform(instances, new Function<Instance, String>() {
+            @Override
+            public String apply(@Nullable Instance instance) {
+              return instance == null ? "<null>" : instance.getId();
+            }
+          }));
+
+      LOG.info("Starting to run destroy scripts on cluster " + "instances: {}",
+          instanceIds);
+
+      for (final Instance instance : instances) {
+        final Statement statement = statementBuilder.build(clusterSpec,
+            instance);
+
+        futures.add(executorService.submit(new Callable<ExecResponse>() {
+          @Override
+          public ExecResponse call() {
+
+            LOG.info("Running destroy script on: {}", instance.getId());
+            if (LOG.isDebugEnabled()) {
+              LOG.debug("Destroy script for {}:\n{}", instance.getId(),
+                  statement.render(OsFamily.UNIX));
+            }
+
+            try {
+              return computeService.runScriptOnNode(
+                  instance.getId(),
+                  statement,
+                  overrideCredentialsWith(credentials)
+                      .runAsRoot(true)
+                      .nameTask(
+                          "destroy-" + Joiner.on('_').join(instance.getRoles())));
+
+            } finally {
+              LOG.info("Destroy script run completed on: {}", instance.getId());
+            }
+          }
+        }));
+      }
+    }
+
+    for (Future<ExecResponse> future : futures) {
+      try {
+        ExecResponse execResponse = future.get();
+        if (execResponse.getExitCode() != 0) {
+          LOG.error("Error running script: {}\n{}", execResponse.getError(),
+              execResponse.getOutput());
+        }
+      } catch (ExecutionException e) {
+        throw new IOException(e.getCause());
+      }
+    }
+
+    LOG.info("Finished running destroy scripts on all cluster instances.");
+
     LOG.info("Destroying " + clusterSpec.getClusterName() + " cluster");
-    ComputeService computeService = getCompute().apply(clusterSpec).getComputeService();
+    ComputeService computeService = getCompute().apply(clusterSpec)
+        .getComputeService();
     computeService.destroyNodesMatching(inGroup(clusterSpec.getClusterName()));
     LOG.info("Cluster {} destroyed", clusterSpec.getClusterName());
-    return null;
   }
 
 }

Modified: 
whirr/trunk/core/src/main/java/org/apache/whirr/service/DryRunModule.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/core/src/main/java/org/apache/whirr/service/DryRunModule.java?rev=1198689&r1=1198688&r2=1198689&view=diff
==============================================================================
--- whirr/trunk/core/src/main/java/org/apache/whirr/service/DryRunModule.java 
(original)
+++ whirr/trunk/core/src/main/java/org/apache/whirr/service/DryRunModule.java 
Mon Nov  7 10:05:09 2011
@@ -29,6 +29,9 @@ import static com.google.inject.matcher.
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentMap;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -42,6 +45,7 @@ import org.aopalliance.intercept.MethodI
 import org.jclouds.compute.callables.RunScriptOnNode;
 import org.jclouds.compute.domain.ExecResponse;
 import org.jclouds.compute.domain.NodeMetadata;
+import org.jclouds.compute.domain.internal.NodeMetadataImpl;
 import org.jclouds.crypto.CryptoStreams;
 import org.jclouds.domain.Credentials;
 import org.jclouds.io.Payload;
@@ -54,6 +58,7 @@ import org.slf4j.LoggerFactory;
 import com.google.common.base.Function;
 import com.google.common.base.Objects;
 import com.google.common.base.Predicate;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableListMultimap;
 import com.google.common.collect.LinkedListMultimap;
 import com.google.common.collect.ListMultimap;
@@ -72,282 +77,307 @@ import com.google.inject.AbstractModule;
 // note that most of this logic will be pulled into jclouds 1.0-beta-10 per
 // http://code.google.com/p/jclouds/issues/detail?id=490
 public class DryRunModule extends AbstractModule {
-    private static final Logger LOG = LoggerFactory
-            .getLogger(DryRunModule.class);
+  private static final Logger LOG = LoggerFactory.getLogger(DryRunModule.class);
 
-    // an example showing how to intercept any internal method for logging
-    // purposes
-    public class LogCallToRunScriptOnNode implements MethodInterceptor {
-
-        public Object invoke(MethodInvocation i) throws Throwable {
-            if (i.getMethod().getName().equals("call")) {
-                RunScriptOnNode runScriptOnNode = RunScriptOnNode.class.cast(i
-                        .getThis());
-                String nodeName = runScriptOnNode.getNode().getName();
-                LOG.info(nodeName + " >> running script");
-                Object returnVal = i.proceed();
-                LOG.info(nodeName + " << " + returnVal);
-                return returnVal;
-            } else {
-                return i.proceed();
-            }
-        }
+  // an example showing how to intercept any internal method for logging
+  // purposes
+  public class LogCallToRunScriptOnNode implements MethodInterceptor {
+
+    public Object invoke(MethodInvocation i) throws Throwable {
+      if (i.getMethod().getName().equals("call")) {
+        RunScriptOnNode runScriptOnNode = RunScriptOnNode.class.cast(i
+            .getThis());
+        String nodeName = runScriptOnNode.getNode().getName();
+        LOG.info(nodeName + " >> running script");
+        Object returnVal = i.proceed();
+        LOG.info(nodeName + " << " + returnVal);
+        return returnVal;
+      } else {
+        return i.proceed();
+      }
     }
+  }
 
-    public static synchronized DryRun getDryRun() {
-        return DryRun.INSTANCE;
-    }
+  public static synchronized DryRun getDryRun() {
+    return DryRun.INSTANCE;
+  }
+
+  public static void resetDryRun() {
+    DryRun.INSTANCE.executedScripts.clear();
+  }
+
+  // enum singleton pattern
+  public static enum DryRun {
+    INSTANCE;
+    
+    // stores the scripts executed, per node, in the order they were executed
+    private final ListMultimap<NodeMetadata, RunScriptOnNode> executedScripts = synchronizedListMultimap(LinkedListMultimap
+        .<NodeMetadata, RunScriptOnNode> create());
+    
+    private final List<RunScriptOnNode> totallyOrderedScripts = Collections.synchronizedList(new ArrayList<RunScriptOnNode>());
 
-    public static void resetDryRun() {
-        DryRun.INSTANCE.executedScripts.clear();
+    DryRun() {
     }
 
-    // enum singleton pattern
-    public static enum DryRun {
-        INSTANCE;
-        // allow duplicate mappings and use deterministic ordering
-        private final ListMultimap<NodeMetadata, RunScriptOnNode> 
executedScripts = synchronizedListMultimap(LinkedListMultimap
-                .<NodeMetadata, RunScriptOnNode> create());
+    void newExecution(RunScriptOnNode runScript) {
+      NodeMetadata original = runScript.getNode();
+      //duplicate the NodeMetadata instance without credentials because
+      //NodeMetadata equals() contract uses credentials.
+      NodeMetadataImpl stored = new NodeMetadataImpl(
+          original.getProviderId(),
+          original.getName(),
+          original.getId(),
+          original.getLocation(),
+          original.getUri(),
+          original.getUserMetadata(),
+          original.getTags(),
+          original.getGroup(),
+          original.getHardware(),
+          original.getImageId(),
+          original.getOperatingSystem(),
+          original.getState(),
+          original.getLoginPort(),
+          original.getPrivateAddresses(),
+          original.getPublicAddresses(),
+          null,
+          null,
+          original.getHostname());
+      executedScripts.put(stored, runScript);
+      totallyOrderedScripts.add(runScript);
+    }
 
-        DryRun() {
-        }
+    public synchronized ListMultimap<NodeMetadata, RunScriptOnNode> getExecutions() {
+      return ImmutableListMultimap.copyOf(executedScripts);
+    }
+    
+    public synchronized List<RunScriptOnNode> getTotallyOrderedExecutions() {
+      return ImmutableList.copyOf(totallyOrderedScripts);
+    }
 
-        void newExecution(RunScriptOnNode runScript) {
-            executedScripts.put(runScript.getNode(), runScript);
-        }
+  }
 
-        public synchronized ListMultimap<NodeMetadata, RunScriptOnNode> 
getExecutions() {
-            return ImmutableListMultimap.copyOf(executedScripts);
-        }
+  public class SaveDryRunsByInterceptingRunScriptOnNodeCreation implements
+      MethodInterceptor {
 
+    public Object invoke(MethodInvocation i) throws Throwable {
+      if (i.getMethod().getName().equals("create")) {
+        Object returnVal = i.proceed();
+        getDryRun().newExecution(RunScriptOnNode.class.cast(returnVal));
+        return returnVal;
+      } else {
+        return i.proceed();
+      }
     }
+  }
 
-    public class SaveDryRunsByInterceptingRunScriptOnNodeCreation implements
-            MethodInterceptor {
-
-        public Object invoke(MethodInvocation i) throws Throwable {
-            if (i.getMethod().getName().equals("create")) {
-                Object returnVal = i.proceed();
-                
getDryRun().newExecution(RunScriptOnNode.class.cast(returnVal));
-                return returnVal;
-            } else {
-                return i.proceed();
-            }
-        }
+  @Override
+  protected void configure() {
+    bind(SshClient.Factory.class).to(LogSshClient.Factory.class);
+    bindInterceptor(subclassesOf(RunScriptOnNode.class),
+        returns(identicalTo(ExecResponse.class)),
+        new LogCallToRunScriptOnNode());
+    bindInterceptor(subclassesOf(RunScriptOnNode.Factory.class),
+        returns(identicalTo(RunScriptOnNode.class)),
+        new SaveDryRunsByInterceptingRunScriptOnNodeCreation());
+  }
+
+  private static class Key {
+    private final IPSocket socket;
+    private final Credentials creds;
+    private final NodeMetadata node;
+
+    Key(IPSocket socket, Credentials creds, @Nullable NodeMetadata node) {
+      this.socket = socket;
+      this.creds = creds;
+      this.node = node;
     }
 
+    // only the user, not password should be used to identify this
+    // connection
     @Override
-    protected void configure() {
-        bind(SshClient.Factory.class).to(LogSshClient.Factory.class);
-        bindInterceptor(subclassesOf(RunScriptOnNode.class),
-                returns(identicalTo(ExecResponse.class)),
-                new LogCallToRunScriptOnNode());
-        bindInterceptor(subclassesOf(RunScriptOnNode.Factory.class),
-                returns(identicalTo(RunScriptOnNode.class)),
-                new SaveDryRunsByInterceptingRunScriptOnNodeCreation());
-    }
-
-    private static class Key {
-        private final IPSocket socket;
-        private final Credentials creds;
-        private final NodeMetadata node;
-
-        Key(IPSocket socket, Credentials creds, @Nullable NodeMetadata node) {
-            this.socket = socket;
-            this.creds = creds;
-            this.node = node;
-        }
-
-        // only the user, not password should be used to identify this
-        // connection
-        @Override
-        public int hashCode() {
-            return Objects.hashCode(socket, creds.identity);
-        }
-
-        @Override
-        public boolean equals(Object that) {
-            if (that == null)
-                return false;
-            return Objects.equal(this.toString(), that.toString());
-        }
+    public int hashCode() {
+      return Objects.hashCode(socket, creds.identity);
+    }
 
-        @Override
-        public String toString() {
-            return String.format("%s#%s@%s:%d", node.getName(), creds.identity,
-                    socket.getAddress(), socket.getPort());
-        }
+    @Override
+    public boolean equals(Object that) {
+      if (that == null)
+        return false;
+      return Objects.equal(this.toString(), that.toString());
     }
 
-    @Singleton
-    private static class LogSshClient implements SshClient {
-        private final Key key;
+    @Override
+    public String toString() {
+      return String.format("%s#%s@%s:%d", node.getName(), creds.identity,
+          socket.getAddress(), socket.getPort());
+    }
+  }
 
-        public LogSshClient(Key key) {
-            this.key = key;
-        }
+  @Singleton
+  private static class LogSshClient implements SshClient {
+    private final Key key;
 
-        private static class NodeHasAddress implements Predicate<NodeMetadata> 
{
-            private final String address;
+    public LogSshClient(Key key) {
+      this.key = key;
+    }
 
-            private NodeHasAddress(String address) {
-                this.address = address;
-            }
-
-            @Override
-            public boolean apply(NodeMetadata arg0) {
-                return contains(
-                        concat(arg0.getPrivateAddresses(),
-                                arg0.getPublicAddresses()), address);
-            }
-        }
+    private static class NodeHasAddress implements Predicate<NodeMetadata> {
+      private final String address;
 
-        @Singleton
-        public static class Factory implements SshClient.Factory {
+      private NodeHasAddress(String address) {
+        this.address = address;
+      }
+
+      @Override
+      public boolean apply(NodeMetadata arg0) {
+        return contains(
+            concat(arg0.getPrivateAddresses(), arg0.getPublicAddresses()),
+            address);
+      }
+    }
 
-            // this will ensure only one state per ip socket/user
-            private final Map<Key, SshClient> clientMap;
-            // easy access to node metadata
-            private final ConcurrentMap<String, NodeMetadata> nodes;
-
-            @SuppressWarnings("unused")
-            @Inject
-            public Factory(final ConcurrentMap<String, NodeMetadata> nodes) {
-                this.clientMap = new MapMaker()
-                        .makeComputingMap(new Function<Key, SshClient>() {
-
-                            @Override
-                            public SshClient apply(Key key) {
-                                return new LogSshClient(key);
-                            }
-
-                        });
-                this.nodes = nodes;
-            }
-
-            @Override
-            public SshClient create(final IPSocket socket,
-                    Credentials loginCreds) {
-                return clientMap.get(new Key(socket, loginCreds,
-                        find(nodes.values(),
-                                new NodeHasAddress(socket.getAddress()))));
-            }
-
-            @Override
-            public SshClient create(IPSocket socket, String username,
-                    String password) {
-                return create(socket, new Credentials(username, password));
-            }
-
-            @Override
-            public SshClient create(IPSocket socket, String username,
-                    byte[] privateKey) {
-                return create(socket, new Credentials(username, new String(
-                        privateKey)));
-            }
-        }
+    @Singleton
+    public static class Factory implements SshClient.Factory {
 
-        private final Map<String, Payload> contents = Maps.newConcurrentMap();
+      // this will ensure only one state per ip socket/user
+      private final Map<Key, SshClient> clientMap;
+      // easy access to node metadata
+      private final ConcurrentMap<String, NodeMetadata> nodes;
+
+      @SuppressWarnings("unused")
+      @Inject
+      public Factory(final ConcurrentMap<String, NodeMetadata> nodes) {
+        this.clientMap = new MapMaker()
+            .makeComputingMap(new Function<Key, SshClient>() {
+
+              @Override
+              public SshClient apply(Key key) {
+                return new LogSshClient(key);
+              }
+
+            });
+        this.nodes = nodes;
+      }
+
+      @Override
+      public SshClient create(final IPSocket socket, Credentials loginCreds) {
+        return clientMap.get(new Key(socket, loginCreds, find(nodes.values(),
+            new NodeHasAddress(socket.getAddress()))));
+      }
+
+      @Override
+      public SshClient create(IPSocket socket, String username, String password) {
+        return create(socket, new Credentials(username, password));
+      }
+
+      @Override
+      public SshClient create(IPSocket socket, String username,
+          byte[] privateKey) {
+        return create(socket, new Credentials(username, new String(privateKey)));
+      }
+    }
 
-        @Override
-        public void connect() {
-            LOG.info(toString() + " >> connect()");
-        }
+    private final Map<String, Payload> contents = Maps.newConcurrentMap();
 
-        @Override
-        public void disconnect() {
-            LOG.info(toString() + " >> disconnect()");
-        }
+    @Override
+    public void connect() {
+      LOG.info(toString() + " >> connect()");
+    }
 
-        public ThreadLocal<AtomicInteger> delay = new 
ThreadLocal<AtomicInteger>();
-        public static int callDelay = 5;
+    @Override
+    public void disconnect() {
+      LOG.info(toString() + " >> disconnect()");
+    }
 
-        @Override
-        public ExecResponse exec(String script) {
-            LOG.info(toString() + " >> exec(" + script + ")");
-            // jclouds checks the status code, but only when seeing if a job
-            // completed. to emulate real scripts all calls are delayed by
-            // forcing
-            // jclouds to call status multiple times (5 by default) before
-            // returning exitCode 1.
-            if (delay.get() == null) {
-                delay.set(new AtomicInteger(0));
-            }
-            ExecResponse exec;
-            if (script.endsWith(" status")) {
-                if (delay.get().get() >= callDelay) {
-                    exec = new ExecResponse("", "", 1);
-                } else {
-                    exec = new ExecResponse("", "", 0);
-                }
-            } else {
-                exec = new ExecResponse("", "", 0);
-            }
+    public ThreadLocal<AtomicInteger> delay = new ThreadLocal<AtomicInteger>();
+    public static int callDelay = 5;
 
-            LOG.info(toString() + " << " + exec);
+    @Override
+    public ExecResponse exec(String script) {
+      LOG.info(toString() + " >> exec(" + script + ")");
+      // jclouds checks the status code, but only when seeing if a job
+      // completed. to emulate real scripts all calls are delayed by
+      // forcing
+      // jclouds to call status multiple times (5 by default) before
+      // returning exitCode 1.
+      if (delay.get() == null) {
+        delay.set(new AtomicInteger(0));
+      }
+      ExecResponse exec;
+      if (script.endsWith(" status")) {
+        if (delay.get().get() >= callDelay) {
+          exec = new ExecResponse("", "", 1);
+        } else {
+          exec = new ExecResponse("", "", 0);
+        }
+      } else {
+        exec = new ExecResponse("", "", 0);
+      }
 
-            delay.get().getAndIncrement();
-            return exec;
-        }
+      LOG.info(toString() + " << " + exec);
 
-        @Override
-        public Payload get(String path) {
-            LOG.info(toString() + " >> get(" + path + ")");
-            Payload returnVal = contents.get(path);
-            LOG.info(toString() + " << md5[" + md5Hex(returnVal) + "]");
-            return returnVal;
-        }
+      delay.get().getAndIncrement();
+      return exec;
+    }
 
-        @Override
-        public String getHostAddress() {
-            return key.socket.getAddress();
-        }
+    @Override
+    public Payload get(String path) {
+      LOG.info(toString() + " >> get(" + path + ")");
+      Payload returnVal = contents.get(path);
+      LOG.info(toString() + " << md5[" + md5Hex(returnVal) + "]");
+      return returnVal;
+    }
 
-        @Override
-        public String getUsername() {
-            return key.creds.identity;
-        }
+    @Override
+    public String getHostAddress() {
+      return key.socket.getAddress();
+    }
 
-        @Override
-        public void put(String path, Payload payload) {
-            LOG.info(toString() + " >> put(" + path + ", md5["
-                    + md5Hex(payload) + "])");
-            contents.put(path, payload);
-        }
+    @Override
+    public String getUsername() {
+      return key.creds.identity;
+    }
 
-        @Override
-        public boolean equals(Object obj) {
-            if (obj == null)
-                return false;
-            return toString().equals(obj.toString());
-        }
+    @Override
+    public void put(String path, Payload payload) {
+      LOG.info(toString() + " >> put(" + path + ", md5[" + md5Hex(payload)
+          + "])");
+      contents.put(path, payload);
+    }
 
-        @Override
-        public int hashCode() {
-            return key.hashCode();
-        }
+    @Override
+    public boolean equals(Object obj) {
+      if (obj == null)
+        return false;
+      return toString().equals(obj.toString());
+    }
 
-        @Override
-        public String toString() {
-            return key.toString();
-        }
+    @Override
+    public int hashCode() {
+      return key.hashCode();
+    }
 
-        @Override
-        public void put(String path, String text) {
-            put(path, new StringPayload(text));
-        }
+    @Override
+    public String toString() {
+      return key.toString();
     }
 
-    public static String md5Hex(String in) {
-        return md5Hex(newInputStreamSupplier(in.getBytes()));
+    @Override
+    public void put(String path, String text) {
+      put(path, new StringPayload(text));
     }
+  }
 
-    public static String md5Hex(InputSupplier<? extends InputStream> supplier) 
{
-        try {
-            return CryptoStreams.md5Hex(supplier);
-        } catch (IOException e) {
-            throw new RuntimeException(e);
-        }
+  public static String md5Hex(String in) {
+    return md5Hex(newInputStreamSupplier(in.getBytes()));
+  }
+
+  public static String md5Hex(InputSupplier<? extends InputStream> supplier) {
+    try {
+      return CryptoStreams.md5Hex(supplier);
+    } catch (IOException e) {
+      throw new RuntimeException(e);
     }
+  }
 
 }

Modified: 
whirr/trunk/core/src/test/java/org/apache/whirr/service/DryRunModuleTest.java
URL: 
http://svn.apache.org/viewvc/whirr/trunk/core/src/test/java/org/apache/whirr/service/DryRunModuleTest.java?rev=1198689&r1=1198688&r2=1198689&view=diff
==============================================================================
--- 
whirr/trunk/core/src/test/java/org/apache/whirr/service/DryRunModuleTest.java 
(original)
+++ 
whirr/trunk/core/src/test/java/org/apache/whirr/service/DryRunModuleTest.java 
Mon Nov  7 10:05:09 2011
@@ -18,8 +18,14 @@
 
 package org.apache.whirr.service;
 
-import com.google.common.collect.ListMultimap;
-import com.jcraft.jsch.JSchException;
+import static junit.framework.Assert.assertSame;
+import static junit.framework.Assert.assertTrue;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map.Entry;
+
 import org.apache.commons.configuration.CompositeConfiguration;
 import org.apache.commons.configuration.ConfigurationException;
 import org.apache.whirr.ClusterController;
@@ -31,80 +37,115 @@ import org.jclouds.compute.domain.NodeMe
 import org.jclouds.scriptbuilder.InitBuilder;
 import org.junit.Test;
 
-import java.io.IOException;
-
-import static junit.framework.Assert.assertFalse;
+import com.google.common.collect.ListMultimap;
+import com.jcraft.jsch.JSchException;
 
 public class DryRunModuleTest {
 
-    public static class Noop2ClusterActionHandler extends
-            ClusterActionHandlerSupport {
-
-        @Override
-        public String getRole() {
-            return "noop2";
-        }
+  public static class Noop2ClusterActionHandler extends
+      ClusterActionHandlerSupport {
 
+    @Override
+    public String getRole() {
+      return "noop2";
     }
 
-    public static class Noop3ClusterActionHandler extends
-            ClusterActionHandlerSupport {
+  }
 
-        @Override
-        public String getRole() {
-            return "noop3";
-        }
+  public static class Noop3ClusterActionHandler extends
+      ClusterActionHandlerSupport {
+
+    @Override
+    public String getRole() {
+      return "noop3";
     }
+  }
 
-    /**
-     * Simple test that asserts that a 1 node cluster was launched and 
bootstrap
-     * and configure scripts were executed.
-     * 
-     * @throws ConfigurationException
-     * @throws IOException
-     * @throws JSchException
-     * @throws InterruptedException
-     */
-    @Test
-    public void testNoInitScriptsAfterConfigurationStarted()
-            throws ConfigurationException, JSchException, IOException,
-            InterruptedException {
-
-        CompositeConfiguration config = new CompositeConfiguration();
-        config.setProperty("whirr.provider", "stub");
-        config.setProperty("whirr.cluster-name", "stub-test");
-        config.setProperty("whirr.instance-templates",
-                "10 noop+noop3,10 noop2+noop,10 noop3+noop2");
-
-        ClusterSpec clusterSpec = ClusterSpec.withTemporaryKeys(config);
-        ClusterController controller = new ClusterController();
-
-        controller.launchCluster(clusterSpec);
-
-        DryRun dryRun = DryRunModule.getDryRun();
-        ListMultimap<NodeMetadata, RunScriptOnNode> list = dryRun
-                .getExecutions();
-
-        // this tests the barrier by making sure that once a configure
-        // script is executed no more setup scripts are executed.
-
-        boolean configStarted = false;
-        for (RunScriptOnNode script : list.values()) {
-            if (!configStarted && 
getScriptName(script).startsWith("configure")) {
-                configStarted = true;
-                continue;
-            }
-            if (configStarted) {
-                assertFalse(
-                  "A setup script was executed after the first configure 
script.",
-                  getScriptName(script).startsWith("setup"));
-            }
-        }
+  /**
+   * Simple test that tests dry run module and at the same time enforces clear
+   * separation of script execution phases.
+   * 
+   * @throws ConfigurationException
+   * @throws IOException
+   * @throws JSchException
+   * @throws InterruptedException
+   */
+  @Test
+  public void 
testNoInitScriptsAfterConfigurationStartedAndNoConfigScriptsAfterDestroy()
+      throws ConfigurationException, JSchException, IOException,
+      InterruptedException {
+
+    CompositeConfiguration config = new CompositeConfiguration();
+    config.setProperty("whirr.provider", "stub");
+    config.setProperty("whirr.cluster-name", "stub-test");
+    config.setProperty("whirr.instance-templates",
+        "10 noop+noop3,10 noop2+noop,10 noop3+noop2");
+
+    ClusterSpec clusterSpec = ClusterSpec.withTemporaryKeys(config);
+    ClusterController controller = new ClusterController();
+
+    controller.launchCluster(clusterSpec);
+    controller.destroyCluster(clusterSpec);
+
+    DryRun dryRun = DryRunModule.getDryRun();
+    ListMultimap<NodeMetadata, RunScriptOnNode> perNodeExecutions = dryRun
+        .getExecutions();
+    List<RunScriptOnNode> totalExecutions = dryRun
+        .getTotallyOrderedExecutions();
+
+    // assert that all nodes executed all three phases and in the right order
+    for (Entry<NodeMetadata, Collection<RunScriptOnNode>> entry : 
perNodeExecutions
+        .asMap().entrySet()) {
+      assertSame("An incorrect number of scripts was executed in the node",
+          entry.getValue().size(), 3);
+      List<RunScriptOnNode> asList = (List<RunScriptOnNode>) entry.getValue();
+      assertTrue("The bootstrap script was executed in the wrong order",
+          getScriptName(asList.get(0)).startsWith("setup"));
+      assertTrue("The configure script was executed in the wrong order",
+          getScriptName(asList.get(1)).startsWith("configure"));
+      assertTrue("The destroy script was executed in the wrong order",
+          getScriptName(asList.get(2)).startsWith("destroy"));
     }
 
-    private String getScriptName(RunScriptOnNode script) {
-        return ((InitBuilder) ((RunScriptOnNodeAsInitScriptUsingSsh) script)
-                .getStatement()).getInstanceName();
+    // this tests the barrier by making sure that once a configure
+    // script is executed no more setup scripts are executed.
+
+    boolean bootPhase = true;
+    boolean configPhase = false;
+    boolean destroyPhase = false;
+    for (RunScriptOnNode script : totalExecutions) {
+      if (bootPhase && !configPhase && 
getScriptName(script).startsWith("configure")) {
+        configPhase = true;
+        bootPhase = false;
+        continue;
+      }
+      if (configPhase && !destroyPhase && 
getScriptName(script).startsWith("destroy")) {
+        destroyPhase = true;
+        configPhase = false;
+        continue;
+      }
+      if (bootPhase) {
+        assertTrue(
+            "A script other than setup was executed in the bootstrap phase",
+            getScriptName(script).startsWith("setup"));
+      }
+      if (configPhase) {
+        assertTrue(
+            "A script other than configure was executed in the configure 
phase",
+            getScriptName(script).startsWith("configure"));
+      }
+      if (destroyPhase) {
+        assertTrue(
+            "A non-destroy script was executed after the first destroy script. 
["
+                + getScriptName(script) + "]", getScriptName(script)
+                .startsWith("destroy"));
+      }
     }
+  }
+
+  private String getScriptName(RunScriptOnNode script) {
+    return ((InitBuilder) ((RunScriptOnNodeAsInitScriptUsingSsh) script)
+        .getStatement()).getInstanceName();
+  }
 
 }


Reply via email to