Repository: incubator-geode
Updated Branches:
  refs/heads/feature/e2e-testing 49bea47c5 -> 905003c71


Add snapshot (export/import) test


Project: http://git-wip-us.apache.org/repos/asf/incubator-geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-geode/commit/905003c7
Tree: http://git-wip-us.apache.org/repos/asf/incubator-geode/tree/905003c7
Diff: http://git-wip-us.apache.org/repos/asf/incubator-geode/diff/905003c7

Branch: refs/heads/feature/e2e-testing
Commit: 905003c71c45e8a6bcb240be0be4ecc6dbb6f90d
Parents: 49bea47
Author: Jens Deppe <jde...@pivotal.io>
Authored: Thu Oct 13 08:41:13 2016 -0700
Committer: Jens Deppe <jde...@pivotal.io>
Committed: Thu Oct 13 08:41:13 2016 -0700

----------------------------------------------------------------------
 .../apache/geode/e2e/CopyingCacheListener.java  | 31 +++++++
 .../geode/e2e/FnGetPrimaryBucketSize.java       | 50 +++++++++++
 .../test/java/org/apache/geode/e2e/GetPut.java  | 33 -------
 .../java/org/apache/geode/e2e/GetPutRunner.java |  7 ++
 .../java/org/apache/geode/e2e/GetPutSteps.java  | 53 ++++++++---
 .../org/apache/geode/e2e/SnapshotRunner.java    |  7 ++
 .../java/org/apache/geode/e2e/StoryRunner.java  | 14 ++-
 .../test/java/org/apache/geode/e2e/Utils.java   | 92 ++++++++++++++++++++
 .../geode/e2e/container/DockerCluster.java      | 20 +++--
 .../org/apache/geode/e2e/get_put.story          |  3 +-
 .../org/apache/geode/e2e/snapshot.story         | 13 +++
 11 files changed, 269 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/CopyingCacheListener.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/CopyingCacheListener.java b/geode-core/src/test/java/org/apache/geode/e2e/CopyingCacheListener.java
new file mode 100644
index 0000000..e801bb8
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/e2e/CopyingCacheListener.java
@@ -0,0 +1,31 @@
+package org.apache.geode.e2e;
+
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.EntryEvent;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.util.CacheListenerAdapter;
+
+public class CopyingCacheListener<K, V> extends CacheListenerAdapter<K, V> {
+
+  private Cache cache;
+
+  public void afterCreate(EntryEvent<K, V> event) {
+    Region<K, V> r = getDstRegion(event);
+    r.put(event.getKey(), event.getNewValue());
+  }
+
+
+  private Region<K, V> getDstRegion(EntryEvent<K, V> e) {
+    String srcRegion = e.getRegion().getName();
+    return getCache().getRegion(srcRegion + "-copy");
+  }
+
+  private synchronized Cache getCache() {
+    if (cache == null) {
+      cache = CacheFactory.getAnyInstance();
+    }
+
+    return cache;
+  }
+}
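
As a point of reference, here is a minimal sketch (not part of this commit) of how such a listener could be attached to a server-side region through the standard Geode cache API; the embedded peer cache and the region names are assumptions for illustration only:

    package org.apache.geode.e2e;

    import org.apache.geode.cache.Cache;
    import org.apache.geode.cache.CacheFactory;
    import org.apache.geode.cache.Region;
    import org.apache.geode.cache.RegionShortcut;

    public class CopyingListenerSketch {
      public static void main(String[] args) {
        // Hypothetical embedded peer cache, purely for illustration.
        Cache cache = new CacheFactory().create();

        // The listener copies each new entry into "<region>-copy", so that region must exist.
        Region<String, String> copy = cache.<String, String>createRegionFactory(RegionShortcut.PARTITION)
          .create("BAR-copy");

        Region<String, String> source = cache.<String, String>createRegionFactory(RegionShortcut.PARTITION)
          .addCacheListener(new CopyingCacheListener<>())
          .create("BAR");

        source.put("key_0", "value_0");
        System.out.println(copy.get("key_0"));  // "value_0" once afterCreate has run
      }
    }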

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/FnGetPrimaryBucketSize.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/FnGetPrimaryBucketSize.java b/geode-core/src/test/java/org/apache/geode/e2e/FnGetPrimaryBucketSize.java
new file mode 100644
index 0000000..56895ae
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/e2e/FnGetPrimaryBucketSize.java
@@ -0,0 +1,50 @@
+package org.apache.geode.e2e;
+
+import java.util.Properties;
+
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.Declarable;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.cache.execute.ResultSender;
+import org.apache.geode.cache.partition.PartitionRegionHelper;
+
+public class FnGetPrimaryBucketSize implements Function, Declarable {
+
+  @Override
+  public boolean hasResult() {
+    return true;
+  }
+
+  @Override
+  public void execute(final FunctionContext context) {
+    String args = (String) context.getArguments();
+
+    Cache cache = CacheFactory.getAnyInstance();
+    Region region = PartitionRegionHelper.getLocalPrimaryData(cache.getRegion(args));
+    ResultSender rs = context.getResultSender();
+    rs.lastResult(region.size());
+  }
+
+  @Override
+  public String getId() {
+    return "region-size";
+  }
+
+  @Override
+  public boolean optimizeForWrite() {
+    return true;
+  }
+
+  @Override
+  public boolean isHA() {
+    return false;
+  }
+
+  @Override
+  public void init(final Properties props) {
+    // Empty
+  }
+}
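
For orientation, a rough sketch (again, not part of the commit) of registering and invoking this function by its id, "region-size", against an assumed embedded peer cache; the test itself drives it from a client via FunctionService.onServers():

    package org.apache.geode.e2e;

    import java.util.List;

    import org.apache.geode.cache.Cache;
    import org.apache.geode.cache.CacheFactory;
    import org.apache.geode.cache.Region;
    import org.apache.geode.cache.RegionShortcut;
    import org.apache.geode.cache.execute.FunctionService;
    import org.apache.geode.cache.execute.ResultCollector;

    public class RegionSizeFunctionSketch {
      public static void main(String[] args) {
        Cache cache = new CacheFactory().create();
        Region<String, String> region = cache.<String, String>createRegionFactory(RegionShortcut.PARTITION)
          .create("BAR");
        for (int i = 0; i < 100; i++) {
          region.put("key_" + i, "value_" + i);
        }

        // Register under the id returned by getId(), i.e. "region-size".
        FunctionService.registerFunction(new FnGetPrimaryBucketSize());

        // Invoke it and sum the per-member primary bucket sizes, as GetPutSteps does.
        ResultCollector rc = FunctionService.onRegion(region).withArgs("BAR").execute("region-size");
        List<Integer> sizes = (List<Integer>) rc.getResult();
        System.out.println(sizes.stream().mapToInt(Integer::intValue).sum());  // 100
      }
    }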

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/GetPut.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/GetPut.java b/geode-core/src/test/java/org/apache/geode/e2e/GetPut.java
deleted file mode 100644
index 4b4a79f..0000000
--- a/geode-core/src/test/java/org/apache/geode/e2e/GetPut.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package org.apache.geode.e2e;
-
-import org.jbehave.core.configuration.Configuration;
-import org.jbehave.core.configuration.MostUsefulConfiguration;
-import org.jbehave.core.io.LoadFromClasspath;
-import org.jbehave.core.junit.JUnitStory;
-import org.jbehave.core.reporters.Format;
-import org.jbehave.core.reporters.StoryReporterBuilder;
-import org.jbehave.core.steps.InjectableStepsFactory;
-import org.jbehave.core.steps.InstanceStepsFactory;
-
-public class GetPut extends JUnitStory {
-
-  // Here we specify the configuration, starting from default MostUsefulConfiguration,
-  // and changing only what is needed
-  @Override
-  public Configuration configuration() {
-    return new MostUsefulConfiguration()
-      // where to find the stories
-      .useStoryLoader(new LoadFromClasspath(this.getClass()))
-      // CONSOLE and TXT reporting
-      .useStoryReporterBuilder(new StoryReporterBuilder().withDefaultFormats()
-      .withFormats(Format.ANSI_CONSOLE, Format.TXT));
-  }
-
-  // Here we specify the steps classes
-  @Override
-  public InjectableStepsFactory stepsFactory() {
-    // varargs, can have more that one steps classes
-    return new InstanceStepsFactory(configuration(), new GetPutSteps());
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/GetPutRunner.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/GetPutRunner.java b/geode-core/src/test/java/org/apache/geode/e2e/GetPutRunner.java
new file mode 100644
index 0000000..a9985d3
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/e2e/GetPutRunner.java
@@ -0,0 +1,7 @@
+package org.apache.geode.e2e;
+
+public class GetPutRunner extends StoryRunner {
+  protected String storyGlob() {
+    return "**/get_put.story";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/GetPutSteps.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/GetPutSteps.java b/geode-core/src/test/java/org/apache/geode/e2e/GetPutSteps.java
index a457dcf..a4e1b5e 100644
--- a/geode-core/src/test/java/org/apache/geode/e2e/GetPutSteps.java
+++ b/geode-core/src/test/java/org/apache/geode/e2e/GetPutSteps.java
@@ -1,6 +1,6 @@
 package org.apache.geode.e2e;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.util.List;
@@ -65,18 +67,18 @@ public class GetPutSteps {
   }
 
   @When("I put $count entries into region $name")
-  public void when(int count, String name) throws Exception {
+  public void putEntries(int count, String name) throws Exception {
     ClientCache cache = getClientCache();
-    Region region = cache.createClientRegionFactory(ClientRegionShortcut.PROXY).create(name);
+    Region region = getOrCreateRegion(name);
     for (int i = 0; i < count; i++) {
       region.put("key_" + i, "value_" + i);
     }
   }
 
   @Then("I can get $count entries from region $name")
-  public void then(int count, String name) throws Exception {
+  public void getEntries(int count, String name) throws Exception {
     ClientCache cache = getClientCache();
-    Region region = cache.getRegion(name);
+    Region region = getOrCreateRegion(name);
 
     assertEquals(count, region.keySetOnServer().size());
     for (int i = 0; i < count; i++) {
@@ -92,15 +94,36 @@ public class GetPutSteps {
     }
   }
 
-  @When("I call function with id $fnId on region $regionName with argument 
$arg it returns $returns")
-  public void testRegionBucketSizeWithFunction(String fnId, String regionName, 
String arg, int returns) {
-    ClientCache cache = getClientCache();
-    Region region = cache.getRegion(regionName);
+  @When("I call function with id $fnId on region $regionName it returns 
$result")
+  public void testRegionBucketSizeWithFunction(String fnId, String regionName, 
int result) {
+    Region region = getOrCreateRegion(regionName);
     Execution exe = FunctionService.onServers(region.getRegionService());
     ResultCollector rs = exe.withArgs(regionName).execute(fnId);
     List<Integer> results = (List<Integer>) rs.getResult();
 
-    assertEquals(returns, results.stream().mapToInt(i -> i.intValue()).sum());
+    assertEquals(result, results.stream().mapToInt(i -> i.intValue()).sum());
+  }
+
+  @When("I export region $regionName")
+  public void exportRegion(String regionName) throws Exception {
+    for (String member : cluster.getServerMembers()) {
+      int rc = cluster.gfshCommand(String.format("export data --region=%1$s --file=export-%1$s.gfd --member=%2$s", regionName, member));
+      assertEquals(0, rc);
+    }
+  }
+
+  @Then("I import region $regionName")
+  public void importRegion(String regionName) throws Exception {
+    for (String member : cluster.getServerMembers()) {
+      int rc = cluster.gfshCommand(String.format("import data --region=%1$s --file=export-%1$s.gfd --member=%2$s", regionName, member));
+      assertEquals(0, rc);
+    }
+  }
+
+  @When("I destroy region $regionName")
+  public void destroyRegion(String regionName) throws Exception {
+    int rc = cluster.gfshCommand("destroy region --name=" + regionName);
+    assertEquals(0, rc);
   }
 
   private ClientCache getClientCache() {
@@ -120,5 +143,15 @@ public class GetPutSteps {
     return cache;
   }
 
+  private Region getOrCreateRegion(String name) {
+    ClientCache cache = getClientCache();
+
+    Region region = cache.getRegion(name);
+    if (region == null) {
+      region = cache.createClientRegionFactory(ClientRegionShortcut.PROXY).create(name);
+    }
+
+    return region;
+  }
 }
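
To make the new steps concrete, the export and import steps issue one gfsh command per server member; the snippet below (an illustration only, with made-up member names — the real ones come from DockerCluster.getServerMembers()) prints the strings they format:

    package org.apache.geode.e2e;

    import java.util.Arrays;
    import java.util.List;

    public class GfshCommandSketch {
      public static void main(String[] args) {
        List<String> members = Arrays.asList("server-0", "server-1", "server-2");  // hypothetical names
        for (String member : members) {
          // Same format string as exportRegion(); importRegion() differs only in "import data".
          System.out.println(String.format(
            "export data --region=%1$s --file=export-%1$s.gfd --member=%2$s", "BAR", member));
        }
        // => export data --region=BAR --file=export-BAR.gfd --member=server-0   (and so on)
      }
    }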
 

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/SnapshotRunner.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/SnapshotRunner.java b/geode-core/src/test/java/org/apache/geode/e2e/SnapshotRunner.java
new file mode 100644
index 0000000..95a63da
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/e2e/SnapshotRunner.java
@@ -0,0 +1,7 @@
+package org.apache.geode.e2e;
+
+public class SnapshotRunner extends StoryRunner {
+  protected String storyGlob() {
+    return "**/snapshot.story";
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/StoryRunner.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/StoryRunner.java b/geode-core/src/test/java/org/apache/geode/e2e/StoryRunner.java
index 21921cb..2e2832d 100644
--- a/geode-core/src/test/java/org/apache/geode/e2e/StoryRunner.java
+++ b/geode-core/src/test/java/org/apache/geode/e2e/StoryRunner.java
@@ -11,7 +11,6 @@ import org.jbehave.core.configuration.MostUsefulConfiguration;
 import org.jbehave.core.io.LoadFromClasspath;
 import org.jbehave.core.io.StoryFinder;
 import org.jbehave.core.junit.JUnitStories;
-import org.jbehave.core.junit.JUnitStory;
 import org.jbehave.core.reporters.Format;
 import org.jbehave.core.reporters.StoryReporterBuilder;
 import org.jbehave.core.steps.InjectableStepsFactory;
@@ -26,8 +25,15 @@ public class StoryRunner extends JUnitStories {
       // where to find the stories
       .useStoryLoader(new LoadFromClasspath(this.getClass()))
       // CONSOLE and TXT reporting
-      .useStoryReporterBuilder(new StoryReporterBuilder().withDefaultFormats()
-      .withFormats(Format.ANSI_CONSOLE, Format.TXT));
+      .useStoryReporterBuilder(new StoryReporterBuilder()
+        .withDefaultFormats()
+        .withFormats(Format.ANSI_CONSOLE, Format.TXT)
+        .withFailureTrace(true)
+      );
+  }
+
+  protected String storyGlob() {
+    return "**/*.story";
   }
 
   // Here we specify the steps classes
@@ -41,7 +47,7 @@ public class StoryRunner extends JUnitStories {
   protected List<String> storyPaths() {
     String codeLocation = codeLocationFromClass(this.getClass()).getFile() + "../../resources/test";
     List<String> stories = new ArrayList<>();
-    stories.add("**/*.story");
+    stories.add(storyGlob());
     return new StoryFinder().findPaths(codeLocation, stories, Collections.EMPTY_LIST);
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/Utils.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/Utils.java b/geode-core/src/test/java/org/apache/geode/e2e/Utils.java
new file mode 100644
index 0000000..9d9cd4d
--- /dev/null
+++ b/geode-core/src/test/java/org/apache/geode/e2e/Utils.java
@@ -0,0 +1,92 @@
+package org.apache.geode.e2e;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URLDecoder;
+import java.nio.charset.Charset;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+import org.apache.commons.io.IOUtils;
+
+public class Utils {
+
+  /**
+   * If the provided class was loaded from a jar file on the local file system, returns the
+   * absolute path to that jar file. If the provided class was loaded from a plain directory,
+   * the class is zipped into a temporary file whose path is returned to the caller.
+   *
+   * @param clazz the fully qualified name of the class whose location on the classpath we want
+   *
+   * @throws IllegalStateException if the specified class was loaded from a non-local location
+   * (such as via HTTP, from a database, or some other custom classloading mechanism).
+   */
+  public static String getJarForClassName(String clazz)
+    throws IOException, IllegalStateException, ClassNotFoundException {
+    Class context = Class.forName(clazz);
+    String rawName = context.getName();
+    String classFileName;
+    // rawName is something like package.name.ContainingClass$ClassName.
+    // We need to turn this into ContainingClass$ClassName.class.
+    {
+      int idx = rawName.lastIndexOf('.');
+      classFileName = (idx == -1 ? rawName : rawName.substring(idx + 1)) + ".class";
+    }
+
+    String uri = context.getResource(classFileName).toString();
+    if (uri.startsWith("file:")) {
+      return jarredClass(context.getPackage().getName(), uri.substring("file:".length()));
+    }
+    if (!uri.startsWith("jar:file:")) {
+      int idx = uri.indexOf(':');
+      String protocol = idx == -1 ? "(unknown)" : uri.substring(0, idx);
+      throw new IllegalStateException("This class has been loaded remotely via 
the " + protocol + " protocol. Only loading from a jar on the local file system 
is supported.");
+    }
+
+    int idx = uri.indexOf('!');
+    // As far as we know, the if statement below can never trigger; it is purely a sanity check.
+    if (idx == -1) {
+      throw new IllegalStateException("You appear to have loaded this class 
from a local jar file, but I can't make sense of the URL! " + uri.toString());
+    }
+
+    try {
+      String fileName = URLDecoder.decode(uri.substring("jar:file:".length(), idx), Charset.defaultCharset().name());
+      return new File(fileName).getAbsolutePath();
+    } catch (UnsupportedEncodingException e) {
+      throw new InternalError("default charset doesn't exist. Your VM is borked.");
+    }
+  }
+
+  /**
+   * Used to temporarily jar up class files. Given one or more files, creates a jar containing
+   * only those files. The jar is deleted on JVM exit.
+   * @param packageName the name of the package to which the given classes belong
+   * @param classFiles a vararg list of local class files, belonging to the given package
+   * @return the absolute path of a temporary zip file containing the given class files
+   */
+  private static String jarredClass(String packageName, String... classFiles) throws IOException {
+    File tempFile = File.createTempFile("class-", ".zip");
+    tempFile.deleteOnExit();
+    ZipOutputStream zipfile = new ZipOutputStream(new FileOutputStream(tempFile));
+
+    String packagePath = packageName.replace(".", File.separator);
+
+    for (String file : classFiles) {
+      int idx = file.lastIndexOf("/");
+      String baseClassName = file.substring(idx + 1);
+      String zipFileName = packagePath + File.separator + baseClassName;
+      zipfile.putNextEntry(new ZipEntry(zipFileName));
+
+      InputStream fileInputStream = new FileInputStream(file);
+      IOUtils.copy(fileInputStream, zipfile);
+      fileInputStream.close();
+    }
+    zipfile.close();
+
+    return tempFile.getAbsolutePath();
+  }
+}
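
A short usage sketch for the new utility (illustration only; the class name passed in and the gfsh wiring are assumptions — the actual deployment path is handled by the step classes and DockerCluster):

    package org.apache.geode.e2e;

    public class JarLocationSketch {
      public static void main(String[] args) throws Exception {
        // For a class loaded from a jar this returns the jar's absolute path; for a class loaded
        // from a plain directory it returns a temporary zip built around the .class file.
        String path = Utils.getJarForClassName("org.apache.geode.e2e.FnGetPrimaryBucketSize");
        System.out.println(path);
        // A step such as "Given class ... is deployed" could then hand this path to gfsh,
        // e.g. "deploy --jar=" + path.
      }
    }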

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/java/org/apache/geode/e2e/container/DockerCluster.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/e2e/container/DockerCluster.java b/geode-core/src/test/java/org/apache/geode/e2e/container/DockerCluster.java
index fde5060..cc514a2 100644
--- a/geode-core/src/test/java/org/apache/geode/e2e/container/DockerCluster.java
+++ b/geode-core/src/test/java/org/apache/geode/e2e/container/DockerCluster.java
@@ -1,9 +1,7 @@
 package org.apache.geode.e2e.container;
 
 import static com.google.common.base.Charsets.*;
-import static org.apache.geode.internal.cache.CacheServerLauncher.serverPort;
 
-import java.io.File;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -36,7 +34,7 @@ public class DockerCluster {
   private int locatorPort;
   private final String geodeHome;
   private final String scratchDir;
-  private int containerCount = 0;
+  private final List<String> serverMembers;
 
   private static final String SCRATCH_DIR_BASENAME = "scratch";
 
@@ -46,15 +44,19 @@ public class DockerCluster {
       throw new IllegalStateException("GEODE_HOME environment variable is not 
set");
     }
 
-    Path scratch = Files.createDirectory(Paths.get(geodeHome, SCRATCH_DIR_BASENAME));
-    scratch.toFile().deleteOnExit();
-    scratchDir = scratch.toString();
+    Path scratchDir = Paths.get(geodeHome, SCRATCH_DIR_BASENAME);
+    if (!Files.exists(scratchDir)) {
+      scratchDir = Files.createDirectory(scratchDir);
+    }
+    scratchDir.toFile().deleteOnExit();
+    this.scratchDir = scratchDir.toString();
 
     docker = DefaultDockerClient.builder().
       uri("unix:///var/run/docker.sock").build();
 
     this.name = name;
     this.nodeIds = new ArrayList<>();
+    this.serverMembers = new ArrayList<>();
   }
 
   public void setServerCount(int count) {
@@ -69,6 +71,10 @@ public class DockerCluster {
     return scratchDir;
   }
 
+  public List<String> getServerMembers() {
+    return serverMembers;
+  }
+
   /**
    * Given a file on the local host's filesystem, copy that file into the cluster's
    * global scratch area. The scratch file is deleted on exit.
@@ -181,6 +187,8 @@ public class DockerCluster {
 
       String id = startContainer(memberName, ports);
       execCommand(id, true, null, command);
+
+      serverMembers.add(memberName);
     }
 
     int runningServers = 0;

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/resources/org/apache/geode/e2e/get_put.story
----------------------------------------------------------------------
diff --git a/geode-core/src/test/resources/org/apache/geode/e2e/get_put.story b/geode-core/src/test/resources/org/apache/geode/e2e/get_put.story
index 0905390..b36a647 100644
--- a/geode-core/src/test/resources/org/apache/geode/e2e/get_put.story
+++ b/geode-core/src/test/resources/org/apache/geode/e2e/get_put.story
@@ -10,8 +10,9 @@ When I put 100 entries into region BAR
 Then I can get 100 entries from region BAR
 
 Given class org.apache.geode.e2e.FnGetPrimaryBucketSize is deployed
-When I call function with id org.apache.geode.e2e.FnGetPrimaryBucketSize on region BAR with argument BAZ it returns 100
+When I call function with id region-size on region BAR it returns 100
 
 Given server 0 is killed
 Then I can get 100 entries from region FOO
 Then I can get 100 entries from region BAR
+When I call function with id region-size on region BAR it returns 100

http://git-wip-us.apache.org/repos/asf/incubator-geode/blob/905003c7/geode-core/src/test/resources/org/apache/geode/e2e/snapshot.story
----------------------------------------------------------------------
diff --git a/geode-core/src/test/resources/org/apache/geode/e2e/snapshot.story b/geode-core/src/test/resources/org/apache/geode/e2e/snapshot.story
new file mode 100644
index 0000000..6a3ec48
--- /dev/null
+++ b/geode-core/src/test/resources/org/apache/geode/e2e/snapshot.story
@@ -0,0 +1,13 @@
+Scenario: Use gfsh to export and import data into Geode
+
+Given cluster is started with 1 locator and 3 servers
+Given region BAR is created as PARTITION_REDUNDANT with redundancy 1
+When I put 1000 entries into region BAR
+Then I can get 1000 entries from region BAR
+
+When I export region BAR
+When I destroy region BAR
+Given region BAR is created as PARTITION_REDUNDANT with redundancy 1
+Then I can get 0 entries from region BAR
+Then I import region BAR
+Then I can get 1000 entries from region BAR
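
The story drives the export/import round trip through gfsh, but the same cycle can be sketched with Geode's region snapshot API, which works with the same .gfd snapshot files. Everything below (embedded cache, file name) is an illustration rather than part of this commit:

    package org.apache.geode.e2e;

    import java.io.File;

    import org.apache.geode.cache.Cache;
    import org.apache.geode.cache.CacheFactory;
    import org.apache.geode.cache.Region;
    import org.apache.geode.cache.RegionShortcut;
    import org.apache.geode.cache.snapshot.SnapshotOptions.SnapshotFormat;

    public class SnapshotApiSketch {
      public static void main(String[] args) throws Exception {
        Cache cache = new CacheFactory().create();
        Region<String, String> bar = cache.<String, String>createRegionFactory(RegionShortcut.PARTITION)
          .create("BAR");
        for (int i = 0; i < 1000; i++) {
          bar.put("key_" + i, "value_" + i);
        }

        File snapshot = new File("export-BAR.gfd");
        bar.getSnapshotService().save(snapshot, SnapshotFormat.GEMFIRE);   // ~ "export data"

        bar.destroyRegion();                                               // ~ "destroy region"
        bar = cache.<String, String>createRegionFactory(RegionShortcut.PARTITION).create("BAR");

        bar.getSnapshotService().load(snapshot, SnapshotFormat.GEMFIRE);   // ~ "import data"
        System.out.println(bar.size());                                    // 1000
      }
    }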
