Repository: geode
Updated Branches:
  refs/heads/feature/GEODE-2267 c2fdea18a -> 6c141f9e1


GEODE-2414: Destroy export logs region after using it to avoid unnecessary network traffic


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/6c141f9e
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/6c141f9e
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/6c141f9e

Branch: refs/heads/feature/GEODE-2267
Commit: 6c141f9e1f6c086ddb7507418036efc449bcf22a
Parents: c2fdea1
Author: Jared Stewart <jstew...@pivotal.io>
Authored: Fri Feb 17 11:51:26 2017 -0800
Committer: Jared Stewart <jstew...@pivotal.io>
Committed: Fri Feb 17 11:51:26 2017 -0800

----------------------------------------------------------------------
 .../cli/commands/MiscellaneousCommands.java     |  14 +-
 .../cli/functions/ExportLogsFunction.java       |  18 ++-
 .../cli/util/ExportLogsCacheWriter.java         |   6 +-
 .../internal/cli/commands/ExportLogsDUnit.java  | 132 ++++++++++++++-----
 .../ExportLogsFunctionIntegrationTest.java      |  15 ++-
 ...xportLogsFunctionLocatorIntegrationTest.java |  44 -------
 .../apache/geode/test/dunit/rules/Member.java   |   2 +-
 7 files changed, 138 insertions(+), 93 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
index ffbbc69..e720d09 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/MiscellaneousCommands.java
@@ -723,21 +723,24 @@ public class MiscellaneousCommands implements CommandMarker {
 
     try {
       GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
-      Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion();
-
-      ExportLogsCacheWriter cacheWriter =
-          (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();
 
      Set<DistributedMember> targetMembers = CliUtil.findMembersIncludingLocators(groups, memberIds);
 
+
       Map<String, Path> zipFilesFromMembers = new HashMap<>();
       for (DistributedMember server : targetMembers) {
+        Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
+
+        ExportLogsCacheWriter cacheWriter =
+            (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();
+
         cacheWriter.startFile(server.getName());
 
         CliUtil.executeFunction(new ExportLogsFunction(),
                new ExportLogsFunction.Args(start, end, logLevel, onlyLogLevel), server)
             .getResult();
         Path zipFile = cacheWriter.endFile();
+        ExportLogsFunction.destroyExportLogsRegion();
         logger.info("Recieved zip file from member " + server.getId() + ": " + 
zipFile.toString());
         zipFilesFromMembers.put(server.getId(), zipFile);
       }
@@ -760,8 +763,11 @@ public class MiscellaneousCommands implements CommandMarker {
       FileUtils.deleteDirectory(tempDir.toFile());
      result = ResultBuilder.createInfoResult("File exported to: " + exportedLogsZipFile.toString());
     } catch (Exception ex) {
+      ex.printStackTrace();
       logger.error(ex, ex);
       result = ResultBuilder.createUserErrorResult(ex.getMessage());
+    } finally {
+      ExportLogsFunction.destroyExportLogsRegion();
     }
 
     LogWrapper.getInstance().fine("Exporting logs returning =" + result);
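
Taken together, the hunks above reduce to the loop sketched below: the export-logs region (and its cache writer) is now created per target member and destroyed as soon as that member's zip has been written, with a finally block as a safety net. This is an illustrative condensation, not additional committed code; targetMembers, start, end, logLevel and onlyLogLevel are the surrounding command-method variables.

    // Illustrative condensation of the updated export loop (not part of the commit).
    Map<String, Path> zipFilesFromMembers = new HashMap<>();
    try {
      for (DistributedMember server : targetMembers) {
        // The initiating member (true) installs an ExportLogsCacheWriter on the region.
        Region region = ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
        ExportLogsCacheWriter cacheWriter =
            (ExportLogsCacheWriter) region.getAttributes().getCacheWriter();

        cacheWriter.startFile(server.getName());
        CliUtil.executeFunction(new ExportLogsFunction(),
            new ExportLogsFunction.Args(start, end, logLevel, onlyLogLevel), server).getResult();
        Path zipFile = cacheWriter.endFile();

        // Destroy the region once this member's zip is on disk so log chunks are not
        // kept (and shipped around) longer than necessary.
        ExportLogsFunction.destroyExportLogsRegion();
        zipFilesFromMembers.put(server.getId(), zipFile);
      }
    } finally {
      // Also runs on failure, so the region cannot leak after an error.
      ExportLogsFunction.destroyExportLogsRegion();
    }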

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
index b562b7e..1dc89e9 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunction.java
@@ -68,7 +68,7 @@ public class ExportLogsFunction implements Function, InternalEntity {
       String memberId = cache.getDistributedSystem().getMemberId();
       LOGGER.info("ExportLogsFunction started for member {}", memberId);
 
-      Region exportLogsRegion = createOrGetExistingExportLogsRegion();
+      Region exportLogsRegion = createOrGetExistingExportLogsRegion(false);
 
       Args args = (Args) context.getArguments();
       LogFilter logFilter =
@@ -103,7 +103,7 @@ public class ExportLogsFunction implements Function, InternalEntity {
     return cache.getMyId().getVmKind() == LOCATOR_DM_TYPE;
   }
 
-  public static Region createOrGetExistingExportLogsRegion()
+  public static Region createOrGetExistingExportLogsRegion(boolean isInitiatingMember)
       throws IOException, ClassNotFoundException {
     GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
 
@@ -114,7 +114,7 @@ public class ExportLogsFunction implements Function, InternalEntity {
       regionAttrsFactory.setDataPolicy(DataPolicy.EMPTY);
       regionAttrsFactory.setScope(Scope.DISTRIBUTED_ACK);
 
-      if (isLocator(cache)) {
+      if (isInitiatingMember) {
         regionAttrsFactory.setCacheWriter(new ExportLogsCacheWriter());
       }
       InternalRegionArguments internalArgs = new InternalRegionArguments();
@@ -126,6 +126,18 @@ public class ExportLogsFunction implements Function, InternalEntity {
     return exportLogsRegion;
   }
 
+  public static void destroyExportLogsRegion() {
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+    Region exportLogsRegion = cache.getRegion(EXPORT_LOGS_REGION);
+    if (exportLogsRegion == null) {
+      return;
+    }
+
+    exportLogsRegion.destroyRegion();
+
+  }
+
   @Override
   public boolean isHA() {
     return false;
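
A minimal usage sketch of the changed region lifecycle, assuming a cache is already running: the member that initiates the export passes true so the ExportLogsCacheWriter is attached, members that merely contribute logs pass false, and destroyExportLogsRegion() returns early when the region no longer exists, so a second call is harmless.

    // Initiating member: region created with the cache writer attached.
    Region exportRegion = ExportLogsFunction.createOrGetExistingExportLogsRegion(true);

    // Contributing member (inside ExportLogsFunction.execute): no cache writer needed.
    // Region workerSide = ExportLogsFunction.createOrGetExistingExportLogsRegion(false);

    // Tear-down after the export completes; calling it again is a no-op because the
    // method returns as soon as getRegion(EXPORT_LOGS_REGION) comes back null.
    ExportLogsFunction.destroyExportLogsRegion();
    ExportLogsFunction.destroyExportLogsRegion();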

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
index e5457d4..a8b7225 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/cli/util/ExportLogsCacheWriter.java
@@ -38,9 +38,11 @@ public class ExportLogsCacheWriter extends CacheWriterAdapter implements Seriali
 
   @Override
   public void beforeCreate(EntryEvent event) throws CacheWriterException {
+    if (currentFile.getFileName().endsWith("server-2.zip")) {
+      System.out.println("We got data from server 2");
+    }
     if (currentOutputStream == null) {
-      //If no OutputStream is open, then this file chunk is intended for a different locator
-      return;
+      throw new IllegalStateException("No outputStream is open.  You must call startFile before sending data.");
     }
 
     try {
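
Because beforeCreate now fails fast instead of silently dropping chunks meant for another locator, callers have to bracket each function execution with the writer's startFile/endFile pair, roughly as below (member lookup and the function invocation itself are elided):

    cacheWriter.startFile(member.getName());   // open a stream first, or beforeCreate throws IllegalStateException
    // ... execute ExportLogsFunction on the member; log chunks arrive as region creates ...
    Path zipForMember = cacheWriter.endFile(); // closes the stream and returns the assembled zip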

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
index 41ebc0b..a405d0d 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/commands/ExportLogsDUnit.java
@@ -19,14 +19,19 @@ package org.apache.geode.management.internal.cli.commands;
 import static java.util.stream.Collectors.joining;
 import static java.util.stream.Collectors.toList;
 import static java.util.stream.Collectors.toSet;
+import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.FORMAT;
+import static org.apache.geode.management.internal.cli.commands.MiscellaneousCommands.ONLY_DATE_FORMAT;
 import static org.assertj.core.api.Assertions.assertThat;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.Region;
 import org.apache.geode.distributed.ConfigurationProperties;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.management.internal.cli.functions.ExportLogsFunction;
 import org.apache.geode.management.internal.cli.result.CommandResult;
+import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
 import org.apache.geode.management.internal.configuration.utils.ZipUtils;
 import org.apache.geode.test.dunit.IgnoredException;
 import org.apache.geode.test.dunit.rules.GfshShellConnectionRule;
@@ -43,17 +48,21 @@ import java.io.File;
 import java.io.IOException;
 import java.io.Serializable;
 import java.nio.charset.Charset;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Properties;
 import java.util.Set;
+import java.util.UUID;
 import java.util.stream.Stream;
 
 
 public class ExportLogsDUnit {
-
   private static final String ERROR_LOG_PREFIX = "[IGNORE]";
 
   @Rule
@@ -66,7 +75,7 @@ public class ExportLogsDUnit {
   private Server server1;
   private Server server2;
 
-  Map<Member, List<LogLine>> expectedMessages;
+  private Map<Member, List<LogLine>> defaultExpectedMessages;
 
   @Before
   public void setup() throws Exception {
@@ -79,14 +88,14 @@ public class ExportLogsDUnit {
 
     IgnoredException.addIgnoredException(ERROR_LOG_PREFIX);
 
-    expectedMessages = new HashMap<>();
-    expectedMessages.put(locator, listOfLogLines(locator.getName(), "info", "error", "debug"));
-    expectedMessages.put(server1, listOfLogLines(server1.getName(), "info", "error", "debug"));
-    expectedMessages.put(server2, listOfLogLines(server2.getName(), "info", "error", "debug"));
+    defaultExpectedMessages = new HashMap<>();
+    defaultExpectedMessages.put(locator, listOfLogLines(locator, "info", "error", "debug"));
+    defaultExpectedMessages.put(server1, listOfLogLines(server1, "info", "error", "debug"));
+    defaultExpectedMessages.put(server2, listOfLogLines(server2, "info", "error", "debug"));
 
     // log the messages in each of the members
-    for (Member member : expectedMessages.keySet()) {
-      List<LogLine> logLines = expectedMessages.get(member);
+    for (Member member : defaultExpectedMessages.keySet()) {
+      List<LogLine> logLines = defaultExpectedMessages.get(member);
 
       member.invoke(() -> {
         Logger logger = LogService.getLogger();
@@ -98,29 +107,52 @@ public class ExportLogsDUnit {
   }
 
   @Test
+  public void testExportWithStartDateFiltering() throws Exception {
+    ZonedDateTime cutoffTime = LocalDateTime.now().atZone(ZoneId.systemDefault());
+
+    String messageAfterCutoffTime = "[this message should not show up since it is after cutoffTime]";
+    LogLine logLineAfterCutoffTime = new LogLine(messageAfterCutoffTime, "info", true);
+    server1.invoke(() -> {
+      Logger logger = LogService.getLogger();
+      logLineAfterCutoffTime.writeLog(logger);
+    });
+
+    DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern(FORMAT);
+    String cutoffTimeString = dateTimeFormatter.format(cutoffTime);
+
+    CommandStringBuilder commandStringBuilder = new CommandStringBuilder("export logs");
+    commandStringBuilder.addOption("end-time", cutoffTimeString);
+    commandStringBuilder.addOption("log-level", "debug");
+    commandStringBuilder.addOption("dir", "someDir");
+
+    gfshConnector.executeAndVerifyCommand(commandStringBuilder.toString());
+
+    defaultExpectedMessages.get(server1).add(logLineAfterCutoffTime);
+    Set<String> acceptedLogLevels = Stream.of("info", "error", "debug").collect(toSet());
+    verifyZipFileContents(acceptedLogLevels);
+  }
+
+  @Test
   public void testExportWithThresholdLogLevelFilter() throws Exception {
 
     CommandResult result = gfshConnector.executeAndVerifyCommand(
         "export logs --log-level=info --only-log-level=false --dir=" + 
lsRule.getTempFolder()
             .getRoot().getCanonicalPath());
 
-    File unzippedLogFileDir = unzipExportedLogs();
     Set<String> acceptedLogLevels = Stream.of("info", 
"error").collect(toSet());
-    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
+    verifyZipFileContents(acceptedLogLevels);
 
   }
 
-
   @Test
   public void testExportWithExactLogLevelFilter() throws Exception {
     CommandResult result = gfshConnector.executeAndVerifyCommand(
         "export logs --log-level=info --only-log-level=true --dir=" + 
lsRule.getTempFolder()
             .getRoot().getCanonicalPath());
 
-    File unzippedLogFileDir = unzipExportedLogs();
 
     Set<String> acceptedLogLevels = Stream.of("info").collect(toSet());
-    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
+    verifyZipFileContents(acceptedLogLevels);
   }
 
   @Test
@@ -128,34 +160,53 @@ public class ExportLogsDUnit {
     CommandResult result = gfshConnector.executeAndVerifyCommand(
         "export logs  --dir=" + "someDir" /*  
lsRule.getTempFolder().getRoot().getCanonicalPath() */);
 
-    File unzippedLogFileDir = unzipExportedLogs();
     Set<String> acceptedLogLevels = Stream.of("info", "error", 
"debug").collect(toSet());
-    verifyZipFileContents(unzippedLogFileDir, acceptedLogLevels);
+    verifyZipFileContents(acceptedLogLevels);
 
-    // Ensure export logs region does not accumulate data
-    server1.invoke(() -> {
-      Region exportLogsRegion = ExportLogsFunction.createOrGetExistingExportLogsRegion();
-      assertThat(exportLogsRegion.size()).isEqualTo(0);
+    // Ensure export logs region gets cleaned up
+    server1.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+    server2.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+    locator.invoke(ExportLogsDUnit::verifyExportLogsRegionWasDestroyed);
+  }
+
+@Test
+public void regionBehavesProperly() throws IOException, ClassNotFoundException {
+    locator.invoke(() -> {
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
     });
-    server2.invoke(() -> {
-      Region exportLogsRegion = ExportLogsFunction.createOrGetExistingExportLogsRegion();
-      assertThat(exportLogsRegion.size()).isEqualTo(0);
+
+    server1.invoke(() -> {
+      ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
     });
+
     locator.invoke(() -> {
-      Region exportLogsRegion = ExportLogsFunction.createOrGetExistingExportLogsRegion();
-      assertThat(exportLogsRegion.size()).isEqualTo(0);
+      ExportLogsFunction.destroyExportLogsRegion();
+
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
     });
-  }
+
+    server1.invoke(() -> {
+      Cache cache = GemFireCacheImpl.getInstance();
+      assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+    });
+}
 
 
-  public void verifyZipFileContents(File unzippedLogFileDir, Set<String> acceptedLogLevels)
+  public void verifyZipFileContents(Set<String> acceptedLogLevels)
       throws IOException {
+    File unzippedLogFileDir = unzipExportedLogs();
+
     Set<File> dirsFromZipFile =
         Stream.of(unzippedLogFileDir.listFiles()).filter(File::isDirectory).collect(toSet());
-    assertThat(dirsFromZipFile).hasSize(expectedMessages.keySet().size());
+    assertThat(dirsFromZipFile).hasSize(defaultExpectedMessages.keySet().size());
 
     Set<String> expectedDirNames =
-        expectedMessages.keySet().stream().map(Member::getName).collect(toSet());
+        defaultExpectedMessages.keySet().stream().map(Member::getName).collect(toSet());
     Set<String> actualDirNames = dirsFromZipFile.stream().map(File::getName).collect(toSet());
     assertThat(actualDirNames).isEqualTo(expectedDirNames);
 
@@ -169,7 +220,7 @@ public class ExportLogsDUnit {
       throws IOException {
 
     String memberName = dirForMember.getName();
-    Member member = expectedMessages.keySet().stream()
+    Member member = defaultExpectedMessages.keySet().stream()
         .filter((Member aMember) -> aMember.getName().equals(memberName))
         .findFirst()
         .get();
@@ -189,8 +240,8 @@ public class ExportLogsDUnit {
         FileUtils.readLines(logFileForMember, Charset.defaultCharset()).stream()
             .collect(joining("\n"));
 
-    for (LogLine logLine : expectedMessages.get(member)) {
-      boolean shouldExpectLogLine = acceptedLogLevels.contains(logLine.level);
+    for (LogLine logLine : defaultExpectedMessages.get(member)) {
+      boolean shouldExpectLogLine = acceptedLogLevels.contains(logLine.level) && !logLine.shouldBeIgnoredDueToTimestamp;
 
       if (shouldExpectLogLine) {
         assertThat(logFileContents).contains(logLine.getMessage());
@@ -216,18 +267,29 @@ public class ExportLogsDUnit {
     return unzippedLogFileDir;
   }
 
-  private List<LogLine> listOfLogLines(String memberName, String... levels) {
-    return Stream.of(levels).map(level -> new LogLine(level, memberName)).collect(toList());
+  private List<LogLine> listOfLogLines(Member member, String... levels) {
+    return Stream.of(levels).map(level -> new LogLine(member, level)).collect(toList());
   }
 
+  private static void verifyExportLogsRegionWasDestroyed() {
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
+  }
 
   public static class LogLine implements Serializable {
     String level;
     String message;
+    boolean shouldBeIgnoredDueToTimestamp;
+
+    public LogLine(String message, String level, boolean shouldBeIgnoredDueToTimestamp) {
+      this.message = message;
+      this.level = level;
+      this.shouldBeIgnoredDueToTimestamp = shouldBeIgnoredDueToTimestamp;
+    }
 
-    public LogLine(String level, String memberName) {
+    public LogLine(Member member, String level) {
       this.level = level;
-      this.message = buildMessage(memberName);
+      this.message = buildMessage(member.getName());
     }
 
     public String getMessage() {

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
index 43c7c43..abae1de 100644
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
+++ b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionIntegrationTest.java
@@ -36,6 +36,7 @@ import org.junit.experimental.categories.Category;
 import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
+import java.io.IOException;
 
 @Category(IntegrationTest.class)
 public class ExportLogsFunctionIntegrationTest {
@@ -84,21 +85,27 @@ public class ExportLogsFunctionIntegrationTest {
     if (resultSender.getThrowable() != null) {
       throw resultSender.getThrowable();
     }
+
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
   }
 
   @Test
   public void createOrGetExistingExportLogsRegionDoesNotBlowUp() throws Exception {
-    ExportLogsFunction.createOrGetExistingExportLogsRegion();
+    ExportLogsFunction.createOrGetExistingExportLogsRegion(false);
 
     Cache cache = GemFireCacheImpl.getInstance();
     assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
   }
 
   @Test
-  public void isLocatorReturnsFalseForServer() {
-    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+  public void destroyExportLogsRegionWorksAsExpectedForInitiatingMember() throws IOException, ClassNotFoundException {
+    ExportLogsFunction.createOrGetExistingExportLogsRegion(true);
+    Cache cache = GemFireCacheImpl.getInstance();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNotNull();
 
-    assertThat(ExportLogsFunction.isLocator(cache)).isFalse();
+    ExportLogsFunction.destroyExportLogsRegion();
+    assertThat(cache.getRegion(ExportLogsFunction.EXPORT_LOGS_REGION)).isNull();
   }
 
 

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionLocatorIntegrationTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionLocatorIntegrationTest.java b/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionLocatorIntegrationTest.java
deleted file mode 100644
index 3500b7a..0000000
--- a/geode-core/src/test/java/org/apache/geode/management/internal/cli/functions/ExportLogsFunctionLocatorIntegrationTest.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.geode.management.internal.cli.functions;
-
-import static org.assertj.core.api.Assertions.assertThat;
-
-import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.test.dunit.rules.LocatorStarterRule;
-import org.apache.geode.test.junit.categories.IntegrationTest;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.experimental.categories.Category;
-
-import java.util.Properties;
-
-@Category(IntegrationTest.class)
-public class ExportLogsFunctionLocatorIntegrationTest {
-
-  @Rule
-  public LocatorStarterRule locatorStarterRule = new LocatorStarterRule( new Properties());
-
-  @Test
-  public void isLocatorReturnsTrueForLocators() {
-    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
-    assertThat(ExportLogsFunction.isLocator(cache)).isTrue();
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/geode/blob/6c141f9e/geode-core/src/test/java/org/apache/geode/test/dunit/rules/Member.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/test/dunit/rules/Member.java b/geode-core/src/test/java/org/apache/geode/test/dunit/rules/Member.java
index 7cc1eea..f993642 100644
--- a/geode-core/src/test/java/org/apache/geode/test/dunit/rules/Member.java
+++ b/geode-core/src/test/java/org/apache/geode/test/dunit/rules/Member.java
@@ -26,7 +26,7 @@ import java.io.Serializable;
  * A server or locator inside a DUnit {@link VM}.
  */
 public abstract class Member implements Serializable {
-  private VM vm;
+  private transient VM vm;
   private int port;
   private File workingDir;
   private String name;
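
Making vm transient matters because Member is Serializable and Member instances can end up captured by the serializable lambdas that DUnit ships to remote VMs; the process-local VM handle should not travel with them. A hypothetical fragment of that situation (assumes the ExportLogsDUnit-style setup above):

    // The lambda is serialized and executed in server1's VM; with `vm` transient,
    // any captured Member no longer drags the local VM handle into serialization.
    server1.invoke(() -> {
      Logger logger = LogService.getLogger();
      logger.info("logged inside the remote VM");
    });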
