GEODE-2198: close and re-create the cache on a server when importing new 
cluster configuration

* When importing cluster config, first check if there is any non-empty region
* close and re-create cache if no data exists when importing new cluster 
configuration
* put the acquire/release lock inside the ClusterConfigurationService instead 
of the command execution strategy.


Project: http://git-wip-us.apache.org/repos/asf/geode/repo
Commit: http://git-wip-us.apache.org/repos/asf/geode/commit/6f418be1
Tree: http://git-wip-us.apache.org/repos/asf/geode/tree/6f418be1
Diff: http://git-wip-us.apache.org/repos/asf/geode/diff/6f418be1

Branch: refs/heads/develop
Commit: 6f418be119d72704215b73e13de902057b18ad4a
Parents: 0bfd2ea
Author: Jinmei Liao <[email protected]>
Authored: Fri Jan 6 12:38:23 2017 -0800
Committer: Jinmei Liao <[email protected]>
Committed: Thu Jan 19 10:32:24 2017 -0800

----------------------------------------------------------------------
 .../internal/ClusterConfigurationService.java   | 286 ++++++++-------
 .../geode/internal/cache/GemFireCacheImpl.java  |   6 +
 .../geode/management/cli/CliMetaData.java       |   8 +-
 .../CreateAlterDestroyRegionCommands.java       |   7 +-
 .../internal/cli/commands/DeployCommands.java   |   5 +-
 .../cli/commands/DiskStoreCommands.java         |  49 ++-
 ...xportImportClusterConfigurationCommands.java | 319 +++++++++++++++++
 ...ExportImportSharedConfigurationCommands.java | 270 ---------------
 .../internal/cli/commands/IndexCommands.java    |  15 +-
 .../internal/cli/commands/PDXCommands.java      |   2 +-
 .../internal/cli/commands/QueueCommands.java    |  15 +-
 .../internal/cli/commands/WanCommands.java      |  46 +--
 .../cli/remote/RemoteExecutionStrategy.java     |  52 +--
 .../functions/RecreateCacheFunction.java        |  50 +++
 .../RegionsWithDataOnServerFunction.java        |  43 +++
 .../extension/mock/MockExtensionCommands.java   |  17 +-
 ...erConfigurationServiceCommandsDUnitTest.java | 344 -------------------
 .../internal/configuration/ClusterConfig.java   |   3 +-
 .../configuration/ClusterConfigBaseTest.java    |   6 +-
 .../ClusterConfigImportDUnitTest.java           |  68 +++-
 .../ClusterConfigStartMemberDUnitTest.java      |   2 +-
 .../ClusterConfigWithSecurityDUnitTest.java     | 109 ++++++
 .../test/dunit/rules/ServerStarterRule.java     |   3 +
 .../codeAnalysis/sanctionedSerializables.txt    |  10 +-
 .../configuration/cluster_config_security.zip   | Bin 0 -> 950 bytes
 .../internal/cli/LuceneIndexCommands.java       |  26 +-
 .../cli/commands/CommandOverHttpDUnitTest.java  |   5 +-
 27 files changed, 850 insertions(+), 916 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/distributed/internal/ClusterConfigurationService.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/distributed/internal/ClusterConfigurationService.java
 
b/geode-core/src/main/java/org/apache/geode/distributed/internal/ClusterConfigurationService.java
index 1d4030a..95d1a5b 100644
--- 
a/geode-core/src/main/java/org/apache/geode/distributed/internal/ClusterConfigurationService.java
+++ 
b/geode-core/src/main/java/org/apache/geode/distributed/internal/ClusterConfigurationService.java
@@ -117,23 +117,6 @@ public class ClusterConfigurationService {
   private GemFireCacheImpl cache;
   private final DistributedLockService sharedConfigLockingService;
 
-  /**
-   * Gets or creates (if not created) shared configuration lock service
-   */
-  public static DistributedLockService 
getSharedConfigLockService(DistributedSystem ds) {
-    DistributedLockService sharedConfigDls =
-        DLockService.getServiceNamed(SHARED_CONFIG_LOCK_SERVICE_NAME);
-    try {
-      if (sharedConfigDls == null) {
-        sharedConfigDls = DLockService.create(SHARED_CONFIG_LOCK_SERVICE_NAME,
-            (InternalDistributedSystem) ds, true, true);
-      }
-    } catch (IllegalArgumentException e) {
-      return DLockService.getServiceNamed(SHARED_CONFIG_LOCK_SERVICE_NAME);
-    }
-    return sharedConfigDls;
-  }
-
   public ClusterConfigurationService(Cache cache) throws IOException {
     this.cache = (GemFireCacheImpl) cache;
     Properties properties = cache.getDistributedSystem().getProperties();
@@ -160,36 +143,57 @@ public class ClusterConfigurationService {
     status.set(SharedConfigurationStatus.NOT_STARTED);
   }
 
+  /**
+   * Gets or creates (if not created) shared configuration lock service
+   */
+  private DistributedLockService getSharedConfigLockService(DistributedSystem 
ds) {
+    DistributedLockService sharedConfigDls =
+        DLockService.getServiceNamed(SHARED_CONFIG_LOCK_SERVICE_NAME);
+    try {
+      if (sharedConfigDls == null) {
+        sharedConfigDls = DLockService.create(SHARED_CONFIG_LOCK_SERVICE_NAME,
+            (InternalDistributedSystem) ds, true, true);
+      }
+    } catch (IllegalArgumentException e) {
+      return DLockService.getServiceNamed(SHARED_CONFIG_LOCK_SERVICE_NAME);
+    }
+    return sharedConfigDls;
+  }
 
   /**
    * Adds/replaces the xml entity in the shared configuration we don't need to 
trigger the change
    * listener for this modification, so it's ok to operate on the original 
configuration object
    */
   public void addXmlEntity(XmlEntity xmlEntity, String[] groups) {
-    Region<String, Configuration> configRegion = getConfigurationRegion();
-    if (groups == null || groups.length == 0) {
-      groups = new String[] {ClusterConfigurationService.CLUSTER_CONFIG};
-    }
-    for (String group : groups) {
-      Configuration configuration = (Configuration) configRegion.get(group);
-      if (configuration == null) {
-        configuration = new Configuration(group);
-      }
-      String xmlContent = configuration.getCacheXmlContent();
-      if (xmlContent == null || xmlContent.isEmpty()) {
-        StringWriter sw = new StringWriter();
-        PrintWriter pw = new PrintWriter(sw);
-        CacheXmlGenerator.generateDefault(pw);
-        xmlContent = sw.toString();
+    lockSharedConfiguration();
+    try {
+      Region<String, Configuration> configRegion = getConfigurationRegion();
+      if (groups == null || groups.length == 0) {
+        groups = new String[] {ClusterConfigurationService.CLUSTER_CONFIG};
       }
-      try {
-        final Document doc = 
XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
-        XmlUtils.addNewNode(doc, xmlEntity);
-        configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
-        configRegion.put(group, configuration);
-      } catch (Exception e) {
-        logger.error("error updating cluster configuration for group " + 
group, e);
+      for (String group : groups) {
+        Configuration configuration = (Configuration) configRegion.get(group);
+        if (configuration == null) {
+          configuration = new Configuration(group);
+        }
+        String xmlContent = configuration.getCacheXmlContent();
+        if (xmlContent == null || xmlContent.isEmpty()) {
+          StringWriter sw = new StringWriter();
+          PrintWriter pw = new PrintWriter(sw);
+          CacheXmlGenerator.generateDefault(pw);
+          xmlContent = sw.toString();
+        }
+        try {
+          final Document doc = 
XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
+          XmlUtils.addNewNode(doc, xmlEntity);
+          configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
+          configRegion.put(group, configuration);
+        } catch (Exception e) {
+          logger.error("error updating cluster configuration for group " + 
group, e);
+        }
       }
+    } finally {
+      unlockSharedConfiguration();
     }
   }
 
@@ -197,66 +201,76 @@ public class ClusterConfigurationService {
    * Deletes the xml entity from the shared configuration.
    */
   public void deleteXmlEntity(final XmlEntity xmlEntity, String[] groups) {
-    Region<String, Configuration> configRegion = getConfigurationRegion();
-    // No group is specified, so delete in every single group if it exists.
-    if (groups == null) {
-      Set<String> groupSet = configRegion.keySet();
-      groups = groupSet.toArray(new String[groupSet.size()]);
-    }
-    for (String group : groups) {
-      Configuration configuration = (Configuration) configRegion.get(group);
-      if (configuration != null) {
-        String xmlContent = configuration.getCacheXmlContent();
-        try {
-          if (xmlContent != null && !xmlContent.isEmpty()) {
-            Document doc = 
XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
-            XmlUtils.deleteNode(doc, xmlEntity);
-            configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
-            configRegion.put(group, configuration);
+    lockSharedConfiguration();
+    try {
+      Region<String, Configuration> configRegion = getConfigurationRegion();
+      // No group is specified, so delete in every single group if it exists.
+      if (groups == null) {
+        Set<String> groupSet = configRegion.keySet();
+        groups = groupSet.toArray(new String[groupSet.size()]);
+      }
+      for (String group : groups) {
+        Configuration configuration = (Configuration) configRegion.get(group);
+        if (configuration != null) {
+          String xmlContent = configuration.getCacheXmlContent();
+          try {
+            if (xmlContent != null && !xmlContent.isEmpty()) {
+              Document doc = 
XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
+              XmlUtils.deleteNode(doc, xmlEntity);
+              configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
+              configRegion.put(group, configuration);
+            }
+          } catch (Exception e) {
+            logger.error("error updating cluster configuration for group " + 
group, e);
           }
-        } catch (Exception e) {
-          logger.error("error updating cluster configuration for group " + 
group, e);
         }
       }
+    } finally {
+      unlockSharedConfiguration();
     }
   }
 
   // we don't need to trigger the change listener for this modification, so 
it's ok to
   // operate on the original configuration object
   public void modifyXmlAndProperties(Properties properties, XmlEntity 
xmlEntity, String[] groups) {
-    if (groups == null) {
-      groups = new String[] {ClusterConfigurationService.CLUSTER_CONFIG};
-    }
-    Region<String, Configuration> configRegion = getConfigurationRegion();
-    for (String group : groups) {
-      Configuration configuration = configRegion.get(group);
-      if (configuration == null) {
-        configuration = new Configuration(group);
+    lockSharedConfiguration();
+    try {
+      if (groups == null) {
+        groups = new String[] {ClusterConfigurationService.CLUSTER_CONFIG};
       }
-
-      if (xmlEntity != null) {
-        String xmlContent = configuration.getCacheXmlContent();
-        if (xmlContent == null || xmlContent.isEmpty()) {
-          StringWriter sw = new StringWriter();
-          PrintWriter pw = new PrintWriter(sw);
-          CacheXmlGenerator.generateDefault(pw);
-          xmlContent = sw.toString();
+      Region<String, Configuration> configRegion = getConfigurationRegion();
+      for (String group : groups) {
+        Configuration configuration = configRegion.get(group);
+        if (configuration == null) {
+          configuration = new Configuration(group);
         }
-        try {
-          Document doc = XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
-          // Modify the cache attributes
-          XmlUtils.modifyRootAttributes(doc, xmlEntity);
-          // Change the xml content of the configuration and put it the config 
region
-          configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
-        } catch (Exception e) {
-          logger.error("error updating cluster configuration for group " + 
group, e);
+
+        if (xmlEntity != null) {
+          String xmlContent = configuration.getCacheXmlContent();
+          if (xmlContent == null || xmlContent.isEmpty()) {
+            StringWriter sw = new StringWriter();
+            PrintWriter pw = new PrintWriter(sw);
+            CacheXmlGenerator.generateDefault(pw);
+            xmlContent = sw.toString();
+          }
+          try {
+            Document doc = 
XmlUtils.createAndUpgradeDocumentFromXml(xmlContent);
+            // Modify the cache attributes
+            XmlUtils.modifyRootAttributes(doc, xmlEntity);
+            // Change the xml content of the configuration and put it in the 
config region
+            configuration.setCacheXmlContent(XmlUtils.prettyXml(doc));
+          } catch (Exception e) {
+            logger.error("error updating cluster configuration for group " + 
group, e);
+          }
         }
-      }
 
-      if (properties != null) {
-        configuration.getGemfireProperties().putAll(properties);
+        if (properties != null) {
+          configuration.getGemfireProperties().putAll(properties);
+        }
+        configRegion.put(group, configuration);
       }
-      configRegion.put(group, configuration);
+    } finally {
+      unlockSharedConfiguration();
     }
   }
 
@@ -269,6 +283,7 @@ public class ClusterConfigurationService {
    */
   public boolean addJarsToThisLocator(String[] jarNames, byte[][] jarBytes, 
String[] groups) {
     boolean success = true;
+    lockSharedConfiguration();
     try {
       if (groups == null) {
         groups = new String[] {ClusterConfigurationService.CLUSTER_CONFIG};
@@ -304,28 +319,13 @@ public class ClusterConfigurationService {
     } catch (Exception e) {
       success = false;
       logger.info(e.getMessage(), e);
+    } finally {
+      unlockSharedConfiguration();
     }
     return success;
   }
 
   /**
-   * read the jar bytes in the file system
-   */
-  // used when creating cluster config response
-  // and used when uploading the jars to another locator
-  public byte[] getJarBytesFromThisLocator(String group, String jarName) 
throws Exception {
-    Configuration configuration = getConfiguration(group);
-
-    File jar = getPathToJarOnThisLocator(group, jarName).toFile();
-
-    if (configuration == null || 
!configuration.getJarNames().contains(jarName) || !jar.exists()) {
-      return null;
-    }
-
-    return FileUtils.readFileToByteArray(jar);
-  }
-
-  /**
    * Removes the jar files from the shared configuration. used when undeploy 
jars
    *
    * @param jarNames Names of the jar files.
@@ -334,6 +334,7 @@ public class ClusterConfigurationService {
    */
   public boolean removeJars(final String[] jarNames, String[] groups) {
     boolean success = true;
+    lockSharedConfiguration();
     try {
       Region<String, Configuration> configRegion = getConfigurationRegion();
       if (groups == null) {
@@ -351,10 +352,29 @@ public class ClusterConfigurationService {
     } catch (Exception e) {
       logger.info("Exception occurred while deleting the jar files", e);
       success = false;
+    } finally {
+      unlockSharedConfiguration();
     }
     return success;
   }
 
+  /**
+   * read the jar bytes in the file system
+   */
+  // used when creating cluster config response
+  // and used when uploading the jars to another locator
+  public byte[] getJarBytesFromThisLocator(String group, String jarName) 
throws Exception {
+    Configuration configuration = getConfiguration(group);
+
+    File jar = getPathToJarOnThisLocator(group, jarName).toFile();
+
+    if (configuration == null || 
!configuration.getJarNames().contains(jarName) || !jar.exists()) {
+      return null;
+    }
+
+    return FileUtils.readFileToByteArray(jar);
+  }
+
   // used in the cluster config change listener when jarnames are changed in 
the internal region
   public void downloadJarFromOtherLocators(String groupName, String jarName) 
throws Exception {
     logger.info("Getting Jar files from other locators");
@@ -375,7 +395,6 @@ public class ClusterConfigurationService {
     FileUtils.writeByteArrayToFile(jarToWrite, jarBytes);
   }
 
-
   // used when creating cluster config response
   public Map<String, byte[]> getAllJarsFromThisLocator(Set<String> groups) 
throws Exception {
     Map<String, byte[]> jarNamesToJarBytes = new HashMap<>();
@@ -396,11 +415,6 @@ public class ClusterConfigurationService {
     return jarNamesToJarBytes;
   }
 
-  public void clearSharedConfiguration() throws Exception {
-    Region<String, Configuration> configRegion = getConfigurationRegion();
-    configRegion.clear();
-  }
-
   /**
    * Creates the shared configuration service
    * 
@@ -411,19 +425,18 @@ public class ClusterConfigurationService {
     status.set(SharedConfigurationStatus.STARTED);
     Region<String, Configuration> configRegion = this.getConfigurationRegion();
     lockSharedConfiguration();
-
     try {
       if (loadSharedConfigFromDir) {
         logger.info("Reading cluster configuration from '{}' directory",
             ClusterConfigurationService.CLUSTER_CONFIG_ARTIFACTS_DIR_NAME);
-        this.loadSharedConfigurationFromDisk();
+        loadSharedConfigurationFromDisk();
       } else {
-        putSecurityPropsIntoClusterConfig(configRegion);
+        persistSecuritySettings(configRegion);
+        // for those groups that have jar files, need to download the jars 
from other locators
+        // if it doesn't exist yet
         Set<String> groups = configRegion.keySet();
         for (String group : groups) {
           Configuration config = configRegion.get(group);
-          // for those groups that have jar files, need to download the jars 
from other locators
-          // if it doesn't exist yet
           for (String jar : config.getJarNames()) {
             if (!(getPathToJarOnThisLocator(group, jar).toFile()).exists()) {
               downloadJarFromOtherLocators(group, jar);
@@ -438,8 +451,9 @@ public class ClusterConfigurationService {
     status.set(SharedConfigurationStatus.RUNNING);
   }
 
-  private void putSecurityPropsIntoClusterConfig(final Region<String, 
Configuration> configRegion) {
+  private void persistSecuritySettings(final Region<String, Configuration> 
configRegion) {
     Properties securityProps = 
cache.getDistributedSystem().getSecurityProperties();
+
     Configuration clusterPropertiesConfig =
         configRegion.get(ClusterConfigurationService.CLUSTER_CONFIG);
     if (clusterPropertiesConfig == null) {
@@ -448,6 +462,7 @@ public class ClusterConfigurationService {
     }
     // put security-manager and security-post-processor in the cluster config
     Properties clusterProperties = 
clusterPropertiesConfig.getGemfireProperties();
+
     if (securityProps.containsKey(SECURITY_MANAGER)) {
       clusterProperties.setProperty(SECURITY_MANAGER, 
securityProps.getProperty(SECURITY_MANAGER));
     }
@@ -585,20 +600,28 @@ public class ClusterConfigurationService {
    * Loads the internal region with the configuration in the configDirPath
    */
   public void loadSharedConfigurationFromDisk() throws Exception {
+    lockSharedConfiguration();
     File[] groupNames =
         new File(configDirPath).listFiles((FileFilter) 
DirectoryFileFilter.INSTANCE);
     Map<String, Configuration> sharedConfiguration = new HashMap<String, 
Configuration>();
 
-    for (File groupName : groupNames) {
-      Configuration configuration = readConfiguration(groupName);
-      sharedConfiguration.put(groupName.getName(), configuration);
-    }
-
-    getConfigurationRegion().clear();
-    getConfigurationRegion().putAll(sharedConfiguration);
-  }
+    try {
+      for (File groupName : groupNames) {
+        Configuration configuration = readConfiguration(groupName);
+        sharedConfiguration.put(groupName.getName(), configuration);
+      }
+      Region clusterRegion = getConfigurationRegion();
+      clusterRegion.clear();
+      clusterRegion.putAll(sharedConfiguration);
 
+      // Overwrite the security settings using the locator's properties, 
ignoring whatever
+      // in the import
+      persistSecuritySettings(clusterRegion);
 
+    } finally {
+      unlockSharedConfiguration();
+    }
+  }
 
   public void renameExistingSharedConfigDirectory() {
     File configDirFile = new File(configDirPath);
@@ -616,7 +639,7 @@ public class ClusterConfigurationService {
 
 
   // Write the content of xml and properties into the file system for 
exporting purpose
-  public void writeConfig(final Configuration configuration) throws Exception {
+  public void writeConfigToFile(final Configuration configuration) throws 
Exception {
     File configDir = createConfigDirIfNecessary(configuration.getConfigName());
 
     File propsFile = new File(configDir, 
configuration.getPropertiesFileName());
@@ -745,4 +768,21 @@ public class ClusterConfigurationService {
 
     return configDir;
   }
+
+  // check if it's ok to populate the properties from one member to another
+  public static boolean isMisConfigured(Properties fromProps, Properties 
toProps, String key) {
+    String fromPropValue = fromProps.getProperty(key);
+    String toPropValue = toProps.getProperty(key);
+
+    // if this to prop is not specified, this is always OK.
+    if (org.apache.commons.lang.StringUtils.isBlank(toPropValue))
+      return false;
+
+    // to props is not blank, but from props is blank, NOT OK.
+    if (org.apache.commons.lang.StringUtils.isBlank(fromPropValue))
+      return true;
+
+    // at this point check for equality
+    return !fromPropValue.equals(toPropValue);
+  }
 }

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
 
b/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
index 795164d..6e374ec 100755
--- 
a/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
+++ 
b/geode-core/src/main/java/org/apache/geode/internal/cache/GemFireCacheImpl.java
@@ -783,6 +783,8 @@ public class GemFireCacheImpl
     return basicCreate(system, existingOk, cacheConfig, null, false, 
ASYNC_EVENT_LISTENERS, null);
   }
 
+
+
   private static GemFireCacheImpl basicCreate(DistributedSystem system, 
boolean existingOk,
       CacheConfig cacheConfig, PoolFactory pf, boolean isClient, boolean 
asyncEventListeners,
       TypeRegistry typeRegistry) throws CacheExistsException, TimeoutException,
@@ -1982,6 +1984,10 @@ public class GemFireCacheImpl
     close(false);
   }
 
+  public void close(String reason, boolean keepalive, boolean keepDS) {
+    close(reason, null, keepalive, keepDS);
+  }
+
   public void close(boolean keepalive) {
     close("Normal disconnect", null, keepalive, false);
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/cli/CliMetaData.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/cli/CliMetaData.java 
b/geode-core/src/main/java/org/apache/geode/management/cli/CliMetaData.java
index 2e272fc..a20fba5 100644
--- a/geode-core/src/main/java/org/apache/geode/management/cli/CliMetaData.java
+++ b/geode-core/src/main/java/org/apache/geode/management/cli/CliMetaData.java
@@ -14,13 +14,13 @@
  */
 package org.apache.geode.management.cli;
 
+import org.apache.geode.management.internal.cli.CliAroundInterceptor;
+
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-import org.apache.geode.management.internal.cli.CliAroundInterceptor;
-
 /**
  * An annotation to define additional meta-data for commands.
  *
@@ -49,10 +49,6 @@ public @interface CliMetaData {
    */
   boolean isPersisted() default false;
 
-  boolean readsSharedConfiguration() default false;
-
-  boolean writesToSharedConfiguration() default false;
-
   /** In help, topics that are related to this command **/
   String[] relatedTopic() default 
org.apache.geode.management.internal.cli.i18n.CliStrings.DEFAULT_TOPIC_GEODE;
 

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CreateAlterDestroyRegionCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CreateAlterDestroyRegionCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CreateAlterDestroyRegionCommands.java
index 7df4112..e8173f1 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CreateAlterDestroyRegionCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CreateAlterDestroyRegionCommands.java
@@ -109,7 +109,7 @@ public class CreateAlterDestroyRegionCommands extends 
AbstractCommandsSupport {
   }
 
   @CliCommand(value = CliStrings.CREATE_REGION, help = 
CliStrings.CREATE_REGION__HELP)
-  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result createRegion(
       @CliOption(key = CliStrings.CREATE_REGION__REGION, mandatory = true,
@@ -417,7 +417,7 @@ public class CreateAlterDestroyRegionCommands extends 
AbstractCommandsSupport {
   }
 
   @CliCommand(value = CliStrings.ALTER_REGION, help = 
CliStrings.ALTER_REGION__HELP)
-  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION)
   public Result alterRegion(
       @CliOption(key = CliStrings.ALTER_REGION__REGION, mandatory = true,
           unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,
@@ -1041,8 +1041,7 @@ public class CreateAlterDestroyRegionCommands extends 
AbstractCommandsSupport {
   }
 
   @CliCommand(value = {CliStrings.DESTROY_REGION}, help = 
CliStrings.DESTROY_REGION__HELP)
-  @CliMetaData(shellOnly = false, relatedTopic = CliStrings.TOPIC_GEODE_REGION,
-      writesToSharedConfiguration = true)
+  @CliMetaData(shellOnly = false, relatedTopic = CliStrings.TOPIC_GEODE_REGION)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result destroyRegion(
       @CliOption(key = CliStrings.DESTROY_REGION__REGION, optionContext = 
ConverterHint.REGIONPATH,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommands.java
index f076cec..a6b4575 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DeployCommands.java
@@ -38,7 +38,6 @@ import 
org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.NotAuthorizedException;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
-import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
@@ -75,7 +74,7 @@ public final class DeployCommands extends 
AbstractCommandsSupport {
   @CliCommand(value = {CliStrings.DEPLOY}, help = CliStrings.DEPLOY__HELP)
   @CliMetaData(
       interceptor = 
"org.apache.geode.management.internal.cli.commands.DeployCommands$Interceptor",
-      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG}, 
writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
   public final Result deploy(
       @CliOption(key = {CliStrings.DEPLOY__GROUP}, help = 
CliStrings.DEPLOY__GROUP__HELP,
           optionContext = ConverterHint.MEMBERGROUP) @CliMetaData(
@@ -166,7 +165,7 @@ public final class DeployCommands extends 
AbstractCommandsSupport {
    * @return The result of the attempt to undeploy
    */
   @CliCommand(value = {CliStrings.UNDEPLOY}, help = CliStrings.UNDEPLOY__HELP)
-  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG}, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public final Result undeploy(
       @CliOption(key = {CliStrings.UNDEPLOY__GROUP}, help = 
CliStrings.UNDEPLOY__GROUP__HELP,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DiskStoreCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DiskStoreCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DiskStoreCommands.java
index 14114cf..ab8d02a 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DiskStoreCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/DiskStoreCommands.java
@@ -14,26 +14,6 @@
  */
 package org.apache.geode.management.internal.cli.commands;
 
-import java.io.BufferedReader;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.PrintStream;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.geode.GemFireIOException;
 import org.apache.geode.SystemFailure;
 import org.apache.geode.admin.BackupStatus;
@@ -55,13 +35,11 @@ import org.apache.geode.internal.cache.DiskStoreImpl;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.cache.execute.AbstractExecution;
 import org.apache.geode.internal.cache.partitioned.ColocatedRegionDetails;
-import org.apache.geode.internal.cache.persistence.PersistentMemberID;
 import org.apache.geode.internal.cache.persistence.PersistentMemberPattern;
 import org.apache.geode.internal.lang.ClassUtils;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.management.DistributedSystemMXBean;
 import org.apache.geode.management.ManagementService;
-import org.apache.geode.management.PersistentMemberDetails;
 import org.apache.geode.management.cli.CliMetaData;
 import org.apache.geode.management.cli.ConverterHint;
 import org.apache.geode.management.cli.Result;
@@ -95,11 +73,30 @@ import 
org.apache.geode.management.internal.messages.CompactRequest;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
-
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
 
+import java.io.BufferedReader;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.PrintStream;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 
 /**
  * The DiskStoreCommands class encapsulates all GemFire Disk Store commands in 
Gfsh.
@@ -293,8 +290,7 @@ public class DiskStoreCommands extends 
AbstractCommandsSupport {
   }
 
   @CliCommand(value = CliStrings.CREATE_DISK_STORE, help = 
CliStrings.CREATE_DISK_STORE__HELP)
-  @CliMetaData(shellOnly = false, relatedTopic = 
{CliStrings.TOPIC_GEODE_DISKSTORE},
-      writesToSharedConfiguration = true)
+  @CliMetaData(shellOnly = false, relatedTopic = 
{CliStrings.TOPIC_GEODE_DISKSTORE})
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result createDiskStore(
       @CliOption(key = CliStrings.CREATE_DISK_STORE__NAME, mandatory = true,
@@ -1434,8 +1430,7 @@ public class DiskStoreCommands extends 
AbstractCommandsSupport {
   }
 
   @CliCommand(value = CliStrings.DESTROY_DISK_STORE, help = 
CliStrings.DESTROY_DISK_STORE__HELP)
-  @CliMetaData(shellOnly = false, relatedTopic = 
{CliStrings.TOPIC_GEODE_DISKSTORE},
-      writesToSharedConfiguration = true)
+  @CliMetaData(shellOnly = false, relatedTopic = 
{CliStrings.TOPIC_GEODE_DISKSTORE})
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result destroyDiskStore(
       @CliOption(key = CliStrings.DESTROY_DISK_STORE__NAME, mandatory = true,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
new file mode 100644
index 0000000..5e45f3b
--- /dev/null
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportClusterConfigurationCommands.java
@@ -0,0 +1,319 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.cli.commands;
+
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toSet;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.geode.cache.execute.ResultCollector;
+import org.apache.geode.distributed.DistributedMember;
+import org.apache.geode.distributed.internal.ClusterConfigurationService;
+import org.apache.geode.distributed.internal.InternalLocator;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.lang.StringUtils;
+import org.apache.geode.internal.logging.LogService;
+import org.apache.geode.management.cli.CliMetaData;
+import org.apache.geode.management.cli.Result;
+import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
+import org.apache.geode.management.internal.cli.CliUtil;
+import org.apache.geode.management.internal.cli.GfshParseResult;
+import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
+import org.apache.geode.management.internal.cli.i18n.CliStrings;
+import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
+import org.apache.geode.management.internal.cli.result.ErrorResultData;
+import org.apache.geode.management.internal.cli.result.FileResult;
+import org.apache.geode.management.internal.cli.result.InfoResultData;
+import org.apache.geode.management.internal.cli.result.ResultBuilder;
+import org.apache.geode.management.internal.configuration.domain.Configuration;
+import 
org.apache.geode.management.internal.configuration.functions.RecreateCacheFunction;
+import 
org.apache.geode.management.internal.configuration.functions.RegionsWithDataOnServerFunction;
+import org.apache.geode.management.internal.configuration.utils.ZipUtils;
+import org.apache.geode.management.internal.security.ResourceOperation;
+import org.apache.geode.security.ResourcePermission.Operation;
+import org.apache.geode.security.ResourcePermission.Resource;
+import org.apache.logging.log4j.Logger;
+import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
+import org.springframework.shell.core.annotation.CliCommand;
+import org.springframework.shell.core.annotation.CliOption;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/****
+ * Commands for the cluster configuration
+ *
+ */
+@SuppressWarnings("unused")
+public class ExportImportClusterConfigurationCommands extends 
AbstractCommandsSupport {
+  @CliCommand(value = {CliStrings.EXPORT_SHARED_CONFIG},
+      help = CliStrings.EXPORT_SHARED_CONFIG__HELP)
+  @CliMetaData(
+      interceptor = 
"org.apache.geode.management.internal.cli.commands.ExportImportClusterConfigurationCommands$ExportInterceptor",
+      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
+  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
+  public Result exportSharedConfig(@CliOption(key = 
{CliStrings.EXPORT_SHARED_CONFIG__FILE},
+      mandatory = true, help = CliStrings.EXPORT_SHARED_CONFIG__FILE__HELP) 
String zipFileName,
+
+      @CliOption(key = {CliStrings.EXPORT_SHARED_CONFIG__DIR},
+          help = CliStrings.EXPORT_SHARED_CONFIG__DIR__HELP) String dir) {
+
+    InternalLocator locator = InternalLocator.getLocator();
+    if (!locator.isSharedConfigurationRunning()) {
+      return 
ResultBuilder.createGemFireErrorResult(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
+    }
+
+    ClusterConfigurationService sc = locator.getSharedConfiguration();
+    File zipFile = new File(zipFileName);
+    zipFile.getParentFile().mkdirs();
+
+    Result result;
+    try {
+      for (Configuration config : sc.getEntireConfiguration().values()) {
+        sc.writeConfigToFile(config);
+      }
+      ZipUtils.zip(sc.getSharedConfigurationDirPath(), 
zipFile.getCanonicalPath());
+
+      InfoResultData infoData = ResultBuilder.createInfoResultData();
+      byte[] byteData = FileUtils.readFileToByteArray(zipFile);
+      infoData.addAsFile(zipFileName, byteData, 
InfoResultData.FILE_TYPE_BINARY,
+          CliStrings.EXPORT_SHARED_CONFIG__DOWNLOAD__MSG, false);
+      result = ResultBuilder.buildResult(infoData);
+    } catch (Exception e) {
+      ErrorResultData errorData = ResultBuilder.createErrorResultData();
+      errorData.addLine("Export failed");
+      logSevere(e);
+      result = ResultBuilder.buildResult(errorData);
+    } finally {
+      zipFile.delete();
+    }
+
+    return result;
+  }
+
+
+  @CliCommand(value = {CliStrings.IMPORT_SHARED_CONFIG},
+      help = CliStrings.IMPORT_SHARED_CONFIG__HELP)
+  @CliMetaData(
+      interceptor = 
"org.apache.geode.management.internal.cli.commands.ExportImportClusterConfigurationCommands$ImportInterceptor",
+      relatedTopic = {CliStrings.TOPIC_GEODE_CONFIG})
+  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.MANAGE)
+  @SuppressWarnings("unchecked")
+  public Result importSharedConfig(@CliOption(key = 
{CliStrings.IMPORT_SHARED_CONFIG__ZIP},
+      mandatory = true, help = CliStrings.IMPORT_SHARED_CONFIG__ZIP__HELP) 
String zip) {
+
+    InternalLocator locator = InternalLocator.getLocator();
+
+    if (!locator.isSharedConfigurationRunning()) {
+      ErrorResultData errorData = ResultBuilder.createErrorResultData();
+      errorData.addLine(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
+      return ResultBuilder.buildResult(errorData);
+    }
+
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+    Set<DistributedMember> servers = CliUtil.getAllNormalMembers(cache);
+
+    Set<String> regionsWithData = 
servers.stream().map(this::getNonEmptyRegionsOnServer)
+        .flatMap(Collection::stream).collect(toSet());
+
+    if (!regionsWithData.isEmpty()) {
+      return ResultBuilder.createGemFireErrorResult(
+          "Cannot import cluster configuration with existing data in regions: "
+              + regionsWithData.stream().collect(joining(",")));
+    }
+
+
+    byte[][] shellBytesData = CommandExecutionContext.getBytesFromShell();
+    String zipFileName = CliUtil.bytesToNames(shellBytesData)[0];
+    byte[] zipBytes = CliUtil.bytesToData(shellBytesData)[0];
+
+    Result result;
+    InfoResultData infoData = ResultBuilder.createInfoResultData();
+    File zipFile = new File(zipFileName);
+    try {
+      ClusterConfigurationService sc = locator.getSharedConfiguration();
+
+      // backup the old config
+      for (Configuration config : sc.getEntireConfiguration().values()) {
+        sc.writeConfigToFile(config);
+      }
+      sc.renameExistingSharedConfigDirectory();
+
+      FileUtils.writeByteArrayToFile(zipFile, zipBytes);
+      ZipUtils.unzip(zipFileName, sc.getSharedConfigurationDirPath());
+
+      // load it from the disk
+      sc.loadSharedConfigurationFromDisk();
+      infoData.addLine(CliStrings.IMPORT_SHARED_CONFIG__SUCCESS__MSG);
+
+    } catch (Exception e) {
+      ErrorResultData errorData = ResultBuilder.createErrorResultData();
+      errorData.addLine("Import failed");
+      logSevere(e);
+      result = ResultBuilder.buildResult(errorData);
+      // if import is unsuccessful, don't need to bounce the server.
+      return result;
+    } finally {
+      FileUtils.deleteQuietly(zipFile);
+    }
+
+    // Bounce the cache of each member
+    Set<CliFunctionResult> functionResults =
+        servers.stream().map(this::reCreateCache).collect(toSet());
+
+    for (CliFunctionResult functionResult : functionResults) {
+      if (functionResult.isSuccessful()) {
+        infoData.addLine("Successfully applied the imported cluster 
configuration on "
+            + functionResult.getMemberIdOrName());
+      } else {
+        infoData.addLine("Failed to apply the imported cluster configuration 
on "
+            + functionResult.getMemberIdOrName() + " due to " + 
functionResult.getMessage());
+      }
+    }
+
+    result = ResultBuilder.buildResult(infoData);
+    return result;
+  }
+
+  private Set<String> getNonEmptyRegionsOnServer(DistributedMember server) {
+    ResultCollector rc =
+        CliUtil.executeFunction(new RegionsWithDataOnServerFunction(), null, 
server);
+    List<Set<String>> results = (List<Set<String>>) rc.getResult();
+
+    return results.get(0);
+  }
+
+  private CliFunctionResult reCreateCache(DistributedMember server) {
+    ResultCollector rc = CliUtil.executeFunction(new RecreateCacheFunction(), 
null, server);
+    List<CliFunctionResult> results = (List<CliFunctionResult>) rc.getResult();
+
+    return results.get(0);
+  }
+
+  @CliAvailabilityIndicator({CliStrings.EXPORT_SHARED_CONFIG, 
CliStrings.IMPORT_SHARED_CONFIG})
+  public boolean sharedConfigCommandsAvailable() {
+    boolean isAvailable = true; // always available on server
+    if (CliUtil.isGfshVM()) { // in gfsh check if connected
+      isAvailable = getGfsh() != null && getGfsh().isConnectedAndReady();
+    }
+    return isAvailable;
+  }
+
+  /**
+   * Interceptor used by gfsh to intercept execution of export shared config 
command at "shell".
+   */
+  public static class ExportInterceptor extends AbstractCliAroundInterceptor {
+    private String saveDirString;
+    private static final Logger logger = LogService.getLogger();
+
+    @Override
+    public Result preExecution(GfshParseResult parseResult) {
+      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
+      String zip = paramValueMap.get(CliStrings.EXPORT_SHARED_CONFIG__FILE);
+
+      if (!zip.endsWith(".zip")) {
+        return ResultBuilder
+            
.createUserErrorResult(CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, 
".zip"));
+      }
+      return ResultBuilder.createInfoResult("OK");
+    }
+
+    @Override
+    public Result postExecution(GfshParseResult parseResult, Result 
commandResult) {
+      if (commandResult.hasIncomingFiles()) {
+        try {
+          Map<String, String> paramValueMap = 
parseResult.getParamValueStrings();
+          String dir = paramValueMap.get(CliStrings.EXPORT_SHARED_CONFIG__DIR);
+          dir = (dir == null) ? null : dir.trim();
+
+          File saveDirFile = new File(".");
+
+          if (dir != null && !dir.isEmpty()) {
+            saveDirFile = new File(dir);
+            if (saveDirFile.exists()) {
+              if (!saveDirFile.isDirectory()) {
+                return ResultBuilder.createGemFireErrorResult(
+                    
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__NOT_A_DIRECTORY, dir));
+              }
+            } else if (!saveDirFile.mkdirs()) {
+              return ResultBuilder.createGemFireErrorResult(
+                  
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__CANNOT_CREATE_DIR, 
dir));
+            }
+          }
+          if (!saveDirFile.canWrite()) {
+            return ResultBuilder.createGemFireErrorResult(
+                
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__NOT_WRITEABLE,
+                    saveDirFile.getCanonicalPath()));
+          }
+          saveDirString = saveDirFile.getAbsolutePath();
+          commandResult.saveIncomingFiles(saveDirString);
+          return commandResult;
+        } catch (IOException ioex) {
+          logger.error(ioex);
+          return ResultBuilder.createShellClientErrorResult(
+              CliStrings.EXPORT_SHARED_CONFIG__UNABLE__TO__EXPORT__CONFIG + ": 
"
+                  + ioex.getMessage());
+        }
+      }
+      return null;
+    }
+  }
+
+
+  public static class ImportInterceptor extends AbstractCliAroundInterceptor {
+
+    public Result preExecution(GfshParseResult parseResult) {
+      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
+
+      String zip = paramValueMap.get(CliStrings.IMPORT_SHARED_CONFIG__ZIP);
+
+      zip = StringUtils.trim(zip);
+
+      if (zip == null) {
+        return ResultBuilder.createUserErrorResult(CliStrings.format(
+            CliStrings.IMPORT_SHARED_CONFIG__PROVIDE__ZIP, 
CliStrings.IMPORT_SHARED_CONFIG__ZIP));
+      }
+      if (!zip.endsWith(CliStrings.ZIP_FILE_EXTENSION)) {
+        return ResultBuilder.createUserErrorResult(
+            CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, 
CliStrings.ZIP_FILE_EXTENSION));
+      }
+
+      FileResult fileResult;
+
+      try {
+        fileResult = new FileResult(new String[] {zip});
+      } catch (FileNotFoundException fnfex) {
+        return ResultBuilder.createUserErrorResult("'" + zip + "' not found.");
+      } catch (IOException ioex) {
+        return ResultBuilder
+            .createGemFireErrorResult(ioex.getClass().getName() + ": " + 
ioex.getMessage());
+      }
+
+      return fileResult;
+    }
+
+    @Override
+    public Result postExecution(GfshParseResult parseResult, Result 
commandResult) {
+      return null;
+    }
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportSharedConfigurationCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportSharedConfigurationCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportSharedConfigurationCommands.java
deleted file mode 100644
index 914576b..0000000
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/ExportImportSharedConfigurationCommands.java
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
- * agreements. See the NOTICE file distributed with this work for additional 
information regarding
- * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the 
License. You may obtain a
- * copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software 
distributed under the License
- * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
- * or implied. See the License for the specific language governing permissions 
and limitations under
- * the License.
- */
-package org.apache.geode.management.internal.cli.commands;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.geode.distributed.internal.InternalLocator;
-import org.apache.geode.distributed.internal.ClusterConfigurationService;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.internal.lang.StringUtils;
-import org.apache.geode.internal.logging.LogService;
-import org.apache.geode.management.cli.CliMetaData;
-import org.apache.geode.management.cli.Result;
-import org.apache.geode.management.internal.cli.AbstractCliAroundInterceptor;
-import org.apache.geode.management.internal.cli.CliUtil;
-import org.apache.geode.management.internal.cli.GfshParseResult;
-import org.apache.geode.management.internal.cli.i18n.CliStrings;
-import org.apache.geode.management.internal.cli.remote.CommandExecutionContext;
-import org.apache.geode.management.internal.cli.result.ErrorResultData;
-import org.apache.geode.management.internal.cli.result.FileResult;
-import org.apache.geode.management.internal.cli.result.InfoResultData;
-import org.apache.geode.management.internal.cli.result.ResultBuilder;
-import org.apache.geode.management.internal.configuration.domain.Configuration;
-import org.apache.geode.management.internal.configuration.utils.ZipUtils;
-import org.apache.geode.management.internal.security.ResourceOperation;
-import org.apache.geode.security.ResourcePermission.Operation;
-import org.apache.geode.security.ResourcePermission.Resource;
-import org.apache.logging.log4j.Logger;
-import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
-import org.springframework.shell.core.annotation.CliCommand;
-import org.springframework.shell.core.annotation.CliOption;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Map;
-
-/****
- * Commands for the shared configuration
- *
- */
-@SuppressWarnings("unused")
-public class ExportImportSharedConfigurationCommands extends 
AbstractCommandsSupport {
-  @CliCommand(value = {CliStrings.EXPORT_SHARED_CONFIG},
-      help = CliStrings.EXPORT_SHARED_CONFIG__HELP)
-  @CliMetaData(
-      interceptor = 
"org.apache.geode.management.internal.cli.commands.ExportImportSharedConfigurationCommands$ExportInterceptor",
-      readsSharedConfiguration = true, relatedTopic = 
{CliStrings.TOPIC_GEODE_CONFIG})
-  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
-  public Result exportSharedConfig(@CliOption(key = 
{CliStrings.EXPORT_SHARED_CONFIG__FILE},
-      mandatory = true, help = CliStrings.EXPORT_SHARED_CONFIG__FILE__HELP) 
String zipFileName,
-
-      @CliOption(key = {CliStrings.EXPORT_SHARED_CONFIG__DIR},
-          help = CliStrings.EXPORT_SHARED_CONFIG__DIR__HELP) String dir) {
-
-    InternalLocator locator = InternalLocator.getLocator();
-    if (!locator.isSharedConfigurationRunning()) {
-      return 
ResultBuilder.createGemFireErrorResult(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
-    }
-
-    ClusterConfigurationService sc = locator.getSharedConfiguration();
-    File zipFile = new File(zipFileName);
-    zipFile.getParentFile().mkdirs();
-
-    Result result;
-    try {
-      for (Configuration config : sc.getEntireConfiguration().values()) {
-        sc.writeConfig(config);
-      }
-      ZipUtils.zip(sc.getSharedConfigurationDirPath(), 
zipFile.getCanonicalPath());
-
-      InfoResultData infoData = ResultBuilder.createInfoResultData();
-      byte[] byteData = FileUtils.readFileToByteArray(zipFile);
-      infoData.addAsFile(zipFileName, byteData, 
InfoResultData.FILE_TYPE_BINARY,
-          CliStrings.EXPORT_SHARED_CONFIG__DOWNLOAD__MSG, false);
-      result = ResultBuilder.buildResult(infoData);
-    } catch (Exception e) {
-      ErrorResultData errorData = ResultBuilder.createErrorResultData();
-      errorData.addLine("Export failed");
-      logSevere(e);
-      result = ResultBuilder.buildResult(errorData);
-    } finally {
-      zipFile.delete();
-    }
-
-    return result;
-  }
-
-
-  @CliCommand(value = {CliStrings.IMPORT_SHARED_CONFIG},
-      help = CliStrings.IMPORT_SHARED_CONFIG__HELP)
-  @CliMetaData(
-      interceptor = 
"org.apache.geode.management.internal.cli.commands.ExportImportSharedConfigurationCommands$ImportInterceptor",
-      writesToSharedConfiguration = true, relatedTopic = 
{CliStrings.TOPIC_GEODE_CONFIG})
-  @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.MANAGE)
-  @SuppressWarnings("unchecked")
-  public Result importSharedConfig(@CliOption(key = 
{CliStrings.IMPORT_SHARED_CONFIG__ZIP},
-      mandatory = true, help = CliStrings.IMPORT_SHARED_CONFIG__ZIP__HELP) 
String zip) {
-
-    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
-
-    if (!CliUtil.getAllNormalMembers(cache).isEmpty()) {
-      return ResultBuilder
-          
.createGemFireErrorResult(CliStrings.IMPORT_SHARED_CONFIG__CANNOT__IMPORT__MSG);
-    }
-
-    byte[][] shellBytesData = CommandExecutionContext.getBytesFromShell();
-    String zipFileName = CliUtil.bytesToNames(shellBytesData)[0];
-    byte[] zipBytes = CliUtil.bytesToData(shellBytesData)[0];
-
-    InternalLocator locator = InternalLocator.getLocator();
-
-    if (!locator.isSharedConfigurationRunning()) {
-      ErrorResultData errorData = ResultBuilder.createErrorResultData();
-      errorData.addLine(CliStrings.SHARED_CONFIGURATION_NOT_STARTED);
-      return ResultBuilder.buildResult(errorData);
-    }
-
-    Result result;
-    File zipFile = new File(zipFileName);
-    try {
-      ClusterConfigurationService sc = locator.getSharedConfiguration();
-
-      // backup the old config
-      for (Configuration config : sc.getEntireConfiguration().values()) {
-        sc.writeConfig(config);
-      }
-      sc.renameExistingSharedConfigDirectory();
-
-      sc.clearSharedConfiguration();
-      FileUtils.writeByteArrayToFile(zipFile, zipBytes);
-      ZipUtils.unzip(zipFileName, sc.getSharedConfigurationDirPath());
-
-      // load it from the disk
-      sc.loadSharedConfigurationFromDisk();
-
-      InfoResultData infoData = ResultBuilder.createInfoResultData();
-      infoData.addLine(CliStrings.IMPORT_SHARED_CONFIG__SUCCESS__MSG);
-      result = ResultBuilder.buildResult(infoData);
-    } catch (Exception e) {
-      ErrorResultData errorData = ResultBuilder.createErrorResultData();
-      errorData.addLine("Import failed");
-      logSevere(e);
-      result = ResultBuilder.buildResult(errorData);
-    } finally {
-      FileUtils.deleteQuietly(zipFile);
-    }
-    return result;
-  }
-
-  @CliAvailabilityIndicator({CliStrings.EXPORT_SHARED_CONFIG, 
CliStrings.IMPORT_SHARED_CONFIG})
-  public boolean sharedConfigCommandsAvailable() {
-    boolean isAvailable = true; // always available on server
-    if (CliUtil.isGfshVM()) { // in gfsh check if connected
-      isAvailable = getGfsh() != null && getGfsh().isConnectedAndReady();
-    }
-    return isAvailable;
-  }
-
-  /**
-   * Interceptor used by gfsh to intercept execution of export shared config 
command at "shell".
-   */
-  public static class ExportInterceptor extends AbstractCliAroundInterceptor {
-    private String saveDirString;
-    private static final Logger logger = LogService.getLogger();
-
-    @Override
-    public Result preExecution(GfshParseResult parseResult) {
-      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
-      String zip = paramValueMap.get(CliStrings.EXPORT_SHARED_CONFIG__FILE);
-
-      if (!zip.endsWith(".zip")) {
-        return ResultBuilder
-            
.createUserErrorResult(CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, 
".zip"));
-      }
-      return ResultBuilder.createInfoResult("OK");
-    }
-
-    @Override
-    public Result postExecution(GfshParseResult parseResult, Result 
commandResult) {
-      if (commandResult.hasIncomingFiles()) {
-        try {
-          Map<String, String> paramValueMap = 
parseResult.getParamValueStrings();
-          String dir = paramValueMap.get(CliStrings.EXPORT_SHARED_CONFIG__DIR);
-          dir = (dir == null) ? null : dir.trim();
-
-          File saveDirFile = new File(".");
-
-          if (dir != null && !dir.isEmpty()) {
-            saveDirFile = new File(dir);
-            if (saveDirFile.exists()) {
-              if (!saveDirFile.isDirectory()) {
-                return ResultBuilder.createGemFireErrorResult(
-                    
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__NOT_A_DIRECTORY, dir));
-              }
-            } else if (!saveDirFile.mkdirs()) {
-              return ResultBuilder.createGemFireErrorResult(
-                  
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__CANNOT_CREATE_DIR, 
dir));
-            }
-          }
-          if (!saveDirFile.canWrite()) {
-            return ResultBuilder.createGemFireErrorResult(
-                
CliStrings.format(CliStrings.EXPORT_SHARED_CONFIG__MSG__NOT_WRITEABLE,
-                    saveDirFile.getCanonicalPath()));
-          }
-          saveDirString = saveDirFile.getAbsolutePath();
-          commandResult.saveIncomingFiles(saveDirString);
-          return commandResult;
-        } catch (IOException ioex) {
-          logger.error(ioex);
-          return ResultBuilder.createShellClientErrorResult(
-              CliStrings.EXPORT_SHARED_CONFIG__UNABLE__TO__EXPORT__CONFIG + ": 
"
-                  + ioex.getMessage());
-        }
-      }
-      return null;
-    }
-  }
-
-
-  public static class ImportInterceptor extends AbstractCliAroundInterceptor {
-
-    public Result preExecution(GfshParseResult parseResult) {
-      Map<String, String> paramValueMap = parseResult.getParamValueStrings();
-
-      String zip = paramValueMap.get(CliStrings.IMPORT_SHARED_CONFIG__ZIP);
-
-      zip = StringUtils.trim(zip);
-
-      if (zip == null) {
-        return ResultBuilder.createUserErrorResult(CliStrings.format(
-            CliStrings.IMPORT_SHARED_CONFIG__PROVIDE__ZIP, 
CliStrings.IMPORT_SHARED_CONFIG__ZIP));
-      }
-      if (!zip.endsWith(CliStrings.ZIP_FILE_EXTENSION)) {
-        return ResultBuilder.createUserErrorResult(
-            CliStrings.format(CliStrings.INVALID_FILE_EXTENSION, 
CliStrings.ZIP_FILE_EXTENSION));
-      }
-
-      FileResult fileResult;
-
-      try {
-        fileResult = new FileResult(new String[] {zip});
-      } catch (FileNotFoundException fnfex) {
-        return ResultBuilder.createUserErrorResult("'" + zip + "' not found.");
-      } catch (IOException ioex) {
-        return ResultBuilder
-            .createGemFireErrorResult(ioex.getClass().getName() + ": " + 
ioex.getMessage());
-      }
-
-      return fileResult;
-    }
-
-    @Override
-    public Result postExecution(GfshParseResult parseResult, Result 
commandResult) {
-      return null;
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/IndexCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/IndexCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/IndexCommands.java
index bc436ba..6e03c11 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/IndexCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/IndexCommands.java
@@ -172,8 +172,7 @@ public class IndexCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.CREATE_INDEX, help = 
CliStrings.CREATE_INDEX__HELP)
   @CliMetaData(shellOnly = false,
-      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA},
-      writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA})
   // TODO : Add optionContext for indexName
   public Result createIndex(@CliOption(key = CliStrings.CREATE_INDEX__NAME, 
mandatory = true,
       help = CliStrings.CREATE_INDEX__NAME__HELP) final String indexName,
@@ -325,8 +324,7 @@ public class IndexCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.DESTROY_INDEX, help = 
CliStrings.DESTROY_INDEX__HELP)
   @CliMetaData(shellOnly = false,
-      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA},
-      writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA})
   public Result destroyIndex(@CliOption(key = CliStrings.DESTROY_INDEX__NAME, 
mandatory = false,
       unspecifiedDefaultValue = "",
       help = CliStrings.DESTROY_INDEX__NAME__HELP) final String indexName,
@@ -465,8 +463,7 @@ public class IndexCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.DEFINE_INDEX, help = 
CliStrings.DEFINE_INDEX__HELP)
   @CliMetaData(shellOnly = false,
-      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA},
-      writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA})
   // TODO : Add optionContext for indexName
   public Result defineIndex(@CliOption(key = CliStrings.DEFINE_INDEX_NAME, 
mandatory = true,
       help = CliStrings.DEFINE_INDEX__HELP) final String indexName,
@@ -533,8 +530,7 @@ public class IndexCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.CREATE_DEFINED_INDEXES, help = 
CliStrings.CREATE_DEFINED__HELP)
   @CliMetaData(shellOnly = false,
-      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA},
-      writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA})
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   // TODO : Add optionContext for indexName
   public Result createDefinedIndexes(
@@ -639,8 +635,7 @@ public class IndexCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.CLEAR_DEFINED_INDEXES, help = 
CliStrings.CLEAR_DEFINED__HELP)
   @CliMetaData(shellOnly = false,
-      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA},
-      writesToSharedConfiguration = true)
+      relatedTopic = {CliStrings.TOPIC_GEODE_REGION, 
CliStrings.TOPIC_GEODE_DATA})
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   // TODO : Add optionContext for indexName
   public Result clearDefinedIndexes() {

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/PDXCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/PDXCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/PDXCommands.java
index 72d5c0f..4327dec 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/PDXCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/PDXCommands.java
@@ -48,7 +48,7 @@ public class PDXCommands extends AbstractCommandsSupport {
 
 
   @CliCommand(value = CliStrings.CONFIGURE_PDX, help = 
CliStrings.CONFIGURE_PDX__HELP)
-  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_REGION)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result configurePDX(@CliOption(key = 
CliStrings.CONFIGURE_PDX__READ__SERIALIZED,
       unspecifiedDefaultValue = CliMetaData.ANNOTATION_NULL_VALUE,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/QueueCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/QueueCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/QueueCommands.java
index 095bd68..8548478 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/QueueCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/QueueCommands.java
@@ -14,12 +14,6 @@
  */
 package org.apache.geode.management.internal.cli.commands;
 
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.geode.SystemFailure;
 import org.apache.geode.cache.execute.ResultCollector;
 import org.apache.geode.distributed.DistributedMember;
@@ -41,12 +35,16 @@ import 
org.apache.geode.management.internal.configuration.domain.XmlEntity;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
-
-import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
 
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 /**
  * The QueueCommands class encapsulates all GemFire Queue commands in Gfsh.
  * </p>
@@ -57,7 +55,6 @@ public class QueueCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.CREATE_ASYNC_EVENT_QUEUE,
       help = CliStrings.CREATE_ASYNC_EVENT_QUEUE__HELP)
-  @CliMetaData(writesToSharedConfiguration = true)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result createAsyncEventQueue(
       @CliOption(key = CliStrings.CREATE_ASYNC_EVENT_QUEUE__ID, mandatory = 
true,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/WanCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/WanCommands.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/WanCommands.java
index cbf5891..38b47e9 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/WanCommands.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/WanCommands.java
@@ -14,22 +14,6 @@
  */
 package org.apache.geode.management.internal.cli.commands;
 
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.ThreadFactory;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicReference;
-import javax.management.ObjectName;
-
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.execute.ResultCollector;
@@ -45,7 +29,13 @@ import org.apache.geode.management.internal.MBeanJMXAdapter;
 import org.apache.geode.management.internal.SystemManagementService;
 import org.apache.geode.management.internal.cli.CliUtil;
 import org.apache.geode.management.internal.cli.LogWrapper;
-import org.apache.geode.management.internal.cli.functions.*;
+import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
+import 
org.apache.geode.management.internal.cli.functions.GatewayReceiverCreateFunction;
+import 
org.apache.geode.management.internal.cli.functions.GatewayReceiverFunctionArgs;
+import 
org.apache.geode.management.internal.cli.functions.GatewaySenderCreateFunction;
+import 
org.apache.geode.management.internal.cli.functions.GatewaySenderDestroyFunction;
+import 
org.apache.geode.management.internal.cli.functions.GatewaySenderDestroyFunctionArgs;
+import 
org.apache.geode.management.internal.cli.functions.GatewaySenderFunctionArgs;
 import org.apache.geode.management.internal.cli.i18n.CliStrings;
 import org.apache.geode.management.internal.cli.result.CommandResultException;
 import org.apache.geode.management.internal.cli.result.CompositeResultData;
@@ -55,15 +45,29 @@ import 
org.apache.geode.management.internal.configuration.domain.XmlEntity;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
-
-import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
 
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.ThreadFactory;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicReference;
+import javax.management.ObjectName;
+
 public class WanCommands extends AbstractCommandsSupport {
   @CliCommand(value = CliStrings.CREATE_GATEWAYSENDER, help = 
CliStrings.CREATE_GATEWAYSENDER__HELP)
-  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_WAN, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_WAN)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result createGatewaySender(@CliOption(key = 
CliStrings.CREATE_GATEWAYSENDER__GROUP,
       optionContext = ConverterHint.MEMBERGROUP,
@@ -1064,7 +1068,7 @@ public class WanCommands extends AbstractCommandsSupport {
 
   @CliCommand(value = CliStrings.DESTROY_GATEWAYSENDER,
       help = CliStrings.DESTROY_GATEWAYSENDER__HELP)
-  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_WAN, 
writesToSharedConfiguration = true)
+  @CliMetaData(relatedTopic = CliStrings.TOPIC_GEODE_WAN)
   @ResourceOperation(resource = Resource.DATA, operation = Operation.MANAGE)
   public Result destroyGatewaySender(
       @CliOption(key = CliStrings.DESTROY_GATEWAYSENDER__GROUP,

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/cli/remote/RemoteExecutionStrategy.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/remote/RemoteExecutionStrategy.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/remote/RemoteExecutionStrategy.java
index 1e4870f..4b827a9 100644
--- 
a/geode-core/src/main/java/org/apache/geode/management/internal/cli/remote/RemoteExecutionStrategy.java
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/cli/remote/RemoteExecutionStrategy.java
@@ -14,17 +14,7 @@
  */
 package org.apache.geode.management.internal.cli.remote;
 
-import java.lang.reflect.Method;
-
-import org.springframework.shell.event.ParseResult;
-import org.springframework.util.Assert;
-import org.springframework.util.ReflectionUtils;
-
-import org.apache.geode.distributed.DistributedLockService;
-import org.apache.geode.distributed.internal.InternalDistributedSystem;
-import org.apache.geode.distributed.internal.ClusterConfigurationService;
 import org.apache.geode.internal.ClassPathLoader;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.management.ManagementService;
 import org.apache.geode.management.cli.CliMetaData;
 import org.apache.geode.management.cli.Result;
@@ -34,6 +24,11 @@ import 
org.apache.geode.management.internal.cli.GfshParseResult;
 import org.apache.geode.management.internal.cli.LogWrapper;
 import org.apache.geode.management.internal.cli.result.FileResult;
 import org.apache.geode.management.internal.cli.result.ResultBuilder;
+import org.springframework.shell.event.ParseResult;
+import org.springframework.util.Assert;
+import org.springframework.util.ReflectionUtils;
+
+import java.lang.reflect.Method;
 
 /**
  * 
@@ -93,31 +88,8 @@ public class RemoteExecutionStrategy {
         }
         logWrapper.info("Executing " + gfshParseResult.getUserInput());
 
-        GemFireCacheImpl gfc = GemFireCacheImpl.getInstance();
-
-        // Do the locking and annotation check only if the shared 
configuration service is enabled
-        // Else go the usual route of command execution
-        // TODO: why do we need to care about this here?
-        if 
(gfc.getDistributionManager().isSharedConfigurationServiceEnabledForDS()
-            && (writesToSharedConfiguration(method) || 
readsFromSharedConfiguration(method))) {
-          DistributedLockService dls = ClusterConfigurationService
-              
.getSharedConfigLockService(InternalDistributedSystem.getAnyInstance());
-          if (dls.lock(ClusterConfigurationService.SHARED_CONFIG_LOCK_NAME, 
10000, -1)) {
-            try {
-              result = (Result) 
ReflectionUtils.invokeMethod(gfshParseResult.getMethod(),
-                  gfshParseResult.getInstance(), 
gfshParseResult.getArguments());
-            } finally {
-              dls.unlock(ClusterConfigurationService.SHARED_CONFIG_LOCK_NAME);
-            }
-          } else {
-            return ResultBuilder.createGemFireErrorResult(
-                "Unable to execute the command due to ongoing configuration 
change/member startup.");
-          }
-        } else {
-          result = (Result) 
ReflectionUtils.invokeMethod(gfshParseResult.getMethod(),
-              gfshParseResult.getInstance(), gfshParseResult.getArguments());
-        }
-
+        result = (Result) 
ReflectionUtils.invokeMethod(gfshParseResult.getMethod(),
+            gfshParseResult.getInstance(), gfshParseResult.getArguments());
 
         if (result != null && Status.ERROR.equals(result.getStatus())) {
           logWrapper
@@ -148,16 +120,6 @@ public class RemoteExecutionStrategy {
     return result;
   }
 
-  private boolean writesToSharedConfiguration(Method method) {
-    CliMetaData cliMetadata = method.getAnnotation(CliMetaData.class);
-    return cliMetadata != null && cliMetadata.writesToSharedConfiguration();
-  }
-
-  private boolean readsFromSharedConfiguration(Method method) {
-    CliMetaData cliMetadata = method.getAnnotation(CliMetaData.class);
-    return cliMetadata != null && cliMetadata.readsSharedConfiguration();
-  }
-
   private boolean isShellOnly(Method method) {
     CliMetaData cliMetadata = method.getAnnotation(CliMetaData.class);
     return cliMetadata != null && cliMetadata.shellOnly();

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RecreateCacheFunction.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RecreateCacheFunction.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RecreateCacheFunction.java
new file mode 100644
index 0000000..464a651
--- /dev/null
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RecreateCacheFunction.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+
+package org.apache.geode.management.internal.configuration.functions;
+
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.distributed.DistributedSystem;
+import org.apache.geode.internal.InternalEntity;
+import org.apache.geode.internal.cache.CacheConfig;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.management.internal.cli.functions.CliFunctionResult;
+
+public class RecreateCacheFunction implements Function, InternalEntity {
+  @Override
+  public void execute(FunctionContext context) {
+    CliFunctionResult result = null;
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+    DistributedSystem ds = cache.getDistributedSystem();
+    CacheConfig cacheConfig = cache.getCacheConfig();
+    try {
+      cache.close("Re-create Cache", true, true);
+      GemFireCacheImpl.create(ds, cacheConfig);
+    } catch (RuntimeException e) {
+      result = new CliFunctionResult(ds.getName(), e, e.getMessage());
+      context.getResultSender().lastResult(result);
+      return;
+    }
+    result = new CliFunctionResult(ds.getName(), true, "Cache successfully 
re-created.");
+    context.getResultSender().lastResult(result);
+  }
+
+  @Override
+  public String getId() {
+    return RecreateCacheFunction.class.getName();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RegionsWithDataOnServerFunction.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RegionsWithDataOnServerFunction.java
 
b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RegionsWithDataOnServerFunction.java
new file mode 100644
index 0000000..c7ec2bb
--- /dev/null
+++ 
b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/functions/RegionsWithDataOnServerFunction.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more 
contributor license
+ * agreements. See the NOTICE file distributed with this work for additional 
information regarding
+ * copyright ownership. The ASF licenses this file to You under the Apache 
License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the 
License. You may obtain a
+ * copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software 
distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 
KIND, either express
+ * or implied. See the License for the specific language governing permissions 
and limitations under
+ * the License.
+ */
+package org.apache.geode.management.internal.configuration.functions;
+
+import static java.util.stream.Collectors.toSet;
+
+import org.apache.geode.cache.execute.Function;
+import org.apache.geode.cache.execute.FunctionContext;
+import org.apache.geode.internal.InternalEntity;
+import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.LocalRegion;
+
+import java.util.Set;
+
+public class RegionsWithDataOnServerFunction implements Function, 
InternalEntity {
+  @Override
+  public void execute(FunctionContext context) {
+    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+
+    Set<String> nonEmptyRegions = cache.getApplicationRegions().stream()
+        .filter(region -> 
!region.isEmpty()).map(LocalRegion::getName).collect(toSet());
+
+    context.getResultSender().lastResult(nonEmptyRegions);
+  }
+
+  @Override
+  public String getId() {
+    return RegionsWithDataOnServerFunction.class.getName();
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/geode/blob/6f418be1/geode-core/src/test/java/org/apache/geode/internal/cache/extension/mock/MockExtensionCommands.java
----------------------------------------------------------------------
diff --git 
a/geode-core/src/test/java/org/apache/geode/internal/cache/extension/mock/MockExtensionCommands.java
 
b/geode-core/src/test/java/org/apache/geode/internal/cache/extension/mock/MockExtensionCommands.java
index 1bd4478..793abf6 100644
--- 
a/geode-core/src/test/java/org/apache/geode/internal/cache/extension/mock/MockExtensionCommands.java
+++ 
b/geode-core/src/test/java/org/apache/geode/internal/cache/extension/mock/MockExtensionCommands.java
@@ -15,17 +15,12 @@
 
 package org.apache.geode.internal.cache.extension.mock;
 
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicReference;
-
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.execute.Function;
 import org.apache.geode.cache.execute.ResultCollector;
 import org.apache.geode.distributed.DistributedMember;
-import org.apache.geode.management.cli.CliMetaData;
 import org.apache.geode.management.cli.Result;
 import org.apache.geode.management.cli.Result.Status;
 import org.apache.geode.management.internal.cli.CliUtil;
@@ -37,11 +32,13 @@ import 
org.apache.geode.management.internal.configuration.domain.XmlEntity;
 import org.apache.geode.management.internal.security.ResourceOperation;
 import org.apache.geode.security.ResourcePermission.Operation;
 import org.apache.geode.security.ResourcePermission.Resource;
-
-import org.springframework.shell.core.CommandMarker;
 import org.springframework.shell.core.annotation.CliCommand;
 import org.springframework.shell.core.annotation.CliOption;
 
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicReference;
+
 /**
  * Mock Extension gfsh commands.
  * 
@@ -75,7 +72,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    * @since GemFire 8.1
    */
   @CliCommand(value = CREATE_MOCK_REGION_EXTENSION)
-  @CliMetaData(writesToSharedConfiguration = true)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
   public Result createMockRegionExtension(
       @CliOption(key = OPTION_REGION_NAME, mandatory = true) final String 
regionName,
@@ -94,7 +90,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    * @since GemFire 8.1
    */
   @CliCommand(value = ALTER_MOCK_REGION_EXTENSION)
-  @CliMetaData(writesToSharedConfiguration = true)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
   public Result alterMockRegionExtension(
       @CliOption(key = OPTION_REGION_NAME, mandatory = true) final String 
regionName,
@@ -112,7 +107,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    * @since GemFire 8.1
    */
   @CliCommand(value = DESTROY_MOCK_REGION_EXTENSION)
-  @CliMetaData(writesToSharedConfiguration = true)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
   public Result destroyMockRegionExtension(
       @CliOption(key = OPTION_REGION_NAME, mandatory = true) final String 
regionName) {
@@ -129,7 +123,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    * @since GemFire 8.1
    */
   @CliCommand(value = CREATE_MOCK_CACHE_EXTENSION)
-  @CliMetaData(writesToSharedConfiguration = true)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
   public Result createMockCacheExtension(
       @CliOption(key = OPTION_VALUE, mandatory = true) final String value) {
@@ -147,7 +140,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    */
   @CliCommand(value = ALTER_MOCK_CACHE_EXTENSION)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
-  @CliMetaData(writesToSharedConfiguration = true)
   public Result alterMockCacheExtension(
       @CliOption(key = OPTION_VALUE, mandatory = true) final String value) {
     return executeFunctionOnAllMembersTabulateResultPersist(
@@ -163,7 +155,6 @@ public class MockExtensionCommands extends 
AbstractCommandsSupport {
    */
   @CliCommand(value = DESTROY_MOCK_CACHE_EXTENSION)
   @ResourceOperation(resource = Resource.CLUSTER, operation = Operation.READ)
-  @CliMetaData(writesToSharedConfiguration = true)
   public Result destroyMockCacheExtension() {
     return executeFunctionOnAllMembersTabulateResultPersist(
         DestroyMockCacheExtensionFunction.INSTANCE, false);

Reply via email to