Repository: ambari
Updated Branches:
  refs/heads/trunk 8908d3e05 -> 57682942b


AMBARI-21406. Refresh configurations without restart command (magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/57682942
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/57682942
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/57682942

Branch: refs/heads/trunk
Commit: 57682942b7368a8de3f0a76f65e45b13c1626deb
Parents: 8908d3e
Author: Sandor Magyari <smagy...@hortonworks.com>
Authored: Fri Aug 25 14:08:55 2017 +0200
Committer: Sandor Magyari <smagy...@hortonworks.com>
Committed: Tue Oct 10 16:46:30 2017 +0200

----------------------------------------------------------------------
 .../libraries/script/script.py                  |  23 ++-
 .../AmbariCustomCommandExecutionHelper.java     |  10 ++
 .../AmbariManagementControllerImpl.java         |   4 +-
 .../ServiceComponentHostResponse.java           |  15 ++
 .../internal/HostComponentResourceProvider.java |   4 +
 .../ambari/server/metadata/ActionMetadata.java  |   1 +
 .../apache/ambari/server/stack/StackModule.java |  29 +++-
 .../ambari/server/state/ConfigHelper.java       | 164 ++++++++++++++++++-
 .../ambari/server/state/PropertyInfo.java       |  29 ++++
 .../ambari/server/state/RefreshCommand.java     |  52 ++++++
 .../state/RefreshCommandConfiguration.java      |  71 ++++++++
 .../apache/ambari/server/state/StackInfo.java   |  10 ++
 .../svccomphost/ServiceComponentHostImpl.java   |  10 ++
 .../HDFS/2.1.0.2.0/configuration/core-site.xml  |  12 ++
 .../HDFS/2.1.0.2.0/configuration/hdfs-site.xml  |   3 +
 .../HDFS/2.1.0.2.0/package/scripts/datanode.py  |  13 +-
 .../HDFS/2.1.0.2.0/package/scripts/hdfs.py      |  52 +++++-
 .../2.1.0.2.0/package/scripts/hdfs_client.py    |   5 +
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |  21 +++
 .../HDFS/2.1.0.2.0/package/scripts/namenode.py  |  21 ++-
 .../HDFS/2.1.0.2.0/package/scripts/snamenode.py |  10 ++
 .../HDFS/3.0.0.3.0/configuration/hdfs-site.xml  |   6 +
 .../HDFS/3.0.0.3.0/package/scripts/datanode.py  |  13 +-
 .../HDFS/3.0.0.3.0/package/scripts/hdfs.py      |  52 +++++-
 .../3.0.0.3.0/package/scripts/hdfs_client.py    |   5 +
 .../3.0.0.3.0/package/scripts/hdfs_namenode.py  |  20 +++
 .../HDFS/3.0.0.3.0/package/scripts/namenode.py  |  21 ++-
 .../HDFS/3.0.0.3.0/package/scripts/snamenode.py |  10 ++
 .../src/main/resources/configuration-schema.xsd |  12 ++
 .../src/main/resources/properties.json          |   1 +
 .../services/HDFS/configuration/hdfs-site.xml   |   3 +
 .../ambari/server/state/ConfigHelperTest.java   |  76 ++++++++-
 .../ambari/server/state/PropertyInfoTest.java   |  20 +++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   |  17 ++
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |  33 ++++
 .../services/HDFS/configuration/hdfs-site.xml   |   8 +
 36 files changed, 835 insertions(+), 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index bf8c0dc..12e6f98 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -1006,12 +1006,33 @@ class Script(object):
 
   def configure(self, env, upgrade_type=None, config_dir=None):
     """
-    To be overridden by subclasses
+    To be overridden by subclasses (may invoke save_configs)
     :param upgrade_type: only valid during RU/EU, otherwise will be None
    :param config_dir: for some clients during RU, the location to save configs to, otherwise None
     """
     self.fail_with_error('configure method isn\'t implemented')
 
+  def save_configs(self, env):
+    """
+    To be overridden by subclasses
+    Creates / updates configuration files
+    """
+    self.fail_with_error('save_configs method isn\'t implemented')
+
+  def reconfigure(self, env):
+    """
+    Default implementation of the RECONFIGURE action; may be overridden by subclasses
+    """
+    Logger.info("Refresh config files ...")
+    self.save_configs(env)
+
+    config = self.get_config()
+    if "reconfigureAction" in config["commandParams"] and 
config["commandParams"]["reconfigureAction"] is not None:
+      reconfigure_action = config["commandParams"]["reconfigureAction"]
+      Logger.info("Call %s" % reconfigure_action)
+      method = self.choose_method_to_execute(reconfigure_action)
+      method(env)
+
   def generate_configs_get_template_file_content(self, filename, dicts):
     config = self.get_config()
     content = ''

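With the default reconfigure() above, a component script only needs to provide save_configs() and, when the server
sends a reconfigureAction command parameter, a method of that name (looked up via choose_method_to_execute).
A minimal sketch, assuming a hypothetical MyDaemon script whose refresh command is reload_configs:

from resource_management.libraries.script.script import Script
from resource_management.core.logger import Logger

class MyDaemon(Script):
  # Rewrites the daemon's config files on disk; invoked by the default reconfigure().
  def save_configs(self, env):
    import params
    env.set_params(params)
    # write *-site.xml / env files here, same as configure() does

  # Only called when the server sets commandParams["reconfigureAction"] = "reload_configs".
  def reload_configs(self, env):
    import params
    env.set_params(params)
    Logger.info("Asking the running daemon to pick up the changed properties")
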
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
index d0dd7e0..e12477e 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariCustomCommandExecutionHelper.java
@@ -89,6 +89,7 @@ import org.apache.ambari.server.state.HostState;
 import org.apache.ambari.server.state.MaintenanceState;
 import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.PropertyInfo.PropertyType;
+import org.apache.ambari.server.state.RefreshCommandConfiguration;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.ServiceComponent;
@@ -507,6 +508,15 @@ public class AmbariCustomCommandExecutionHelper {
       StageUtils.useAmbariJdkInCommandParams(commandParams, configs);
       roleParams.put(COMPONENT_CATEGORY, componentInfo.getCategory());
 
+      // set reconfigureAction in case of a RECONFIGURE command, if there is any
+      if (commandName.equals("RECONFIGURE")) {
+        String refreshConfigsCommand = configHelper.getRefreshConfigsCommand(cluster, hostName, serviceName, componentName);
+        if (refreshConfigsCommand != null && !refreshConfigsCommand.equals(RefreshCommandConfiguration.REFRESH_CONFIGS)) {
+          LOG.info("Refreshing configs for {}/{} with command: {}", componentName, hostName, refreshConfigsCommand);
+          commandParams.put("reconfigureAction", refreshConfigsCommand);
+        }
+      }
+
       execCmd.setCommandParams(commandParams);
       execCmd.setRoleParams(roleParams);
 

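For context, RECONFIGURE is submitted like any other custom command via the requests API; the sketch below is an
illustration only (server address, credentials and cluster/host names are placeholders, and the body follows the
usual custom-command request format):

import json, requests

body = {
  "RequestInfo": {
    "command": "RECONFIGURE",
    "context": "Reconfigure NAMENODE configs without restart"
  },
  "Requests/resource_filters": [{
    "service_name": "HDFS",
    "component_name": "NAMENODE",
    "hosts": "c6401.ambari.apache.org"
  }]
}
requests.post("http://ambari-server:8080/api/v1/clusters/c1/requests",
              data=json.dumps(body),
              auth=("admin", "admin"),
              headers={"X-Requested-By": "ambari"})
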
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 5642575..8c4888c 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -4795,7 +4795,9 @@ public class AmbariManagementControllerImpl implements 
AmbariManagementControlle
       properties = ambariMetaInfo.getServiceProperties(stackName, 
stackVersion, serviceName);
     }
     for (PropertyInfo property: properties) {
-      response.add(property.convertToResponse());
+      if (property.shouldBeConfigured()) {
+        response.add(property.convertToResponse());
+      }
     }
 
     return response;

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostResponse.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostResponse.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostResponse.java
index 7b75e06..bc67117 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostResponse.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/ServiceComponentHostResponse.java
@@ -40,6 +40,7 @@ public class ServiceComponentHostResponse {
   private String desiredRepositoryVersion;
   private String desiredState;
   private boolean staleConfig = false;
+  private boolean reloadConfig = false;
   private String adminState = null;
   private String maintenanceState = null;
   private UpgradeState upgradeState = UpgradeState.NONE;
@@ -283,6 +284,20 @@ public class ServiceComponentHostResponse {
   }
 
   /**
+   * @return true if configs are reloadable without RESTART command
+   */
+  public boolean isReloadConfig() {
+    return reloadConfig;
+  }
+
+  /**
+   * @param reloadConfig
+   */
+  public void setReloadConfig(boolean reloadConfig) {
+    this.reloadConfig = reloadConfig;
+  }
+
+  /**
    * @return the maintenance state
    */
   public String getMaintenanceState() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
index 48e15eb..6708560 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/HostComponentResourceProvider.java
@@ -106,6 +106,8 @@ public class HostComponentResourceProvider extends 
AbstractControllerResourcePro
       = PropertyHelper.getPropertyId("HostRoles", "actual_configs");
   public static final String HOST_COMPONENT_STALE_CONFIGS_PROPERTY_ID
       = PropertyHelper.getPropertyId("HostRoles", "stale_configs");
+  public static final String HOST_COMPONENT_RELOAD_CONFIGS_PROPERTY_ID
+      = PropertyHelper.getPropertyId("HostRoles", "reload_configs");
   public static final String HOST_COMPONENT_DESIRED_ADMIN_STATE_PROPERTY_ID
       = PropertyHelper.getPropertyId("HostRoles", "desired_admin_state");
   public static final String HOST_COMPONENT_MAINTENANCE_STATE_PROPERTY_ID
@@ -244,6 +246,8 @@ public class HostComponentResourceProvider extends 
AbstractControllerResourcePro
               response.getActualConfigs(), requestedIds);
       setResourceProperty(resource, HOST_COMPONENT_STALE_CONFIGS_PROPERTY_ID,
               response.isStaleConfig(), requestedIds);
+      setResourceProperty(resource, HOST_COMPONENT_RELOAD_CONFIGS_PROPERTY_ID,
+              response.isReloadConfig(), requestedIds);
       setResourceProperty(resource, HOST_COMPONENT_UPGRADE_STATE_PROPERTY_ID,
               response.getUpgradeState(), requestedIds);
       setResourceProperty(resource, HOST_COMPONENT_DESIRED_REPOSITORY_VERSION,

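The new HostRoles/reload_configs flag sits next to stale_configs on host_component resources, so a client can check
whether a plain refresh is enough before deciding on a restart. An illustrative query (endpoint and credentials are
placeholders):

import requests

url = ("http://ambari-server:8080/api/v1/clusters/c1"
       "/hosts/c6401.ambari.apache.org/host_components/NAMENODE"
       "?fields=HostRoles/stale_configs,HostRoles/reload_configs")
resp = requests.get(url, auth=("admin", "admin"),
                    headers={"X-Requested-By": "ambari"})
host_roles = resp.json()["HostRoles"]
print(host_roles["stale_configs"], host_roles["reload_configs"])
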
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
index e0bfdcf..33dc0e8 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/metadata/ActionMetadata.java
@@ -65,6 +65,7 @@ public class ActionMetadata {
     defaultHostComponentCommands.add("CONFIGURE");
     defaultHostComponentCommands.add("CONFIGURE_FUNCTION");
     defaultHostComponentCommands.add("DISABLE_SECURITY");
+    defaultHostComponentCommands.add("RECONFIGURE");
   }
 
   private void fillServiceClients() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java 
b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
index e88bbf2..520764d 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/stack/StackModule.java
@@ -38,6 +38,7 @@ import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.ExtensionInfo;
 import org.apache.ambari.server.state.PropertyDependencyInfo;
 import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.RefreshCommand;
 import org.apache.ambari.server.state.RepositoryInfo;
 import org.apache.ambari.server.state.ServiceInfo;
 import org.apache.ambari.server.state.StackInfo;
@@ -579,6 +580,7 @@ public class StackModule extends BaseModule<StackModule, 
StackInfo> implements V
       }
       // Read the service and available configs for this stack
       populateServices();
+
       if (!stackInfo.isValid()) {
         setValid(false);
         addErrors(stackInfo.getErrors());
@@ -627,7 +629,7 @@ public class StackModule extends BaseModule<StackModule, 
StackInfo> implements V
     for (ServiceInfo serviceInfo : serviceInfos) {
       ServiceModule serviceModule = new ServiceModule(stackContext, 
serviceInfo, serviceDirectory);
       serviceModules.add(serviceModule);
-      if (!serviceModule.isValid()){
+      if (!serviceModule.isValid()) {
         stackInfo.setValid(false);
         setValid(false);
         stackInfo.addErrors(serviceModule.getErrors());
@@ -769,7 +771,11 @@ public class StackModule extends BaseModule<StackModule, 
StackInfo> implements V
     // relationship into map. Since we do not have the reverse {@link 
PropertyInfo},
     // we have to loop through service-configs again later.
     for (ServiceModule serviceModule : serviceModules.values()) {
+
+      Map<String, Map<String, String>> componentRefreshCommandsMap = new HashMap<>();
+
       for (PropertyInfo pi : serviceModule.getModuleInfo().getProperties()) {
+
         for (PropertyDependencyInfo pdi : pi.getDependsOnProperties()) {
           String type = ConfigHelper.fileNameToConfigType(pi.getFilename());
           String name = pi.getName();
@@ -784,7 +790,28 @@ public class StackModule extends BaseModule<StackModule, 
StackInfo> implements V
             dependedByMap.put(pdi, newDependenciesSet);
           }
         }
+
+        // set refresh commands
+        if (pi.getSupportedRefreshCommands() != null && pi.getSupportedRefreshCommands().size() > 0) {
+          String type = ConfigHelper.fileNameToConfigType(pi.getFilename());
+          String propertyName = type + "/" + pi.getName();
+
+          Map<String, String> refreshCommandPropertyMap = componentRefreshCommandsMap.get(propertyName);
+
+          for (RefreshCommand refreshCommand : pi.getSupportedRefreshCommands()) {
+            String componentName = refreshCommand.getComponentName();
+            if (refreshCommandPropertyMap == null) {
+              refreshCommandPropertyMap = new HashMap<>();
+              componentRefreshCommandsMap.put(propertyName, refreshCommandPropertyMap);
+            }
+            refreshCommandPropertyMap.put(componentName, refreshCommand.getCommand());
+          }
+
+        }
+
       }
+
+      stackInfo.getRefreshCommandConfiguration().addRefreshCommands(componentRefreshCommandsMap);
     }
 
     // Go through all service-configs again and set their 'depended-by' if 
necessary.

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index bb7fcbe..eade914 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -17,12 +17,15 @@
  */
 package org.apache.ambari.server.state;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
@@ -41,6 +44,7 @@ import 
org.apache.ambari.server.state.PropertyInfo.PropertyType;
 import org.apache.ambari.server.state.configgroup.ConfigGroup;
 import org.apache.ambari.server.utils.SecretReference;
 import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.math.NumberUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -72,6 +76,8 @@ public class ConfigHelper {
    */
   private final Cache<Integer, Boolean> staleConfigsCache;
 
+  private final Cache<Integer, String> refreshConfigCommandCache;
+
   private static final Logger LOG =
       LoggerFactory.getLogger(ConfigHelper.class);
 
@@ -113,6 +119,9 @@ public class ConfigHelper {
     STALE_CONFIGS_CACHE_EXPIRATION_TIME = 
configuration.staleConfigCacheExpiration();
     staleConfigsCache = CacheBuilder.newBuilder().
         expireAfterWrite(STALE_CONFIGS_CACHE_EXPIRATION_TIME, 
TimeUnit.SECONDS).build();
+
+    refreshConfigCommandCache = CacheBuilder.newBuilder().
+            expireAfterWrite(STALE_CONFIGS_CACHE_EXPIRATION_TIME, TimeUnit.SECONDS).build();
   }
 
   /**
@@ -1302,6 +1311,8 @@ public class ConfigHelper {
 
     StackId stackId = sch.getServiceComponent().getDesiredStackId();
 
+    StackInfo stackInfo = ambariMetaInfo.getStack(stackId);
+
     ServiceInfo serviceInfo = ambariMetaInfo.getService(stackId.getStackName(),
             stackId.getStackVersion(), sch.getServiceName());
 
@@ -1316,8 +1327,10 @@ public class ConfigHelper {
     // ---- merge values, determine changed keys, check stack: stale
 
     Iterator<Entry<String, Map<String, String>>> it = 
desired.entrySet().iterator();
+    List<String> changedProperties = new LinkedList<>();
 
-    while (it.hasNext() && !stale) {
+    while (it.hasNext()) {
+      boolean staleEntry = false;
       Entry<String, Map<String, String>> desiredEntry = it.next();
 
       String type = desiredEntry.getKey();
@@ -1325,29 +1338,108 @@ public class ConfigHelper {
 
       if (!actual.containsKey(type)) {
         // desired is set, but actual is not
-        if (!serviceInfo.hasConfigDependency(type)) {
-          stale = componentInfo != null && componentInfo.hasConfigType(type);
-        } else {
-          stale = true;
-        }
+        staleEntry = (serviceInfo.hasConfigDependency(type) || componentInfo.hasConfigType(type));
       } else {
         // desired and actual both define the type
         HostConfig hc = actual.get(type);
         Map<String, String> actualTags = buildTags(hc);
 
         if (!isTagChanged(tags, actualTags, 
hasGroupSpecificConfigsForType(cluster, sch.getHostName(), type))) {
-          stale = false;
+          staleEntry = false;
         } else {
-          stale = serviceInfo.hasConfigDependency(type) || componentInfo.hasConfigType(type);
+          staleEntry = (serviceInfo.hasConfigDependency(type) || componentInfo.hasConfigType(type));
+          if (staleEntry) {
+            Collection<String> changedKeys = findChangedKeys(cluster, type, tags.values(), actualTags.values());
+            changedProperties.addAll(changedKeys);
+          }
         }
       }
+      stale = stale | staleEntry;
     }
+    
+    String refreshCommand = calculateRefreshCommand(stackInfo.getRefreshCommandConfiguration(), sch, changedProperties);
+
     if (STALE_CONFIGS_CACHE_ENABLED) {
       staleConfigsCache.put(staleHash, stale);
+      if (refreshCommand != null) {
+        refreshConfigCommandCache.put(staleHash, refreshCommand);
+      }
     }
+
+    // gather all changed properties and see if we can find a common refreshConfigs command for this component
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Changed properties {} ({}) {} :  COMMAND: {}", stale, sch.getServiceComponentName(), sch.getHostName(), refreshCommand);
+      for (String p : changedProperties) {
+        LOG.debug(p);
+      }
+    }
+
     return stale;
   }
 
+  public String getRefreshConfigsCommand(Cluster cluster, String hostName, String serviceName, String componentName) throws AmbariException {
+    ServiceComponent serviceComponent = cluster.getService(serviceName).getServiceComponent(componentName);
+    ServiceComponentHost sch = serviceComponent.getServiceComponentHost(hostName);
+    return getRefreshConfigsCommand(cluster, sch);
+  }
+
+  public String getRefreshConfigsCommand(Cluster cluster, ServiceComponentHost sch) throws AmbariException {
+    String refreshCommand = null;
+
+    Map<String, HostConfig> actual = sch.getActualConfigs();
+    if (STALE_CONFIGS_CACHE_ENABLED) {
+      Map<String, Map<String, String>> desired = getEffectiveDesiredTags(cluster, sch.getHostName(),
+              cluster.getDesiredConfigs());
+      int staleHash = Objects.hashCode(actual.hashCode(),
+              desired.hashCode(),
+              sch.getHostName(),
+              sch.getServiceComponentName(),
+              sch.getServiceName());
+      refreshCommand = refreshConfigCommandCache.getIfPresent(staleHash);
+    }
+    return refreshCommand;
+  }
+
+
+  /**
+   * Calculates the refresh command for a set of changed properties as follows:
+   *  - if any property has no refresh command, return null
+   *  - in case of multiple refresh commands: REFRESH_CONFIGS is executed implicitly along with any other command,
+   *    so it can be overridden by RELOAD_CONFIGS or any other custom command; however, if two different commands
+   *    other than REFRESH_CONFIGS are required, return null, as it's not possible to refresh all properties with one command.
+   *
+   *  examples:
+   *     {REFRESH_CONFIGS, REFRESH_CONFIGS, RELOAD_CONFIGS} ==> RELOAD_CONFIGS
+   *     {REFRESH_CONFIGS, RELOADPROXYUSERS, RELOAD_CONFIGS} ==> null
+   *
+   * @param refreshCommandConfiguration
+   * @param sch
+   * @param changedProperties
+   * @return
+   */
+  private String calculateRefreshCommand(RefreshCommandConfiguration refreshCommandConfiguration,
+                                         ServiceComponentHost sch, List<String> changedProperties) {
+
+    String finalRefreshCommand = null;
+    for (String propertyName : changedProperties) {
+      String refreshCommand = refreshCommandConfiguration.getRefreshCommandForComponent(sch, propertyName);
+      if (refreshCommand == null) {
+        return null;
+      }
+      if (finalRefreshCommand == null) {
+        finalRefreshCommand = refreshCommand;
+      }
+      if (!finalRefreshCommand.equals(refreshCommand)) {
+        if (finalRefreshCommand.equals(RefreshCommandConfiguration.REFRESH_CONFIGS)) {
+          finalRefreshCommand = refreshCommand;
+        } else if (!refreshCommand.equals(RefreshCommandConfiguration.REFRESH_CONFIGS)) {
+          return null;
+        }
+      }
+    }
+    return finalRefreshCommand;
+  }
+
   /**
    * Determines if the hostname has group specific configs for the type 
specified
    *
@@ -1374,6 +1466,62 @@ public class ConfigHelper {
   }
 
   /**
+   * @return the keys that have changed values
+   */
+  private Collection<String> findChangedKeys(Cluster cluster, String type,
+                                             Collection<String> desiredTags, Collection<String> actualTags) {
+
+    Map<String, String> desiredValues = new HashMap<>();
+    Map<String, String> actualValues = new HashMap<>();
+
+    for (String tag : desiredTags) {
+      Config config = cluster.getConfig(type, tag);
+      if (null != config) {
+        desiredValues.putAll(config.getProperties());
+      }
+    }
+
+    for (String tag : actualTags) {
+      Config config = cluster.getConfig(type, tag);
+      if (null != config) {
+        actualValues.putAll(config.getProperties());
+      }
+    }
+
+    List<String> keys = new ArrayList<>();
+
+    for (Entry<String, String> entry : desiredValues.entrySet()) {
+      String key = entry.getKey();
+      String value = entry.getValue();
+
+      if (!actualValues.containsKey(key) || !valuesAreEqual(actualValues.get(key), value)) {
+        keys.add(type + "/" + key);
+      }
+    }
+
+    return keys;
+  }
+
+  /**
+   * Compares values as double in case they are numbers.
+   * @param actualValue
+   * @param newValue
+   * @return
+   */
+  private boolean valuesAreEqual(String actualValue, String newValue) {
+    boolean actualValueIsNumber = NumberUtils.isNumber(actualValue);
+    boolean newValueIsNumber = NumberUtils.isNumber(newValue);
+    if (actualValueIsNumber && newValueIsNumber) {
+      Double ab = Double.parseDouble(actualValue);
+      Double bb = Double.parseDouble(newValue);
+      return ab.equals(bb);
+    } else if (!actualValueIsNumber && !newValueIsNumber) {
+      return actualValue.equals(newValue);
+    }
+    return false;
+  }
+
+  /**
    * @return the map of tags for a desired config
    */
   private Map<String, String> buildTags(HostConfig hc) {

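To make the merging rule above concrete, here is a small Python sketch of the same logic (illustration only, not
part of the patch), reproducing the two examples from the javadoc of calculateRefreshCommand():

REFRESH_CONFIGS = "refresh_configs"

def merge_refresh_commands(commands):
  # None means some property has no refresh command at all -> no single command fits.
  final = None
  for command in commands:
    if command is None:
      return None
    if final is None:
      final = command
    elif final != command:
      if final == REFRESH_CONFIGS:
        final = command
      elif command != REFRESH_CONFIGS:
        return None
  return final

assert merge_refresh_commands(["refresh_configs", "refresh_configs", "reload_configs"]) == "reload_configs"
assert merge_refresh_commands(["refresh_configs", "reloadproxyusers", "reload_configs"]) is None
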
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java 
b/ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java
index 63c850e..31fcb9d 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/PropertyInfo.java
@@ -90,6 +90,11 @@ public class PropertyInfo {
   private Set<PropertyDependencyInfo> usedByProperties =
           new HashSet<>();
 
+  @XmlElementWrapper(name="supported-refresh-commands")
+  @XmlElement(name="refresh-command")
+  private Set<RefreshCommand> supportedRefreshCommands = new HashSet<>();
+
+
   //This method is called after all the properties (except IDREF) are 
unmarshalled for this object,
   //but before this object is set to the parent object.
   void afterUnmarshal(Unmarshaller unmarshaller, Object parent) {
@@ -209,6 +214,30 @@ public class PropertyInfo {
     this.requireInput = requireInput;
   }
 
+  public List<Element> getPropertyAttributes() {
+    return propertyAttributes;
+  }
+
+  public void setPropertyAttributes(List<Element> propertyAttributes) {
+    this.propertyAttributes = propertyAttributes;
+  }
+
+  public Set<RefreshCommand> getSupportedRefreshCommands() {
+    return supportedRefreshCommands;
+  }
+
+  public void setSupportedRefreshCommands(Set<RefreshCommand> supportedRefreshCommands) {
+    this.supportedRefreshCommands = supportedRefreshCommands;
+  }
+
+  /**
+   * Wildcard properties should not be included in stack configurations.
+   * @return
+   */
+  public boolean shouldBeConfigured() {
+    return !getName().contains("*");
+  }
+
   @Override
   public int hashCode() {
     final int prime = 31;

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommand.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommand.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommand.java
new file mode 100644
index 0000000..e09a875
--- /dev/null
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommand.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state;
+
+import javax.xml.bind.annotation.XmlAttribute;
+
+/**
+ * Represents a RefreshCommand defined for a component and a property.
+ */
+public class RefreshCommand {
+
+  @XmlAttribute(name="componentName", required = true)
+  private String componentName;
+
+  /**
+   * Default command is reload_configs.
+   */
+  @XmlAttribute(name="command", required = false)
+  private String command = RefreshCommandConfiguration.RELOAD_CONFIGS;
+
+  public RefreshCommand() {
+  }
+
+  public RefreshCommand(String componentName, String command) {
+    this.componentName = componentName;
+    this.command = command;
+  }
+
+  public String getComponentName() {
+    return componentName;
+  }
+
+  public String getCommand() {
+    return command;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommandConfiguration.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommandConfiguration.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommandConfiguration.java
new file mode 100644
index 0000000..5999c6c
--- /dev/null
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/RefreshCommandConfiguration.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.state;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class RefreshCommandConfiguration {
+
+  public static final String RELOAD_CONFIGS = "reload_configs";
+  public static final String REFRESH_CONFIGS = "refresh_configs";
+
+  private Map<String, Map<String, String>> propertyComponentCommandMap;
+
+  public RefreshCommandConfiguration() {
+  }
+
+  private String findKey(String propertyName) {
+    for (String keyName : propertyComponentCommandMap.keySet()) {
+      if (propertyName.startsWith(keyName)) {
+        return keyName;
+      }
+    }
+    return null;
+  }
+
+  /**
+   * If no command is defined for a component, the default command will be REFRESH_CONFIGS in case of a client
+   * component, or if there is only one command defined for another component. This is because if RELOAD_CONFIGS
+   * is defined for NAMENODE, then presumably other dependent components will need just a refresh.
+   */
+  public String getRefreshCommandForComponent(ServiceComponentHost sch, String propertyName) {
+    if (sch.isClientComponent()) {
+      return REFRESH_CONFIGS;
+    }
+    String keyName = findKey(propertyName);
+    Map<String, String> componentCommandMap = propertyComponentCommandMap.get(keyName);
+    if (componentCommandMap != null) {
+      String commandForComponent = componentCommandMap.get(sch.getServiceComponentName());
+      if (commandForComponent != null) {
+        return commandForComponent;
+      } else if (componentCommandMap.size() == 1) {
+        return REFRESH_CONFIGS;
+      }
+    }
+    return null;
+  }
+
+  public void addRefreshCommands(Map<String, Map<String, String>> refreshCommands) {
+    if (propertyComponentCommandMap == null) {
+      propertyComponentCommandMap = new HashMap<>();
+    }
+    propertyComponentCommandMap.putAll(refreshCommands);
+  }
+
+}

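For illustration (not part of the patch), the nested map that StackModule builds from <supported-refresh-commands>
for the properties touched in this patch looks roughly like this; getRefreshCommandForComponent() then returns
REFRESH_CONFIGS for client components, the mapped command for the named component, and REFRESH_CONFIGS for other
components when exactly one command is defined:

# "config-type/property-name" -> { component -> refresh command } (values taken from the HDFS config XML below)
property_component_command_map = {
  "hdfs-site/dfs.heartbeat.interval": {"NAMENODE": "reload_configs"},
  "core-site/hadoop.proxyuser.*":     {"NAMENODE": "reloadproxyusers"},
}
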
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java 
b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
index c32e907..70d5926 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/state/StackInfo.java
@@ -90,6 +90,8 @@ public class StackInfo implements Comparable<StackInfo>, 
Validable {
   * */
   private List<String> servicesWithNoConfigs = new ArrayList<>();
 
+  private RefreshCommandConfiguration refreshCommandConfiguration = new RefreshCommandConfiguration();
+
   public String getMinJdk() {
     return minJdk;
   }
@@ -604,4 +606,12 @@ public class StackInfo implements Comparable<StackInfo>, 
Validable {
   public VersionDefinitionXml getLatestVersionDefinition() {
     return latestVersion;
   }
+
+  public RefreshCommandConfiguration getRefreshCommandConfiguration() {
+    return refreshCommandConfiguration;
+  }
+
+  public void setRefreshCommandConfiguration(RefreshCommandConfiguration refreshCommandConfiguration) {
+    this.refreshCommandConfiguration = refreshCommandConfiguration;
+  }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
index f490ff0..3b8f6da 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/svccomphost/ServiceComponentHostImpl.java
@@ -1181,6 +1181,16 @@ public class ServiceComponentHostImpl implements 
ServiceComponentHost {
       LOG.error("Could not determine stale config", e);
     }
 
+    try {
+      Cluster cluster = clusters.getCluster(clusterName);
+      ServiceComponent serviceComponent = cluster.getService(serviceName).getServiceComponent(serviceComponentName);
+      ServiceComponentHost sch = serviceComponent.getServiceComponentHost(hostName);
+      String refreshConfigsCommand = helper.getRefreshConfigsCommand(cluster, sch);
+      r.setReloadConfig(refreshConfigsCommand != null);
+    } catch (Exception e) {
+      LOG.error("Could not determine reload config flag", e);
+    }
+
     return r;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/core-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/core-site.xml
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/core-site.xml
index 5c6f043..d39ea78 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/core-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/core-site.xml
@@ -185,4 +185,16 @@ DEFAULT
     </description>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>hadoop.proxyuser.*</name>
+    <value/>
+    <description>
+      This * property is not configured; it is used only to define refresh commands for all properties
+      prefixed with hadoop.proxyuser.
+    </description>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reloadproxyusers" />
+    </supported-refresh-commands>
+    <on-ambari-upgrade add="false"/>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
index 7fdc227..d97a52e 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/configuration/hdfs-site.xml
@@ -184,6 +184,9 @@
     <value>3</value>
     <description>Determines datanode heartbeat interval in 
seconds.</description>
     <on-ambari-upgrade add="true"/>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reload_configs" />
+    </supported-refresh-commands>
   </property>
   <property>
     <name>dfs.namenode.safemode.threshold-pct</name>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
index 0aa0bc0..c0abb15 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/datanode.py
@@ -31,7 +31,7 @@ from resource_management.libraries.functions.decorator import 
retry
 from resource_management.libraries.functions.security_commons import 
build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, 
validate_security_config_properties, FILE_TYPE_XML
 from resource_management.core.logger import Logger
-from hdfs import hdfs
+from hdfs import hdfs, reconfig
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 from utils import get_hdfs_binary
@@ -57,6 +57,17 @@ class DataNode(Script):
     hdfs("datanode")
     datanode(action="configure")
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs("datanode")
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+    reconfig("datanode", params.dfs_dn_ipc_address)
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
index 07c7616..4022986 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs.py
@@ -20,12 +20,16 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.core.resources.system import Directory, File, Link
+from resource_management.core.resources.system import Execute, Directory, File, Link
 from resource_management.core.resources import Package
 from resource_management.core.source import Template
 from resource_management.core.resources.service import ServiceConfig
 from resource_management.libraries.resources.xml_config import XmlConfig
+
 from resource_management.libraries.functions.get_lzo_packages import 
get_lzo_packages
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions.format import format
 import os
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
@@ -157,6 +161,52 @@ def install_snappy():
        to=params.so_src_x64,
   )
 
+class ConfigStatusParser():
+    def __init__(self):
+        self.reconfig_successful = False
+
+    def handle_new_line(self, line, is_stderr):
+        if is_stderr:
+            return
+
+        if line.startswith('SUCCESS: Changed property'):
+            self.reconfig_successful = True
+
+        Logger.info('[reconfig] %s' % (line))
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def reconfig(componentName, componentAddress):
+    import params
+
+    if params.security_enabled:
+        Execute(params.nn_kinit_cmd,
+                user=params.hdfs_user
+                )
+
+    nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} start')
+
+    Execute (nn_reconfig_cmd,
+             user=params.hdfs_user,
+             logoutput=True,
+             path=params.hadoop_bin_dir
+             )
+
+    nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} status')
+    config_status_parser = ConfigStatusParser()
+    Execute (nn_reconfig_cmd,
+             user=params.hdfs_user,
+             logoutput=False,
+             path=params.hadoop_bin_dir,
+             on_new_line=config_status_parser.handle_new_line
+             )
+
+
+    if not config_status_parser.reconfig_successful:
+        Logger.info('Reconfiguration failed')
+        raise Fail('Reconfiguration failed!')
+
+    Logger.info('Reconfiguration successfully completed.')
+
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def hdfs(component=None):
   import params

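The reconfig() helper above runs hdfs dfsadmin -reconfig <component> <address> start, then checks the status
sub-command and treats the operation as successful only if a stdout line starts with 'SUCCESS: Changed property'.
A minimal illustration of the parser, assuming the ConfigStatusParser class above is in scope (the sample line is
made up):

parser = ConfigStatusParser()
parser.handle_new_line("SUCCESS: Changed property dfs.heartbeat.interval", is_stderr=False)
parser.handle_new_line("some stderr noise", is_stderr=True)   # stderr lines are ignored
assert parser.reconfig_successful
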
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
index 0896f30..f2e96c3 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_client.py
@@ -42,6 +42,11 @@ class HdfsClient(Script):
     env.set_params(params)
     hdfs()
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs()
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index cac6e9c..2224f72 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -430,6 +430,27 @@ def is_namenode_formatted(params):
 
   return False
 
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def refreshProxyUsers():
+  import params
+
+  if params.security_enabled:
+    Execute(params.nn_kinit_cmd,
+            user=params.hdfs_user
+            )
+
+  if params.dfs_ha_enabled:
+    # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
+    # need to execute each command scoped to a particular namenode
+    nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshSuperUserGroupsConfiguration')
+  else:
+    nn_refresh_cmd = format('dfsadmin -fs {namenode_address} -refreshSuperUserGroupsConfiguration')
+  ExecuteHadoop(nn_refresh_cmd,
+                user=params.hdfs_user,
+                conf_dir=params.hadoop_conf_dir,
+                bin_dir=params.hadoop_bin_dir)
+
 @OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
 def decommission():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
index 50bf1e0..291da05 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/namenode.py
@@ -46,8 +46,8 @@ from ambari_commons import OSConst
 
 
 import namenode_upgrade
-from hdfs_namenode import namenode, wait_for_safemode_off
-from hdfs import hdfs
+from hdfs_namenode import namenode, wait_for_safemode_off, refreshProxyUsers
+from hdfs import hdfs, reconfig
 import hdfs_rebalance
 from utils import initiate_safe_zkfc_failover, get_hdfs_binary, 
get_dfsadmin_base_command
 
@@ -86,6 +86,23 @@ class NameNode(Script):
     hdfs_binary = self.get_hdfs_binary()
     namenode(action="configure", hdfs_binary=hdfs_binary, env=env)
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs()
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+    reconfig("namenode", params.namenode_address)
+
+  def reloadproxyusers(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD HDFS PROXY USERS")
+    refreshProxyUsers()
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
index 4977e1c..3d387b4 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/snamenode.py
@@ -44,6 +44,16 @@ class SNameNode(Script):
     hdfs("secondarynamenode")
     snamenode(action="configure")
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs("secondarynamenode")
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
index 5c28527..940f87c 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/configuration/hdfs-site.xml
@@ -181,6 +181,9 @@
     <value>3</value>
     <description>Determines datanode heartbeat interval in 
seconds.</description>
     <on-ambari-upgrade add="false"/>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reload_configs" />
+    </supported-refresh-commands>
   </property>
   <property>
     <name>dfs.namenode.safemode.threshold-pct</name>
@@ -637,5 +640,8 @@
     <name>hadoop.caller.context.enabled</name>
     <value>true</value>
     <on-ambari-upgrade add="false"/>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reload_configs" />
+    </supported-refresh-commands>
   </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
index d8fb361..a843374 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/datanode.py
@@ -25,7 +25,7 @@ from resource_management.libraries.functions.stack_features 
import check_stack_f
 from resource_management.libraries.functions.security_commons import 
build_expectations, \
   cached_kinit_executor, get_params_from_filesystem, 
validate_security_config_properties, FILE_TYPE_XML
 from resource_management.core.logger import Logger
-from hdfs import hdfs
+from hdfs import hdfs, reconfig
 from ambari_commons.os_family_impl import OsFamilyImpl
 from ambari_commons import OSConst
 from utils import get_hdfs_binary
@@ -50,6 +50,17 @@ class DataNode(Script):
     hdfs("datanode")
     datanode(action="configure")
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs("datanode")
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+    reconfig("datanode", params.dfs_dn_ipc_address)
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
index 07c7616..4022986 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs.py
@@ -20,12 +20,16 @@ Ambari Agent
 """
 
 from resource_management.libraries.script.script import Script
-from resource_management.core.resources.system import Directory, File, Link
+from resource_management.core.resources.system import Execute, Directory, File, Link
 from resource_management.core.resources import Package
 from resource_management.core.source import Template
 from resource_management.core.resources.service import ServiceConfig
 from resource_management.libraries.resources.xml_config import XmlConfig
+
 from resource_management.libraries.functions.get_lzo_packages import 
get_lzo_packages
+from resource_management.core.exceptions import Fail
+from resource_management.core.logger import Logger
+from resource_management.libraries.functions.format import format
 import os
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
@@ -157,6 +161,52 @@ def install_snappy():
        to=params.so_src_x64,
   )
 
+class ConfigStatusParser():
+    def __init__(self):
+        self.reconfig_successful = False
+
+    def handle_new_line(self, line, is_stderr):
+        if is_stderr:
+            return
+
+        if line.startswith('SUCCESS: Changed property'):
+            self.reconfig_successful = True
+
+        Logger.info('[reconfig] %s' % (line))
+
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def reconfig(componentName, componentAddress):
+    import params
+
+    if params.security_enabled:
+        Execute(params.nn_kinit_cmd,
+                user=params.hdfs_user
+                )
+
+    nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} start')
+
+    Execute (nn_reconfig_cmd,
+             user=params.hdfs_user,
+             logoutput=True,
+             path=params.hadoop_bin_dir
+             )
+
+    nn_reconfig_cmd = format('hdfs --config {hadoop_conf_dir} dfsadmin -reconfig {componentName} {componentAddress} status')
+    config_status_parser = ConfigStatusParser()
+    Execute (nn_reconfig_cmd,
+             user=params.hdfs_user,
+             logoutput=False,
+             path=params.hadoop_bin_dir,
+             on_new_line=config_status_parser.handle_new_line
+             )
+
+
+    if not config_status_parser.reconfig_successful:
+        Logger.info('Reconfiguration failed')
+        raise Fail('Reconfiguration failed!')
+
+    Logger.info('Reconfiguration successfully completed.')
+
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def hdfs(component=None):
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
index 0896f30..f2e96c3 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_client.py
@@ -42,6 +42,11 @@ class HdfsClient(Script):
     env.set_params(params)
     hdfs()
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs()
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
index 5a1f368..94cd66c 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/hdfs_namenode.py
@@ -460,6 +460,26 @@ def decommission():
                   conf_dir=conf_dir,
                   bin_dir=params.hadoop_bin_dir)
 
+@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
+def refreshProxyUsers():
+  import params
+
+  if params.security_enabled:
+    Execute(params.nn_kinit_cmd,
+            user=params.hdfs_user
+            )
+
+  if params.dfs_ha_enabled:
+    # due to a bug in hdfs, refreshNodes will not run on both namenodes so we
+    # need to execute each command scoped to a particular namenode
+    nn_refresh_cmd = format('dfsadmin -fs hdfs://{namenode_rpc} -refreshSuperUserGroupsConfiguration')
+  else:
+    nn_refresh_cmd = format('dfsadmin -fs {namenode_address} -refreshSuperUserGroupsConfiguration')
+  ExecuteHadoop(nn_refresh_cmd,
+                user=params.hdfs_user,
+                conf_dir=params.hadoop_conf_dir,
+                bin_dir=params.hadoop_bin_dir)
+
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
 def decommission():
   import params

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
index 7a0e784..ffdafb8 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/namenode.py
@@ -46,8 +46,8 @@ from ambari_commons import OSConst
 
 
 import namenode_upgrade
-from hdfs_namenode import namenode, wait_for_safemode_off
-from hdfs import hdfs
+from hdfs_namenode import namenode, wait_for_safemode_off, refreshProxyUsers
+from hdfs import hdfs, reconfig
 import hdfs_rebalance
 from utils import initiate_safe_zkfc_failover, get_hdfs_binary, get_dfsadmin_base_command
 
@@ -86,6 +86,23 @@ class NameNode(Script):
     hdfs_binary = self.get_hdfs_binary()
     namenode(action="configure", hdfs_binary=hdfs_binary, env=env)
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs()
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+    reconfig("namenode", params.namenode_address)
+
+  def reloadproxyusers(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD HDFS PROXY USERS")
+    refreshProxyUsers()
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

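The save_configs, reload_configs and reloadproxyusers methods added above are custom-command entry points on the NAMENODE host component. A rough sketch of triggering one of them through the Ambari REST API, assuming the server-side command name is RELOAD_CONFIGS (the command name, credentials, cluster and host below are illustrative, not taken from this patch):

    # Sketch only: POST a custom command against a NAMENODE host component.
    import json, requests

    payload = {
        "RequestInfo": {"command": "RELOAD_CONFIGS", "context": "Reload NameNode configs"},
        "Requests/resource_filters": [{
            "service_name": "HDFS",
            "component_name": "NAMENODE",
            "hosts": "c6401.ambari.apache.org"
        }]
    }
    requests.post("http://ambari-server:8080/api/v1/clusters/c1/requests",
                  auth=("admin", "admin"),
                  headers={"X-Requested-By": "ambari"},
                  data=json.dumps(payload))
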
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
index f5ff3e1..b0ed533 100644
--- 
a/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
+++ 
b/ambari-server/src/main/resources/common-services/HDFS/3.0.0.3.0/package/scripts/snamenode.py
@@ -44,6 +44,16 @@ class SNameNode(Script):
     hdfs("secondarynamenode")
     snamenode(action="configure")
 
+  def save_configs(self, env):
+    import params
+    env.set_params(params)
+    hdfs("secondarynamenode")
+
+  def reload_configs(self, env):
+    import params
+    env.set_params(params)
+    Logger.info("RELOAD CONFIGS")
+
   def start(self, env, upgrade_type=None):
     import params
     env.set_params(params)

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/configuration-schema.xsd
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/configuration-schema.xsd 
b/ambari-server/src/main/resources/configuration-schema.xsd
index 9350984..12b0217 100644
--- a/ambari-server/src/main/resources/configuration-schema.xsd
+++ b/ambari-server/src/main/resources/configuration-schema.xsd
@@ -41,6 +41,13 @@
       <xs:element name="deleted" type="xs:boolean" minOccurs="0"/>
       <xs:element name="final" type="xs:boolean" minOccurs="0"/>
       <xs:element name="on-ambari-upgrade" type="propertyUpgradeBehavior" 
minOccurs="1"/>
+      <xs:element name="supported-refresh-commands" minOccurs="0">
+        <xs:complexType>
+          <xs:sequence>
+            <xs:element name="refresh-command" type="refreshCommands" minOccurs="1" maxOccurs="unbounded"/>
+          </xs:sequence>
+        </xs:complexType>
+      </xs:element>
       <xs:element name="on-stack-upgrade" type="propertyStackUpgradeBehavior" 
minOccurs="0"/>
       <xs:element name="property-type" minOccurs="0">
         <xs:simpleType>
@@ -84,6 +91,11 @@
     <xs:attribute name="merge" type="xs:boolean" use="optional" 
default="true"/>
   </xs:complexType>
 
+  <xs:complexType name="refreshCommands">
+    <xs:attribute name="componentName" type="xs:string" use="required"/>
+    <xs:attribute name="command" type="xs:string" use="optional"/>
+  </xs:complexType>
+
   <xs:complexType name="valueAttributesInfo">
     <xs:all>
       <xs:element name="type" type="xs:string" minOccurs="0"/>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/properties.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/properties.json 
b/ambari-server/src/main/resources/properties.json
index e42864f..1d12f83 100644
--- a/ambari-server/src/main/resources/properties.json
+++ b/ambari-server/src/main/resources/properties.json
@@ -53,6 +53,7 @@
         "HostRoles/actual_configs",
         "params/run_smoke_test",
         "HostRoles/stale_configs",
+        "HostRoles/reload_configs",
         "HostRoles/desired_admin_state",
         "HostRoles/maintenance_state",
         "HostRoles/service_name",

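With HostRoles/reload_configs registered above, the flag can be read on host_components resources next to the existing stale_configs field. An illustrative query (server address, cluster and host names are placeholders):

    # Sketch only: read the new reload_configs flag for a NAMENODE host component.
    import requests

    url = ("http://ambari-server:8080/api/v1/clusters/c1"
           "/hosts/c6401.ambari.apache.org/host_components/NAMENODE"
           "?fields=HostRoles/stale_configs,HostRoles/reload_configs")
    print(requests.get(url, auth=("admin", "admin")).json()["HostRoles"])
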
http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml
 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml
index 86aa3ec..14fcf6a 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.3/services/HDFS/configuration/hdfs-site.xml
@@ -76,5 +76,8 @@
     <name>hadoop.caller.context.enabled</name>
     <value>true</value>
     <on-ambari-upgrade add="false"/>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reload_configs" />
+    </supported-refresh-commands>
   </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
index 38a38cc..8a0a782 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/state/ConfigHelperTest.java
@@ -157,6 +157,7 @@ public class ConfigHelperTest {
 
       cluster.addService("FLUME", repositoryVersion);
       cluster.addService("OOZIE", repositoryVersion);
+      cluster.addService("HDFS", repositoryVersion);
 
       final ClusterRequest clusterRequest2 =
           new ClusterRequest(cluster.getClusterId(), clusterName,
@@ -229,6 +230,45 @@ public class ConfigHelperTest {
       managementController.updateClusters(new HashSet<ClusterRequest>() {{
         add(clusterRequest5);
       }}, null);
+
+      // hdfs-site/hadoop.caller.context.enabled
+      ConfigurationRequest cr6 = new ConfigurationRequest();
+      cr6.setClusterName(clusterName);
+      cr6.setType("hdfs-site");
+      cr6.setVersionTag("version1");
+      cr6.setProperties(new HashMap<String, String>() {{
+        put("hadoop.caller.context.enabled", "true");
+      }});
+      cr6.setPropertiesAttributes(null);
+
+      final ClusterRequest clusterRequest6 =
+              new ClusterRequest(cluster.getClusterId(), clusterName,
+                      cluster.getDesiredStackVersion().getStackVersion(), null);
+
+      clusterRequest6.setDesiredConfig(Collections.singletonList(cr6));
+      managementController.updateClusters(new HashSet<ClusterRequest>() {{
+        add(clusterRequest6);
+      }}, null);
+
+      // hdfs-site/hadoop.caller.context.enabled
+      ConfigurationRequest cr7 = new ConfigurationRequest();
+      cr7.setClusterName(clusterName);
+      cr7.setType("hdfs-site");
+      cr7.setVersionTag("version2");
+      cr7.setProperties(new HashMap<String, String>() {{
+        put("hadoop.caller.context.enabled", "false");
+      }});
+      cr7.setPropertiesAttributes(null);
+
+      final ClusterRequest clusterRequest7 =
+              new ClusterRequest(cluster.getClusterId(), clusterName,
+                      cluster.getDesiredStackVersion().getStackVersion(), null);
+
+      clusterRequest7.setDesiredConfig(Collections.singletonList(cr7));
+      managementController.updateClusters(new HashSet<ClusterRequest>() {{
+        add(clusterRequest7);
+      }}, null);
+
     }
 
     @After
@@ -545,7 +585,7 @@ public class ConfigHelperTest {
               configHelper.getEffectiveDesiredTags(cluster, "h3"));
 
       Assert.assertNotNull(effectiveAttributes);
-      Assert.assertEquals(7, effectiveAttributes.size());
+      Assert.assertEquals(8, effectiveAttributes.size());
 
       Assert.assertTrue(effectiveAttributes.containsKey("global3"));
       Map<String, Map<String, String>> globalAttrs = effectiveAttributes.get("global3");
@@ -991,7 +1031,39 @@ public class ConfigHelperTest {
       Assert.assertTrue(configHelper.isStaleConfigs(sch, null));
 
       verify(sch);
-    }
+  }
+
+  @Test
+  public void testCalculateRefreshCommands() throws Exception {
+
+    Map<String, HostConfig> schReturn = new HashMap<>();
+    HostConfig hc = new HostConfig();
+    // Put a different version to check for change
+    hc.setDefaultVersionTag("version1");
+    schReturn.put("hdfs-site", hc);
+
+    ServiceComponent sc = createNiceMock(ServiceComponent.class);
+
+    // set up mocks
+    ServiceComponentHost sch = createNiceMock(ServiceComponentHost.class);
+    expect(sc.getDesiredStackId()).andReturn(cluster.getDesiredStackVersion()).anyTimes();
+
+    // set up expectations
+    expect(sch.getActualConfigs()).andReturn(schReturn).anyTimes();
+    expect(sch.getHostName()).andReturn("h1").anyTimes();
+    expect(sch.getClusterId()).andReturn(cluster.getClusterId()).anyTimes();
+    expect(sch.getServiceName()).andReturn("HDFS").anyTimes();
+    expect(sch.getServiceComponentName()).andReturn("NAMENODE").anyTimes();
+    expect(sch.getServiceComponent()).andReturn(sc).anyTimes();
+
+    replay(sc, sch);
+
+    Assert.assertTrue(configHelper.isStaleConfigs(sch, null));
+    String refreshConfigsCommand = configHelper.getRefreshConfigsCommand(cluster, sch);
+    Assert.assertEquals("reload_configs", refreshConfigsCommand);
+    verify(sch);
+  }
+
   }
 
   public static class RunWithCustomModule {

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
index 7a94ebf..4a04d0b 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/state/PropertyInfoTest.java
@@ -137,6 +137,26 @@ public class PropertyInfoTest {
   }
 
   @Test
+  public void testBehaviorWithSupportedRefreshCommandsTags() throws JAXBException {
+    // given
+    String xml =
+    "<property>\n" +
+    " <name>prop_name</name>\n" +
+    " <value>prop_val</value>\n" +
+    " <supported-refresh-commands>\n" +
+    "   <refresh-command componentName=\"NAMENODE\" command=\"reload_configs\" />\n" +
+    " </supported-refresh-commands>\n" +
+    "</property>";
+
+    // when
+    PropertyInfo propertyInfo = propertyInfoFrom(xml);
+
+    // then
+    assertEquals(propertyInfo.getSupportedRefreshCommands().iterator().next().getCommand(), "reload_configs");
+    assertEquals(propertyInfo.getSupportedRefreshCommands().iterator().next().getComponentName(), "NAMENODE");
+  }
+
+  @Test
   public void testUnknownPropertyType() throws Exception {
     // Given
     String xml =

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
index b1a4154..ef59e84 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_datanode.py
@@ -666,3 +666,20 @@ class TestDatanode(RMFTestCase):
     self.assertEquals(
       ('hdfs dfsadmin -fs hdfs://ns1 -D ipc.client.connect.max.retries=5 -D ipc.client.connect.retry.interval=1000 -getDatanodeInfo 0.0.0.0:8010'),
       mocks_dict['checked_call'].call_args_list[0][0][0])
+
+  def test_reload_configs(self):
+      with self.assertRaises(Fail):
+          self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/datanode.py",
+                             classname = "DataNode",
+                             command = "reload_configs",
+                             config_file = "default.json",
+                             stack_version = self.STACK_VERSION,
+                             target = RMFTestCase.TARGET_COMMON_SERVICES
+                             )
+
+          # self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -reconfig namenode c6401.ambari.apache.org:8020 start",
+          #                       tries=115,
+          #                       try_sleep=10,
+          #                       user="hdfs",
+          #                       logoutput=True
+          #                       )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py 
b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 06e12f6..4e1124a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1745,6 +1745,39 @@ class TestNamenode(RMFTestCase):
     get_namenode_states_mock.return_value = active_namenodes, standby_namenodes, unknown_namenodes
     self.assertFalse(is_this_namenode_active())
 
+  def test_reloadproxyusers(self):
+      self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                         classname = "NameNode",
+                         command = "reloadproxyusers",
+                         config_file = "default.json",
+                         stack_version = self.STACK_VERSION,
+                         target = RMFTestCase.TARGET_COMMON_SERVICES
+                         )
+
+      self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshSuperUserGroupsConfiguration',
+                                user = 'hdfs',
+                                conf_dir = '/etc/hadoop/conf',
+                                bin_dir = '/usr/bin')
+      self.assertNoMoreResources()
+
+  def test_reload_configs(self):
+      with self.assertRaises(Fail):
+          self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/namenode.py",
+                             classname = "NameNode",
+                             command = "reload_configs",
+                             config_file = "default.json",
+                             stack_version = self.STACK_VERSION,
+                             target = RMFTestCase.TARGET_COMMON_SERVICES
+                             )
+
+      # self.assertResourceCalled('Execute', "hdfs dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -reconfig namenode c6401.ambari.apache.org:8020 start",
+      #                       tries=115,
+      #                       try_sleep=10,
+      #                       user="hdfs",
+      #                       logoutput=True
+      #                       )
+
+
 
 class Popen_Mock:
   return_value = 1

http://git-wip-us.apache.org/repos/asf/ambari/blob/57682942/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/configuration/hdfs-site.xml
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/configuration/hdfs-site.xml
 
b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/configuration/hdfs-site.xml
index 28657eb..9e52a33 100644
--- 
a/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/configuration/hdfs-site.xml
+++ 
b/ambari-server/src/test/resources/stacks/HDP/2.0.5/services/HDFS/configuration/hdfs-site.xml
@@ -444,4 +444,12 @@ don't exist, they will be created with this permission.</description>
     </description>
     <on-ambari-upgrade add="true"/>
   </property>
+  <property>
+    <name>hadoop.caller.context.enabled</name>
+    <value>true</value>
+    <on-ambari-upgrade add="false"/>
+    <supported-refresh-commands>
+      <refresh-command componentName="NAMENODE" command="reload_configs" />
+    </supported-refresh-commands>
+  </property>
 </configuration>
