Repository: ambari
Updated Branches:
  refs/heads/trunk edce1b29b -> d99a275f5


AMBARI-7415. Properties from Xml should be automatically added during upgrade 
(aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d99a275f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d99a275f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d99a275f

Branch: refs/heads/trunk
Commit: d99a275f552813593ff378270a8d411f34a25937
Parents: edce1b2
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Fri Sep 19 20:53:05 2014 +0300
Committer: Andrew Onishuk <aonis...@hortonworks.com>
Committed: Fri Sep 19 20:53:05 2014 +0300

----------------------------------------------------------------------
 .../ambari/server/state/ConfigHelper.java       |  39 ++++
 .../server/upgrade/AbstractUpgradeCatalog.java  | 215 ++++++++++++++-----
 .../server/upgrade/UpgradeCatalog161.java       |   2 +-
 .../server/upgrade/UpgradeCatalog170.java       |  27 +--
 .../server/upgrade/UpgradeCatalog161Test.java   |   2 +-
 .../server/upgrade/UpgradeCatalog170Test.java   |  35 +--
 6 files changed, 216 insertions(+), 104 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
index 191549b..8b724b1 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigHelper.java
@@ -474,6 +474,8 @@ public class ConfigHelper {
     
     for(ServiceInfo serviceInfo:stack.getServices()) {     
       Set<PropertyInfo> stackProperties = 
ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), 
serviceInfo.getName());
+      Set<PropertyInfo> stackLevelProperties = 
ambariMetaInfo.getStackProperties(stack.getName(), stack.getVersion());
+      stackProperties.addAll(stackLevelProperties);
       
       for (PropertyInfo stackProperty : stackProperties) {
         String stackPropertyConfigType = 
fileNameToConfigType(stackProperty.getFilename());
@@ -488,6 +490,43 @@ public class ConfigHelper {
     return null;
   }
   
+  public ServiceInfo getPropertyOwnerService(Cluster cluster, String 
configType, String propertyName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    for(ServiceInfo serviceInfo:stack.getServices()) {     
+      Set<PropertyInfo> stackProperties = 
ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), 
serviceInfo.getName());
+      
+      for (PropertyInfo stackProperty : stackProperties) {
+        String stackPropertyConfigType = 
fileNameToConfigType(stackProperty.getFilename());
+        
+        if(stackProperty.getName().equals(propertyName) && 
stackPropertyConfigType.equals(configType)) {
+          return serviceInfo;
+        }
+      }
+      
+    }
+    
+    return null;
+  }
+  
+  public Set<PropertyInfo> getServiceProperties(Cluster cluster, String 
serviceName) throws AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    return ambariMetaInfo.getProperties(stack.getName(), stack.getVersion(), 
serviceName);
+  }
+  
+  public Set<PropertyInfo> getStackProperties(Cluster cluster) throws 
AmbariException {
+    StackId stackId = cluster.getCurrentStackVersion();
+    StackInfo stack = ambariMetaInfo.getStackInfo(stackId.getStackName(),
+        stackId.getStackVersion());
+    
+    return ambariMetaInfo.getStackProperties(stack.getName(), 
stack.getVersion());
+  }
+  
   public void createConfigType(Cluster cluster, AmbariManagementController 
ambariManagementController, 
       String configType, Map<String, String> properties, String authName) 
throws AmbariException {
     String tag;

http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index a53159c..d245b51 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -22,6 +22,7 @@ import com.google.inject.Inject;
 import com.google.inject.Injector;
 import com.google.inject.Provider;
 import com.google.inject.persist.Transactional;
+
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
@@ -32,16 +33,26 @@ import org.apache.ambari.server.orm.entities.MetainfoEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.ConfigHelper;
+import org.apache.ambari.server.state.PropertyInfo;
+import org.apache.ambari.server.state.Service;
+import org.apache.ambari.server.state.ServiceInfo;
+import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.StackInfo;
 import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import javax.persistence.EntityManager;
+
 import java.sql.SQLException;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
 
 public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
   @Inject
@@ -172,6 +183,158 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
       dbAccessor.executeQuery(String.format("ALTER ROLE %s SET search_path to 
'%s';", dbUser, schemaName));
     }
   }
+  
+  public void addNewConfigurationsFromXml() throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    AmbariManagementController controller = 
injector.getInstance(AmbariManagementController.class);
+    
+    Clusters clusters = controller.getClusters();
+    if (clusters == null) {
+      return;
+    }
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (Cluster cluster : clusterMap.values()) {
+        Map<String, Set<String>> newProperties = new HashMap<String, 
Set<String>>();
+        
+        Set<PropertyInfo> stackProperties = 
configHelper.getStackProperties(cluster);
+        for(String serviceName: cluster.getServices().keySet()) {
+          Set<PropertyInfo> properties = 
configHelper.getServiceProperties(cluster, serviceName);
+          
+          if(properties == null) {
+            continue;
+          }
+          properties.addAll(stackProperties);
+          
+          for(PropertyInfo property:properties) {
+            String configType = 
ConfigHelper.fileNameToConfigType(property.getFilename());
+            Config clusterConfigs = cluster.getDesiredConfigByType(configType);
+            if(clusterConfigs == null || 
!clusterConfigs.getProperties().containsKey(property.getName())) {
+              LOG.info("Config " + property.getName() + " from " + configType 
+ " from xml configurations" +
+                  " is not found on the cluster. Adding it...");
+              
+              if(!newProperties.containsKey(configType)) {
+                newProperties.put(configType, new HashSet<String>());
+              }
+              newProperties.get(configType).add(property.getName());
+            }
+          }
+        }
+        
+        
+        
+        for (Entry<String, Set<String>> newProperty : 
newProperties.entrySet()) {
+          updateConfigurationPropertiesWithValuesFromXml(newProperty.getKey(), 
newProperty.getValue(), false, true);
+        }
+      }
+    }
+  }
+  
+  /**
+   * Create a new cluster scoped configuration with the new properties added
+   * with the values from the corresponding xml files.
+   * 
+   * If the xml owner service is not in the cluster, the configuration won't 
be added.
+   * 
+   * @param configType Configuration type. (hdfs-site, etc.)
+   * @param propertyNames Set of property names.
+   */
+  protected void updateConfigurationPropertiesWithValuesFromXml(String 
configType,
+      Set<String> propertyNames, boolean updateIfExists, boolean 
createNewConfigType) throws AmbariException {
+    ConfigHelper configHelper = injector.getInstance(ConfigHelper.class);
+    AmbariManagementController controller = 
injector.getInstance(AmbariManagementController.class);
+    
+    Clusters clusters = controller.getClusters();
+    if (clusters == null) {
+      return;
+    }
+    Map<String, Cluster> clusterMap = clusters.getClusters();
+
+    if (clusterMap != null && !clusterMap.isEmpty()) {
+      for (Cluster cluster : clusterMap.values()) {
+        Map<String, String> properties = new HashMap<String, String>();
+        
+        for(String propertyName:propertyNames) {
+          String propertyValue = 
configHelper.getPropertyValueFromStackDefenitions(cluster, configType, 
propertyName);
+          
+          if(propertyValue == null) {
+            LOG.info("Config " + propertyName + " from " + configType + " is 
not found in xml definitions." +
+                "Skipping configuration property update");
+            continue;
+          }
+          
+          ServiceInfo propertyService = 
configHelper.getPropertyOwnerService(cluster, configType, propertyName);
+          if(propertyService != null && 
!cluster.getServices().containsKey(propertyService.getName())) {
+            LOG.info("Config " + propertyName + " from " + configType + " with 
value = " + propertyValue + " " +
+                "Is not added due to service " + propertyService.getName() + " 
is not in the cluster.");
+            continue;
+          }
+          
+          properties.put(propertyName, propertyValue);
+        }
+        
+        updateConfigurationPropertiesForCluster(cluster, configType,
+            properties, updateIfExists, createNewConfigType);
+      }
+    }
+  }
+  
+  protected void updateConfigurationPropertiesForCluster(Cluster cluster, 
String configType,
+      Map<String, String> properties, boolean updateIfExists, boolean 
createNewConfigType) throws AmbariException {
+    AmbariManagementController controller = 
injector.getInstance(AmbariManagementController.class);
+    String newTag = "version" + System.currentTimeMillis();
+    
+    if (properties != null) {
+      Map<String, Config> all = cluster.getConfigsByType(configType);
+      if (all == null || !all.containsKey(newTag) || properties.size() > 0) {
+        Map<String, String> oldConfigProperties;
+        Config oldConfig = cluster.getDesiredConfigByType(configType);
+        
+        if (oldConfig == null && !createNewConfigType) {
+          LOG.info("Config " + configType + " not found. Assuming service not 
installed. " +
+              "Skipping configuration properties update");
+          return;
+        } else if (oldConfig == null) {
+          oldConfigProperties = new HashMap<String, String>();
+          newTag = "version1";
+        } else {
+          oldConfigProperties = oldConfig.getProperties();
+        }
+
+        Map<String, String> mergedProperties =
+          mergeProperties(oldConfigProperties, properties, updateIfExists);
+
+        if (!Maps.difference(oldConfigProperties, 
mergedProperties).areEqual()) {
+          LOG.info("Applying configuration with tag '{}' to " +
+            "cluster '{}'", newTag, cluster.getClusterName());
+
+          ConfigurationRequest cr = new ConfigurationRequest();
+          cr.setClusterName(cluster.getClusterName());
+          cr.setVersionTag(newTag);
+          cr.setType(configType);
+          cr.setProperties(mergedProperties);
+          controller.createConfiguration(cr);
+
+          Config baseConfig = cluster.getConfig(cr.getType(), 
cr.getVersionTag());
+          if (baseConfig != null) {
+            String authName = "ambari-upgrade";
+
+            if (cluster.addDesiredConfig(authName, 
Collections.singleton(baseConfig)) != null) {
+              String oldConfigString = (oldConfig != null) ? " from='" + 
oldConfig.getTag() + "'" : "";
+              LOG.info("cluster '" + cluster.getClusterName() + "' "
+                + "changed by: '" + authName + "'; "
+                + "type='" + baseConfig.getType() + "' "
+                + "tag='" + baseConfig.getTag() + "'"
+                + oldConfigString);
+            }
+          }
+        } else {
+          LOG.info("No changes detected to config " + configType + ". Skipping 
configuration properties update");
+        }
+      }
+    }
+  }
 
   /**
    * Create a new cluster scoped configuration with the new properties added
@@ -183,7 +346,6 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
         Map<String, String> properties, boolean updateIfExists, boolean 
createNewConfigType) throws
     AmbariException {
     AmbariManagementController controller = 
injector.getInstance(AmbariManagementController.class);
-    String newTag = "version" + System.currentTimeMillis();
 
     Clusters clusters = controller.getClusters();
     if (clusters == null) {
@@ -193,55 +355,8 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
 
     if (clusterMap != null && !clusterMap.isEmpty()) {
       for (Cluster cluster : clusterMap.values()) {
-        if (properties != null) {
-          Map<String, Config> all = cluster.getConfigsByType(configType);
-          if (all == null || !all.containsKey(newTag) || properties.size() > 
0) {
-            Map<String, String> oldConfigProperties;
-            Config oldConfig = cluster.getDesiredConfigByType(configType);
-            
-            if (oldConfig == null && !createNewConfigType) {
-              LOG.info("Config " + configType + " not found. Assuming service 
not installed. " +
-                  "Skipping configuration properties update");
-              return;
-            } else if (oldConfig == null) {
-              oldConfigProperties = new HashMap<String, String>();
-              newTag = "version1";
-            } else {
-              oldConfigProperties = oldConfig.getProperties();
-            }
-
-            Map<String, String> mergedProperties =
-              mergeProperties(oldConfigProperties, properties, updateIfExists);
-
-            if (!Maps.difference(oldConfigProperties, 
mergedProperties).areEqual()) {
-              LOG.info("Applying configuration with tag '{}' to " +
-                "cluster '{}'", newTag, cluster.getClusterName());
-
-              ConfigurationRequest cr = new ConfigurationRequest();
-              cr.setClusterName(cluster.getClusterName());
-              cr.setVersionTag(newTag);
-              cr.setType(configType);
-              cr.setProperties(mergedProperties);
-              controller.createConfiguration(cr);
-
-              Config baseConfig = cluster.getConfig(cr.getType(), 
cr.getVersionTag());
-              if (baseConfig != null) {
-                String authName = "ambari-upgrade";
-
-                if (cluster.addDesiredConfig(authName, 
Collections.singleton(baseConfig)) != null) {
-                  String oldConfigString = (oldConfig != null) ? " from='" + 
oldConfig.getTag() + "'" : "";
-                  LOG.info("cluster '" + cluster.getClusterName() + "' "
-                    + "changed by: '" + authName + "'; "
-                    + "type='" + baseConfig.getType() + "' "
-                    + "tag='" + baseConfig.getTag() + "'"
-                    + oldConfigString);
-                }
-              }
-            } else {
-              LOG.info("No changes detected to config " + configType + ". 
Skipping configuration properties update");
-            }
-          }
-        }
+        updateConfigurationPropertiesForCluster(cluster, configType,
+            properties, updateIfExists, createNewConfigType);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
index a4bb2b2..352603b 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog161.java
@@ -311,7 +311,7 @@ public class UpgradeCatalog161 extends 
AbstractUpgradeCatalog {
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary 
location to store the intermediate " +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin 
fragment files\npig.files.concatenation." +
             
"threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n"
 +
-            "hcat.bin=/usr/bin/hcat"), true, true);
+            "hcat.bin=/usr/bin/hcat"), true, false);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
index a3198c1..1313a3f 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog170.java
@@ -633,11 +633,11 @@ public class UpgradeCatalog170 extends 
AbstractUpgradeCatalog {
 
     moveGlobalsToEnv();
     addEnvContentFields();
-    addMissingConfigs();
     renamePigProperties();
     upgradePermissionModel();
     addJobsViewPermissions();
     moveConfigGroupsGlobalToEnv();
+    addMissingConfigs();
   }
 
   public void moveHcatalogIntoHiveService() throws AmbariException {
@@ -993,25 +993,7 @@ public class UpgradeCatalog170 extends 
AbstractUpgradeCatalog {
   }
 
   protected void addMissingConfigs() throws AmbariException {
-    updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false,
-        false);
-
-    updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), false,
-        false);
-
-    updateConfigurationProperties("yarn-env",
-        Collections.singletonMap("min_user_id", "1000"), false,
-        false);
-
-    updateConfigurationProperties("sqoop-env", 
Collections.singletonMap("sqoop_user", "sqoop"), false, false);
-
-    updateConfigurationProperties("hadoop-env",
-            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false,
-            false);
-
-    updateConfigurationProperties("oozie-env", 
Collections.singletonMap("oozie_admin_port", "11001"), false, false);
+    addNewConfigurationsFromXml();
   }
 
   /**
@@ -1065,8 +1047,7 @@ public class UpgradeCatalog170 extends 
AbstractUpgradeCatalog {
             continue;
           }
 
-          String value = 
configHelper.getPropertyValueFromStackDefenitions(cluster, configType, 
CONTENT_FIELD_NAME);
-          updateConfigurationProperties(configType, 
Collections.singletonMap(CONTENT_FIELD_NAME, value), true, true);
+          updateConfigurationPropertiesWithValuesFromXml(configType, 
Collections.singleton(CONTENT_FIELD_NAME), false, true);
         }
       }
     }
@@ -1129,7 +1110,7 @@ public class UpgradeCatalog170 extends 
AbstractUpgradeCatalog {
         }
 
         for (Entry<String, Map<String, String>> newProperty : 
newProperties.entrySet()) {
-          updateConfigurationProperties(newProperty.getKey(), 
newProperty.getValue(), true, true);
+          updateConfigurationProperties(newProperty.getKey(), 
newProperty.getValue(), false, true);
         }
 
         // if have some custom properties, for own services etc., leave that 
as it was

http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
index 4315e2c..10d4f7d 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog161Test.java
@@ -172,7 +172,7 @@ public class UpgradeCatalog161Test {
             "reducer=1000000000\npig.exec.reducers.max=999\n\n# Temporary 
location to store the intermediate " +
             "data.\npig.temp.dir=/tmp/\n\n# Threshold for merging FRJoin 
fragment files\npig.files.concatenation." +
             
"threshold=100\npig.optimistic.files.concatenation=false;\n\npig.disable.counter=false\n\n"
 +
-            "hcat.bin=/usr/bin/hcat"), true, true);
+            "hcat.bin=/usr/bin/hcat"), true, false);
     expectLastCall();
 
     replay(upgradeCatalog, dbAccessor, configuration, injector, provider, em,

http://git-wip-us.apache.org/repos/asf/ambari/blob/d99a275f/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
index 6732455..f7715fd 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog170Test.java
@@ -423,9 +423,11 @@ public class UpgradeCatalog170Test {
     Method m = AbstractUpgradeCatalog.class.getDeclaredMethod
         ("updateConfigurationProperties", String.class, Map.class, 
boolean.class, boolean.class);
     Method n = 
AbstractUpgradeCatalog.class.getDeclaredMethod("getEntityManagerProvider");
-
+    Method l = AbstractUpgradeCatalog.class.getDeclaredMethod
+        ("addNewConfigurationsFromXml");
+    
     UpgradeCatalog170 upgradeCatalog = 
createMockBuilder(UpgradeCatalog170.class)
-      .addMockedMethod(m).addMockedMethod(n).createMock();
+      .addMockedMethod(m).addMockedMethod(n).addMockedMethod(l).createMock();
 
     List<ConfigGroupConfigMappingEntity> configGroupConfigMappingEntities =
             new ArrayList<ConfigGroupConfigMappingEntity>();
@@ -456,34 +458,10 @@ public class UpgradeCatalog170Test {
     contentOfHadoopEnv.put("content", "env file contents");
 
     upgradeCatalog.updateConfigurationProperties("hadoop-env",
-        globalConfigs, true, true);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hadoop-env",
-        contentOfHadoopEnv, true, true);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_max", "512"), false, 
false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hbase-env",
-        Collections.singletonMap("hbase_regionserver_xmn_ratio", "0.2"), 
false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("yarn-env",
-        Collections.singletonMap("min_user_id", "1000"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("sqoop-env", 
Collections.singletonMap("sqoop_user", "sqoop"), false, false);
-    expectLastCall();
-
-    upgradeCatalog.updateConfigurationProperties("hadoop-env",
-            Collections.singletonMap("hadoop_root_logger", "INFO,RFA"), false, 
false);
+        globalConfigs, false, true);
     expectLastCall();
 
-    upgradeCatalog.updateConfigurationProperties("oozie-env",
-            Collections.singletonMap("oozie_admin_port", "11001"), false, 
false);
+    upgradeCatalog.addNewConfigurationsFromXml();
     expectLastCall();
 
     expect(dbAccessor.executeSelect("SELECT role_name, user_id FROM 
user_roles")).andReturn(userRolesResultSet).once();
@@ -521,7 +499,6 @@ public class UpgradeCatalog170Test {
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", 
"2.1"), "smokeuser_keytab", "c1")).andReturn(new HashSet<String>()).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", 
"2.1"), "content", "c1")).andReturn(envDicts).once();
     expect(configHelper.findConfigTypesByPropertyName(new StackId("HDP", 
"2.1"), "dtnode_heapsize", "c1")).andReturn(configTypes).once();
-    expect(configHelper.getPropertyValueFromStackDefenitions(cluster, 
"hadoop-env", "content")).andReturn("env file contents").once();
 
     
expect(injector.getInstance(ConfigGroupConfigMappingDAO.class)).andReturn(configGroupConfigMappingDAO).anyTimes();
     expect(injector.getInstance(UserDAO.class)).andReturn(userDAO).anyTimes();

Reply via email to