Merge branch 'branch-feature-AMBARI-18456' into trunk
Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/704170e4 Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/704170e4 Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/704170e4 Branch: refs/heads/branch-2.5 Commit: 704170e4e1c960d90f325660ea1137be0ac3db42 Parents: 4a565d3 Author: Jonathan Hurley <jhur...@hortonworks.com> Authored: Wed Dec 7 16:49:43 2016 -0500 Committer: Jonathan Hurley <jhur...@hortonworks.com> Committed: Wed Dec 7 20:25:16 2016 -0500 ---------------------------------------------------------------------- .../AmbariManagementControllerImpl.java | 13 +- .../internal/ConfigGroupResourceProvider.java | 60 +- .../serveraction/upgrades/ConfigureAction.java | 16 +- .../serveraction/upgrades/FixLzoCodecPath.java | 16 +- .../upgrades/FixOozieAdminUsers.java | 9 +- .../upgrades/HBaseConfigCalculation.java | 14 +- .../HBaseEnvMaxDirectMemorySizeAction.java | 13 +- .../upgrades/HiveEnvClasspathAction.java | 13 +- .../upgrades/HiveZKQuorumConfigAction.java | 2 +- .../upgrades/OozieConfigCalculation.java | 13 +- .../upgrades/RangerConfigCalculation.java | 4 +- .../RangerKerberosConfigCalculation.java | 20 +- .../upgrades/RangerKmsProxyConfig.java | 3 +- .../upgrades/SparkShufflePropertyConfig.java | 3 +- .../upgrades/YarnConfigCalculation.java | 2 +- .../org/apache/ambari/server/state/Config.java | 22 +- .../ambari/server/state/ConfigFactory.java | 20 +- .../apache/ambari/server/state/ConfigImpl.java | 480 +++++++-------- .../server/state/cluster/ClusterImpl.java | 6 +- .../server/state/configgroup/ConfigGroup.java | 33 +- .../state/configgroup/ConfigGroupFactory.java | 34 +- .../state/configgroup/ConfigGroupImpl.java | 613 +++++++++---------- .../ambari/server/topology/AmbariContext.java | 25 +- .../ambari/server/update/HostUpdateHelper.java | 10 +- .../ExecutionCommandWrapperTest.java | 17 +- .../TestActionSchedulerThreading.java | 19 +- 
.../server/agent/HeartbeatTestHelper.java | 6 +- .../server/agent/TestHeartbeatMonitor.java | 13 +- .../configuration/RecoveryConfigHelperTest.java | 2 +- .../AmbariManagementControllerImplTest.java | 22 +- .../AmbariManagementControllerTest.java | 109 +--- .../UpgradeResourceProviderHDP22Test.java | 14 +- .../internal/UpgradeResourceProviderTest.java | 13 +- .../ComponentVersionCheckActionTest.java | 19 +- .../upgrades/ConfigureActionTest.java | 96 +-- .../upgrades/FixOozieAdminUsersTest.java | 76 +-- .../HBaseEnvMaxDirectMemorySizeActionTest.java | 187 +++--- .../upgrades/HiveEnvClasspathActionTest.java | 148 ++--- .../upgrades/HiveZKQuorumConfigActionTest.java | 2 +- .../upgrades/KerberosKeytabsActionTest.java | 28 +- .../upgrades/RangerConfigCalculationTest.java | 72 +-- .../RangerKerberosConfigCalculationTest.java | 173 ++---- .../upgrades/RangerKmsProxyConfigTest.java | 36 +- .../SparkShufflePropertyConfigTest.java | 30 +- .../upgrades/UpgradeActionTest.java | 28 +- .../ambari/server/state/ConfigGroupTest.java | 26 +- .../ambari/server/state/ConfigHelperTest.java | 49 +- .../state/alerts/AlertReceivedListenerTest.java | 8 +- .../state/cluster/ClusterDeadlockTest.java | 17 +- .../server/state/cluster/ClusterTest.java | 133 +--- .../server/state/cluster/ClustersTest.java | 8 +- ...omponentHostConcurrentWriteDeadlockTest.java | 9 +- .../ambari/server/state/host/HostTest.java | 6 +- .../svccomphost/ServiceComponentHostTest.java | 24 +- .../server/topology/AmbariContextTest.java | 38 +- .../server/update/HostUpdateHelperTest.java | 40 +- .../ambari/server/utils/StageUtilsTest.java | 4 + 57 files changed, 1198 insertions(+), 1718 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java index 0bab2a9..c9a3e04 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java @@ -55,7 +55,6 @@ import java.util.EnumMap; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; @@ -80,10 +79,10 @@ import org.apache.ambari.server.ServiceComponentNotFoundException; import org.apache.ambari.server.ServiceNotFoundException; import org.apache.ambari.server.StackAccessException; import org.apache.ambari.server.actionmanager.ActionManager; +import org.apache.ambari.server.actionmanager.CommandExecutionType; import org.apache.ambari.server.actionmanager.HostRoleCommand; import org.apache.ambari.server.actionmanager.RequestFactory; import org.apache.ambari.server.actionmanager.Stage; -import org.apache.ambari.server.actionmanager.CommandExecutionType; import org.apache.ambari.server.actionmanager.StageFactory; import org.apache.ambari.server.agent.ExecutionCommand; import org.apache.ambari.server.agent.ExecutionCommand.KeyNames; @@ -895,17 +894,11 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle @Override public Config createConfig(Cluster cluster, String type, Map<String, String> properties, String versionTag, Map<String, Map<String, String>> propertiesAttributes) { - Config config = configFactory.createNew(cluster, type, - properties, propertiesAttributes); - if (!StringUtils.isEmpty(versionTag)) { - config.setTag(versionTag); - } - - config.persist(); + Config config = configFactory.createNew(cluster, type, versionTag, properties, + 
propertiesAttributes); cluster.addConfig(config); - return config; } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java index 96bb8f9..2373068 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/internal/ConfigGroupResourceProvider.java @@ -17,7 +17,16 @@ */ package org.apache.ambari.server.controller.internal; -import com.google.inject.Inject; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.ClusterNotFoundException; import org.apache.ambari.server.ConfigGroupNotFoundException; @@ -48,7 +57,7 @@ import org.apache.ambari.server.security.authorization.RoleAuthorization; import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; -import org.apache.ambari.server.state.ConfigImpl; +import org.apache.ambari.server.state.ConfigFactory; import org.apache.ambari.server.state.Host; import org.apache.ambari.server.state.configgroup.ConfigGroup; import org.apache.ambari.server.state.configgroup.ConfigGroupFactory; @@ -56,15 +65,7 @@ import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.ArrayList; -import java.util.Arrays; -import 
java.util.EnumSet; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; +import com.google.inject.Inject; @StaticallyInject public class ConfigGroupResourceProvider extends @@ -102,6 +103,12 @@ public class ConfigGroupResourceProvider extends private static HostDAO hostDAO; /** + * Used for creating {@link Config} instances to return in the REST response. + */ + @Inject + private static ConfigFactory configFactory; + + /** * Create a new resource provider for the given management controller. * * @param propertyIds the property ids @@ -568,22 +575,19 @@ public class ConfigGroupResourceProvider extends } } + configLogger.info("User {} is creating new configuration group {} for tag {} in cluster {}", + getManagementController().getAuthName(), request.getGroupName(), request.getTag(), + cluster.getClusterName()); + ConfigGroup configGroup = configGroupFactory.createNew(cluster, request.getGroupName(), request.getTag(), request.getDescription(), request.getConfigs(), hosts); - verifyConfigs(configGroup.getConfigurations(), cluster.getClusterName()); configGroup.setServiceName(serviceName); - // Persist before add, since id is auto-generated - configLogger.info("Persisting new Config group" - + ", clusterName = " + cluster.getClusterName() - + ", name = " + configGroup.getName() - + ", tag = " + configGroup.getTag() - + ", user = " + getManagementController().getAuthName()); + verifyConfigs(configGroup.getConfigurations(), cluster.getClusterName()); - configGroup.persist(); cluster.addConfigGroup(configGroup); if (serviceName != null) { cluster.createServiceConfigVersion(serviceName, getManagementController().getAuthName(), @@ -634,6 +638,11 @@ public class ConfigGroupResourceProvider extends + ", clusterName = " + request.getClusterName() + ", groupId = " + request.getId()); } + + configLogger.info("User {} is updating configuration group {} for tag {} in cluster {}", + 
getManagementController().getAuthName(), request.getGroupName(), request.getTag(), + cluster.getClusterName()); + String serviceName = configGroup.getServiceName(); String requestServiceName = cluster.getServiceForConfigTypes(request.getConfigs().keySet()); if (StringUtils.isEmpty(serviceName) && StringUtils.isEmpty(requestServiceName)) { @@ -682,13 +691,6 @@ public class ConfigGroupResourceProvider extends configGroup.setDescription(request.getDescription()); configGroup.setTag(request.getTag()); - configLogger.info("Persisting updated Config group" - + ", clusterName = " + configGroup.getClusterName() - + ", id = " + configGroup.getId() - + ", tag = " + configGroup.getTag() - + ", user = " + getManagementController().getAuthName()); - - configGroup.persist(); if (serviceName != null) { cluster.createServiceConfigVersion(serviceName, getManagementController().getAuthName(), request.getServiceConfigVersionNote(), configGroup); @@ -781,11 +783,7 @@ public class ConfigGroupResourceProvider extends } } - Config config = new ConfigImpl(type); - config.setTag(tag); - config.setProperties(configProperties); - config.setPropertiesAttributes(configAttributes); - + Config config = configFactory.createReadOnly(type, tag, configProperties, configAttributes); configurations.put(config.getType(), config); } } catch (Exception e) { http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java index 5459ddb..97280ee 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java +++ 
b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/ConfigureAction.java @@ -451,7 +451,7 @@ public class ConfigureAction extends AbstractServerAction { // of creating a whole new history record since it was already done if (!targetStack.equals(currentStack) && targetStack.equals(configStack)) { config.setProperties(newValues); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", outputBuffer.toString(), ""); } @@ -570,8 +570,9 @@ public class ConfigureAction extends AbstractServerAction { for(Replace replacement: replacements){ if(isOperationAllowed(cluster, configType, replacement.key, - replacement.ifKey, replacement.ifType, replacement.ifValue, replacement.ifKeyState)) + replacement.ifKey, replacement.ifType, replacement.ifValue, replacement.ifKeyState)) { allowedReplacements.add(replacement); + } } return allowedReplacements; @@ -582,8 +583,9 @@ public class ConfigureAction extends AbstractServerAction { for(ConfigurationKeyValue configurationKeyValue: sets){ if(isOperationAllowed(cluster, configType, configurationKeyValue.key, - configurationKeyValue.ifKey, configurationKeyValue.ifType, configurationKeyValue.ifValue, configurationKeyValue.ifKeyState)) + configurationKeyValue.ifKey, configurationKeyValue.ifType, configurationKeyValue.ifValue, configurationKeyValue.ifKeyState)) { allowedSets.add(configurationKeyValue); + } } return allowedSets; @@ -593,14 +595,16 @@ public class ConfigureAction extends AbstractServerAction { List<Transfer> allowedTransfers = new ArrayList<>(); for (Transfer transfer : transfers) { String key = ""; - if(transfer.operation == TransferOperation.DELETE) + if(transfer.operation == TransferOperation.DELETE) { key = transfer.deleteKey; - else + } else { key = transfer.fromKey; + } if(isOperationAllowed(cluster, configType, key, - transfer.ifKey, transfer.ifType, transfer.ifValue, transfer.ifKeyState)) + transfer.ifKey, transfer.ifType, transfer.ifValue, 
transfer.ifKeyState)) { allowedTransfers.add(transfer); + } } return allowedTransfers; http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java index ffa21ab..4833729 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixLzoCodecPath.java @@ -18,7 +18,11 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -28,13 +32,7 @@ import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; import org.apache.commons.lang.StringUtils; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.google.inject.Inject; /** * During stack upgrade, update lzo codec path in mapreduce.application.classpath and @@ -78,7 +76,7 @@ public class FixLzoCodecPath extends AbstractServerAction { } } config.setProperties(properties); - config.persist(false); + config.save(); } if (modifiedProperties.isEmpty()) { return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", 
http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java index 3a06476..75588d5 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/FixOozieAdminUsers.java @@ -18,7 +18,9 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -28,8 +30,7 @@ import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; import org.apache.commons.lang.StringUtils; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; +import com.google.inject.Inject; /** * During stack upgrade, update lzo codec path in mapreduce.application.classpath and @@ -86,7 +87,7 @@ public class FixOozieAdminUsers extends AbstractServerAction { oozieProperties.put(OOZIE_ADMIN_USERS_PROP, newOozieAdminUsers); oozieConfig.setProperties(oozieProperties); - oozieConfig.persist(false); + oozieConfig.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("Set oozie admin users to %s", newOozieAdminUsers), ""); http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java index 7f6d4b1..739dd7e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseConfigCalculation.java @@ -18,7 +18,10 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.math.BigDecimal; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -27,9 +30,7 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; -import java.math.BigDecimal; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; +import com.google.inject.Inject; /** * Computes HBase properties. 
This class is only used when moving from @@ -79,8 +80,9 @@ public class HBaseConfigCalculation extends AbstractServerAction { "Upper or lower memstore limit setting value is malformed, skipping", ""); } - if (lowerLimit.scale() < 2) //make sure result will have at least 2 digits after decimal point + if (lowerLimit.scale() < 2) { lowerLimit = lowerLimit.setScale(2, BigDecimal.ROUND_HALF_UP); + } BigDecimal lowerLimitNew = lowerLimit.divide(upperLimit, BigDecimal.ROUND_HALF_UP); properties.put(NEW_LOWER_LIMIT_PROPERTY_NAME, lowerLimitNew.toString()); @@ -90,7 +92,7 @@ public class HBaseConfigCalculation extends AbstractServerAction { properties.remove(OLD_LOWER_LIMIT_PROPERTY_NAME); config.setProperties(properties); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("%s was set to %s", NEW_LOWER_LIMIT_PROPERTY_NAME, lowerLimitNew.toString()), ""); http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java index b238bca..fb15555 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HBaseEnvMaxDirectMemorySizeAction.java @@ -18,7 +18,11 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.apache.ambari.server.AmbariException; import 
org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -27,10 +31,7 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.google.inject.Inject; /** * Computes HBase Env content property. @@ -79,7 +80,7 @@ public class HBaseEnvMaxDirectMemorySizeAction extends AbstractServerAction { properties.put(CONTENT_NAME, appendedContent); config.setProperties(properties); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("The %s/%s property was appended with %s", SOURCE_CONFIG_TYPE, CONTENT_NAME, APPEND_CONTENT_LINE),""); http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java index 0e10160..c5000bf 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveEnvClasspathAction.java @@ -18,7 +18,11 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -27,10 
+31,7 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.google.inject.Inject; /** * Append hive-env config type with HIVE_HOME and HIVE_CONF_DIR variables if they are absent @@ -103,7 +104,7 @@ public class HiveEnvClasspathAction extends AbstractServerAction { } config.setProperties(properties); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("Added %s, %s to content at %s", HIVE_CONF_DIR, HIVE_HOME, TARGET_CONFIG_TYPE), ""); http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java index 0ade30b..7ebad08 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/HiveZKQuorumConfigAction.java @@ -85,7 +85,7 @@ public class HiveZKQuorumConfigAction extends AbstractServerAction { hiveSiteProperties.put(HIVE_SITE_ZK_CONNECT_STRING, zookeeperQuorum); hiveSite.setProperties(hiveSiteProperties); - hiveSite.persist(false); + hiveSite.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("Successfully set %s and %s in %s", HIVE_SITE_ZK_QUORUM, http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java 
---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java index 4da67ca..9b8a7dc 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/OozieConfigCalculation.java @@ -18,7 +18,11 @@ package org.apache.ambari.server.serveraction.upgrades; -import com.google.inject.Inject; +import java.util.Map; +import java.util.concurrent.ConcurrentMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; @@ -27,10 +31,7 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; -import java.util.Map; -import java.util.concurrent.ConcurrentMap; -import java.util.regex.Matcher; -import java.util.regex.Pattern; +import com.google.inject.Inject; /** * Changes oozie-env during upgrade (adds -Dhdp.version to $HADOOP_OPTS variable) @@ -67,7 +68,7 @@ public class OozieConfigCalculation extends AbstractServerAction { } config.setProperties(properties); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("Added -Dhdp.version to $HADOOP_OPTS variable at %s", TARGET_CONFIG_TYPE), ""); http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java ---------------------------------------------------------------------- diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java index ff4a20e..8e0161b 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerConfigCalculation.java @@ -141,13 +141,13 @@ public class RangerConfigCalculation extends AbstractServerAction { targetValues.put("ranger.jpa.audit.jdbc.dialect", dialect); config.setProperties(targetValues); - config.persist(false); + config.save(); config = cluster.getDesiredConfigByType(RANGER_ENV_CONFIG_TYPE); targetValues = config.getProperties(); targetValues.put("ranger_privelege_user_jdbc_url", userJDBCUrl); config.setProperties(targetValues); - config.persist(false); + config.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", stdout.toString(), ""); } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java index ba0da79..c059c9e 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKerberosConfigCalculation.java @@ -87,7 +87,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != hadoopUser) { targetValues.put(RANGER_PLUGINS_HDFS_SERVICE_USER, hadoopUser); 
rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HDFS_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hdfs_user", HADOOP_ENV_CONFIG_TYPE); @@ -104,7 +104,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != hiveUser) { targetValues.put(RANGER_PLUGINS_HIVE_SERVICE_USER, hiveUser); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HIVE_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hive_user", HIVE_ENV_CONFIG_TYPE); @@ -121,7 +121,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != yarnUser) { targetValues.put(RANGER_PLUGINS_YARN_SERVICE_USER, yarnUser); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_YARN_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "yarn_user", YARN_ENV_CONFIG_TYPE); @@ -138,7 +138,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != hbaseUser) { targetValues.put(RANGER_PLUGINS_HBASE_SERVICE_USER, hbaseUser); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_HBASE_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "hbase_user", HBASE_ENV_CONFIG_TYPE); @@ -155,7 +155,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != knoxUser) { targetValues.put(RANGER_PLUGINS_KNOX_SERVICE_USER, knoxUser); 
rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KNOX_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "knox_user", KNOX_ENV_CONFIG_TYPE); @@ -190,7 +190,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { } targetValues.put(RANGER_PLUGINS_STORM_SERVICE_USER, stormValue); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_STORM_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "storm_user", STORM_ENV_CONFIG_TYPE); @@ -207,7 +207,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != kafkaUser) { targetValues.put(RANGER_PLUGINS_KAFKA_SERVICE_USER, kafkaUser); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KAFKA_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "kafka_user", KAFKA_ENV_CONFIG_TYPE); @@ -224,7 +224,7 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != rangerKmsUser) { targetValues.put(RANGER_PLUGINS_KMS_SERVICE_USER, rangerKmsUser); rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_PLUGINS_KMS_SERVICE_USER); } else { errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "kms_user", RANGER_KMS_ENV_CONFIG_TYPE); @@ -243,10 +243,10 @@ public class RangerKerberosConfigCalculation extends AbstractServerAction { if (null != spnegoKeytab) { targetValues.put(RANGER_SPNEGO_KEYTAB, spnegoKeytab); 
rangerAdminconfig.setProperties(targetValues); - rangerAdminconfig.persist(false); + rangerAdminconfig.save(); sucessMsg = sucessMsg + MessageFormat.format("{0}\n", RANGER_SPNEGO_KEYTAB); } else { - errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "dfs.web.authentication.kerberos.keytab", HDFS_SITE_CONFIG_TYPE); + errMsg = errMsg + MessageFormat.format("{0} not found in {1}\n", "dfs.web.authentication.kerberos.keytab", HDFS_SITE_CONFIG_TYPE); } } else { http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java index bb88f55..25387cc 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/RangerKmsProxyConfig.java @@ -29,7 +29,6 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.SecurityType; -import org.apache.commons.lang.StringUtils; import com.google.inject.Inject; @@ -83,7 +82,7 @@ public class RangerKmsProxyConfig extends AbstractServerAction { targetValues.put(groupProp, "*"); targetValues.put(hostProp, "*"); kmsSite.setProperties(targetValues); - kmsSite.persist(false); + kmsSite.save(); outputMsg = outputMsg + MessageFormat.format("Successfully added properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE); } else { outputMsg = outputMsg + MessageFormat.format("Kerberos not enable, not setting proxy properties to {0}", RANGER_KMS_SITE_CONFIG_TYPE); 
http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java index 299a373..b1aa6e1 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/SparkShufflePropertyConfig.java @@ -25,7 +25,6 @@ import java.util.Map; import java.util.concurrent.ConcurrentMap; import org.apache.ambari.server.AmbariException; -import org.apache.ambari.server.ServiceNotFoundException; import org.apache.ambari.server.actionmanager.HostRoleStatus; import org.apache.ambari.server.agent.CommandReport; import org.apache.ambari.server.serveraction.AbstractServerAction; @@ -89,7 +88,7 @@ public class SparkShufflePropertyConfig extends AbstractServerAction { yarnSiteProperties.put(YARN_NODEMANAGER_AUX_SERVICES, newAuxServices); yarnSiteProperties.put(YARN_NODEMANAGER_AUX_SERVICES_SPARK_SHUFFLE_CLASS, YARN_NODEMANAGER_AUX_SERVICES_SPARK_SHUFFLE_CLASS_VALUE); yarnSiteConfig.setProperties(yarnSiteProperties); - yarnSiteConfig.persist(false); + yarnSiteConfig.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("%s was set from %s to %s. 
%s was set to %s", http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java index feefcaf..d638858 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/serveraction/upgrades/YarnConfigCalculation.java @@ -67,7 +67,7 @@ public class YarnConfigCalculation extends AbstractServerAction { yarnSiteProperties.put(YARN_RM_ZK_ADDRESS_PROPERTY_NAME, zkServersStr); yarnSiteProperties.put(HADOOP_REGISTRY_ZK_QUORUM_PROPERTY_NAME, zkServersStr); yarnSiteConfig.setProperties(yarnSiteProperties); - yarnSiteConfig.persist(false); + yarnSiteConfig.save(); return createCommandReport(0, HostRoleStatus.COMPLETED, "{}", String.format("%s was set from %s to %s. 
%s was set from %s to %s", http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java index b35aad9..67570f4 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/Config.java @@ -30,8 +30,6 @@ public interface Config { void setPropertiesTypes(Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes); - void setStackId(StackId stackId); - /** * @return Config Type */ @@ -66,18 +64,6 @@ public interface Config { public Map<String, Map<String, String>> getPropertiesAttributes(); /** - * Change the version tag - * @param versionTag - */ - public void setTag(String versionTag); - - /** - * Set config version - * @param version - */ - public void setVersion(Long version); - - /** * Replace properties with new provided set * @param properties Property Map to replace existing one */ @@ -110,11 +96,5 @@ public interface Config { /** * Persist the configuration. */ - public void persist(); - - /** - * Persist the configuration, optionally creating a new config entity. 
- */ - public void persist(boolean newConfig); - + public void save(); } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java index eaf68aa..d6cd997 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigFactory.java @@ -27,18 +27,20 @@ import com.google.inject.assistedinject.Assisted; * Factory for creating configuration objects using {@link Assisted} constructor parameters */ public interface ConfigFactory { - + /** * Creates a new {@link Config} object using provided values. * * @param cluster * @param type + * @param tag * @param map * @param mapAttributes * @return */ - Config createNew(Cluster cluster, String type, Map<String, String> map, Map<String, Map<String, String>> mapAttributes); - + Config createNew(Cluster cluster, @Assisted("type") String type, @Assisted("tag") String tag, + Map<String, String> map, Map<String, Map<String, String>> mapAttributes); + /** * Creates a new {@link Config} object using provided entity * @@ -48,4 +50,16 @@ public interface ConfigFactory { */ Config createExisting(Cluster cluster, ClusterConfigEntity entity); + /** + * Creates a read-only instance of a {@link Config} suitable for returning in + * REST responses. 
+ * + * @param type + * @param tag + * @param map + * @param mapAttributes + * @return + */ + Config createReadOnly(@Assisted("type") String type, @Assisted("tag") String tag, + Map<String, String> map, Map<String, Map<String, String>> mapAttributes); } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java index 1f52e6a..0a861d8 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/ConfigImpl.java @@ -18,27 +18,29 @@ package org.apache.ambari.server.state; -import java.util.Collections; -import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.ConcurrentMap; import java.util.concurrent.locks.ReadWriteLock; -import java.util.concurrent.locks.ReentrantReadWriteLock; + +import javax.annotation.Nullable; import org.apache.ambari.server.events.ClusterConfigChangedEvent; import org.apache.ambari.server.events.publishers.AmbariEventPublisher; +import org.apache.ambari.server.logging.LockFactory; import org.apache.ambari.server.orm.dao.ClusterDAO; import org.apache.ambari.server.orm.dao.ServiceConfigDAO; import org.apache.ambari.server.orm.entities.ClusterConfigEntity; import org.apache.ambari.server.orm.entities.ClusterEntity; +import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.gson.Gson; +import com.google.gson.JsonSyntaxException; import com.google.inject.Inject; -import com.google.inject.Injector; import com.google.inject.assistedinject.Assisted; import com.google.inject.assistedinject.AssistedInject; import 
com.google.inject.persist.Transactional; @@ -49,52 +51,113 @@ public class ConfigImpl implements Config { */ private final static Logger LOG = LoggerFactory.getLogger(ConfigImpl.class); + /** + * A label for {@link #propertyLock} to use with the {@link LockFactory}. + */ + private static final String PROPERTY_LOCK_LABEL = "configurationPropertyLock"; + public static final String GENERATED_TAG_PREFIX = "generatedTag_"; - private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + private final long configId; + private final Cluster cluster; + private final StackId stackId; + private final String type; + private final String tag; + private final Long version; - private Cluster cluster; - private StackId stackId; - private String type; - private volatile String tag; - private volatile Long version; - private volatile Map<String, String> properties; - private volatile Map<String, Map<String, String>> propertiesAttributes; - private ClusterConfigEntity entity; - private volatile Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes; + /** + * The properties of this configuration. This cannot be a + * {@link ConcurrentMap} since we allow null values. Therefore, it must be + * synchronized externally. + */ + private Map<String, String> properties; - @Inject - private ClusterDAO clusterDAO; + /** + * A lock for reading/writing of {@link #properties} concurrently. + * + * @see #properties + */ + private final ReadWriteLock propertyLock; - @Inject - private Gson gson; + /** + * The property attributes for this configuration. 
+ */ + private Map<String, Map<String, String>> propertiesAttributes; + + private Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes; + + private final ClusterDAO clusterDAO; + + private final Gson gson; @Inject private ServiceConfigDAO serviceConfigDAO; - @Inject - private AmbariEventPublisher eventPublisher; + private final AmbariEventPublisher eventPublisher; @AssistedInject - public ConfigImpl(@Assisted Cluster cluster, @Assisted String type, @Assisted Map<String, String> properties, - @Assisted Map<String, Map<String, String>> propertiesAttributes, Injector injector) { + ConfigImpl(@Assisted Cluster cluster, @Assisted("type") String type, + @Assisted("tag") @Nullable String tag, + @Assisted Map<String, String> properties, + @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO, + Gson gson, AmbariEventPublisher eventPublisher, LockFactory lockFactory) { + + propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL); + this.cluster = cluster; this.type = type; this.properties = properties; - this.propertiesAttributes = propertiesAttributes; + + // only set this if it's non-null + this.propertiesAttributes = null == propertiesAttributes ? null + : new HashMap<>(propertiesAttributes); + + this.clusterDAO = clusterDAO; + this.gson = gson; + this.eventPublisher = eventPublisher; + version = cluster.getNextConfigVersion(type); + + // tag is nullable from factory but not in the DB, so ensure we generate something + tag = StringUtils.isBlank(tag) ? 
GENERATED_TAG_PREFIX + version : tag; + this.tag = tag; + + ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId()); + + ClusterConfigEntity entity = new ClusterConfigEntity(); + entity.setClusterEntity(clusterEntity); + entity.setClusterId(cluster.getClusterId()); + entity.setType(type); + entity.setVersion(version); + entity.setTag(this.tag); + entity.setTimestamp(System.currentTimeMillis()); + entity.setStack(clusterEntity.getDesiredStack()); + entity.setData(gson.toJson(properties)); + + if (null != propertiesAttributes) { + entity.setAttributes(gson.toJson(propertiesAttributes)); + } // when creating a brand new config without a backing entity, use the // cluster's desired stack as the config's stack stackId = cluster.getDesiredStackVersion(); - - injector.injectMembers(this); propertiesTypes = cluster.getConfigPropertiesTypes(type); - } + persist(entity); + configId = entity.getConfigId(); + } @AssistedInject - public ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity, Injector injector) { + ConfigImpl(@Assisted Cluster cluster, @Assisted ClusterConfigEntity entity, + ClusterDAO clusterDAO, Gson gson, AmbariEventPublisher eventPublisher, + LockFactory lockFactory) { + propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL); + this.cluster = cluster; + this.clusterDAO = clusterDAO; + this.gson = gson; + this.eventPublisher = eventPublisher; + configId = entity.getConfigId(); + type = entity.getType(); tag = entity.getTag(); version = entity.getVersion(); @@ -102,16 +165,71 @@ public class ConfigImpl implements Config { // when using an existing entity, use the actual value of the entity's stack stackId = new StackId(entity.getStack()); - this.entity = entity; - injector.injectMembers(this); propertiesTypes = cluster.getConfigPropertiesTypes(type); + + // incur the hit on deserialization since this business object is stored locally + try { + Map<String, String> deserializedProperties = gson.<Map<String, 
String>> fromJson( + entity.getData(), Map.class); + + if (null == deserializedProperties) { + deserializedProperties = new HashMap<>(); + } + + properties = deserializedProperties; + } catch (JsonSyntaxException e) { + LOG.error("Malformed configuration JSON stored in the database for {}/{}", entity.getType(), + entity.getTag()); + } + + // incur the hit on deserialization since this business object is stored locally + try { + Map<String, Map<String, String>> deserializedAttributes = gson.<Map<String, Map<String, String>>> fromJson( + entity.getAttributes(), Map.class); + + if (null != deserializedAttributes) { + propertiesAttributes = new HashMap<>(deserializedAttributes); + } + } catch (JsonSyntaxException e) { + LOG.error("Malformed configuration attribute JSON stored in the database for {}/{}", + entity.getType(), entity.getTag()); + } } /** - * Constructor for clients not using factory. + * Constructor. This will create an instance suitable only for + * representation/serialization as it is incomplete. + * + * @param type + * @param tag + * @param properties + * @param propertiesAttributes + * @param clusterDAO + * @param gson + * @param eventPublisher */ - public ConfigImpl(String type) { + @AssistedInject + ConfigImpl(@Assisted("type") String type, + @Assisted("tag") @Nullable String tag, + @Assisted Map<String, String> properties, + @Assisted @Nullable Map<String, Map<String, String>> propertiesAttributes, ClusterDAO clusterDAO, + Gson gson, AmbariEventPublisher eventPublisher, LockFactory lockFactory) { + + propertyLock = lockFactory.newReadWriteLock(PROPERTY_LOCK_LABEL); + + this.tag = tag; this.type = type; + this.properties = new HashMap<>(properties); + this.propertiesAttributes = null == propertiesAttributes ? 
null + : new HashMap<>(propertiesAttributes); + this.clusterDAO = clusterDAO; + this.gson = gson; + this.eventPublisher = eventPublisher; + + cluster = null; + configId = 0; + version = 0L; + stackId = null; } /** @@ -119,232 +237,124 @@ public class ConfigImpl implements Config { */ @Override public StackId getStackId() { - readWriteLock.readLock().lock(); - try { - return stackId; - } finally { - readWriteLock.readLock().unlock(); - } - + return stackId; } @Override public Map<PropertyInfo.PropertyType, Set<String>> getPropertiesTypes() { - readWriteLock.readLock().lock(); - try { - return propertiesTypes; - } finally { - readWriteLock.readLock().unlock(); - } + return propertiesTypes; } @Override public void setPropertiesTypes(Map<PropertyInfo.PropertyType, Set<String>> propertiesTypes) { - readWriteLock.writeLock().lock(); - try { - this.propertiesTypes = propertiesTypes; - } finally { - readWriteLock.writeLock().unlock(); - } - } - - @Override - public void setStackId(StackId stackId) { - readWriteLock.writeLock().lock(); - try { - this.stackId = stackId; - } finally { - readWriteLock.writeLock().unlock(); - } - + this.propertiesTypes = propertiesTypes; } @Override public String getType() { - readWriteLock.readLock().lock(); - try { - return type; - } finally { - readWriteLock.readLock().unlock(); - } - + return type; } @Override public String getTag() { - if (tag == null) { - readWriteLock.writeLock().lock(); - try { - if (tag == null) { - tag = GENERATED_TAG_PREFIX + getVersion(); - } - } finally { - readWriteLock.writeLock().unlock(); - } - } - - readWriteLock.readLock().lock(); - try { - - return tag; - } finally { - readWriteLock.readLock().unlock(); - } - + return tag; } @Override public Long getVersion() { - if (version == null && cluster != null) { - readWriteLock.writeLock().lock(); - try { - if (version == null) { - version = cluster.getNextConfigVersion(type); //pure DB calculation call, no cluster locking required - } - } finally { - 
readWriteLock.writeLock().unlock(); - } - } - - readWriteLock.readLock().lock(); - try { - return version; - } finally { - readWriteLock.readLock().unlock(); - } - + return version; } @Override public Map<String, String> getProperties() { - if (null != entity && null == properties) { - readWriteLock.writeLock().lock(); - try { - if (properties == null) { - properties = gson.<Map<String, String>>fromJson(entity.getData(), Map.class); - } - } finally { - readWriteLock.writeLock().unlock(); - } - } - - readWriteLock.readLock().lock(); + propertyLock.readLock().lock(); try { - return null == properties ? new HashMap<String, String>() - : new HashMap<String, String>(properties); + return properties == null ? new HashMap<String, String>() : new HashMap<>(properties); } finally { - readWriteLock.readLock().unlock(); + propertyLock.readLock().unlock(); } - } @Override public Map<String, Map<String, String>> getPropertiesAttributes() { - if (null != entity && null == propertiesAttributes) { - readWriteLock.writeLock().lock(); - try { - if (propertiesAttributes == null) { - propertiesAttributes = gson.<Map<String, Map<String, String>>>fromJson(entity.getAttributes(), Map.class); - } - } finally { - readWriteLock.writeLock().unlock(); - } - } - - readWriteLock.readLock().lock(); - try { - return null == propertiesAttributes ? null : new HashMap<String, Map<String, String>>(propertiesAttributes); - } finally { - readWriteLock.readLock().unlock(); - } - - } - - @Override - public void setTag(String tag) { - readWriteLock.writeLock().lock(); - try { - this.tag = tag; - } finally { - readWriteLock.writeLock().unlock(); - } - - } - - @Override - public void setVersion(Long version) { - readWriteLock.writeLock().lock(); - try { - this.version = version; - } finally { - readWriteLock.writeLock().unlock(); - } - + return null == propertiesAttributes ? 
null + : new HashMap<String, Map<String, String>>(propertiesAttributes); } @Override public void setProperties(Map<String, String> properties) { - readWriteLock.writeLock().lock(); + propertyLock.writeLock().lock(); try { this.properties = properties; } finally { - readWriteLock.writeLock().unlock(); + propertyLock.writeLock().unlock(); } - } @Override public void setPropertiesAttributes(Map<String, Map<String, String>> propertiesAttributes) { - readWriteLock.writeLock().lock(); - try { - this.propertiesAttributes = propertiesAttributes; - } finally { - readWriteLock.writeLock().unlock(); - } - + this.propertiesAttributes = propertiesAttributes; } @Override - public void updateProperties(Map<String, String> properties) { - readWriteLock.writeLock().lock(); + public void updateProperties(Map<String, String> propertiesToUpdate) { + propertyLock.writeLock().lock(); try { - this.properties.putAll(properties); + properties.putAll(propertiesToUpdate); } finally { - readWriteLock.writeLock().unlock(); + propertyLock.writeLock().unlock(); } - } @Override public List<Long> getServiceConfigVersions() { - readWriteLock.readLock().lock(); - try { - if (cluster == null || type == null || version == null) { - return Collections.emptyList(); - } - return serviceConfigDAO.getServiceConfigVersionsByConfig(cluster.getClusterId(), type, version); - } finally { - readWriteLock.readLock().unlock(); - } - + return serviceConfigDAO.getServiceConfigVersionsByConfig(cluster.getClusterId(), type, version); } @Override - public void deleteProperties(List<String> properties) { - readWriteLock.writeLock().lock(); + public void deleteProperties(List<String> propertyKeysToRemove) { + propertyLock.writeLock().lock(); try { - for (String key : properties) { - this.properties.remove(key); - } + Set<String> keySet = properties.keySet(); + keySet.removeAll(propertyKeysToRemove); } finally { - readWriteLock.writeLock().unlock(); + propertyLock.writeLock().unlock(); } + } + + /** + * Persist the entity 
and update the internal state relationships once the + * transaction has been committed. + */ + private void persist(ClusterConfigEntity entity) { + persistEntitiesInTransaction(entity); + // ensure that the in-memory state of the cluster is kept consistent + cluster.addConfig(this); + + // re-load the entity associations for the cluster + cluster.refresh(); + + // broadcast the change event for the configuration + ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(), + getType(), getTag(), getVersion()); + + eventPublisher.publish(event); } - @Override - public void persist() { - persist(true); + /** + * Persist the cluster and configuration entities in their own transaction. + */ + @Transactional + void persistEntitiesInTransaction(ClusterConfigEntity entity) { + ClusterEntity clusterEntity = entity.getClusterEntity(); + + clusterDAO.createConfig(entity); + clusterEntity.getClusterConfigEntities().add(entity); + + // save the entity, forcing a flush to ensure the refresh picks up the + // newest data + clusterDAO.merge(clusterEntity, true); } /** @@ -352,69 +362,29 @@ public class ConfigImpl implements Config { */ @Override @Transactional - public void persist(boolean newConfig) { - readWriteLock.writeLock().lock(); - try { - ClusterEntity clusterEntity = clusterDAO.findById(cluster.getClusterId()); - - if (newConfig) { - ClusterConfigEntity entity = new ClusterConfigEntity(); - entity.setClusterEntity(clusterEntity); - entity.setClusterId(cluster.getClusterId()); - entity.setType(getType()); - entity.setVersion(getVersion()); - entity.setTag(getTag()); - entity.setTimestamp(new Date().getTime()); - entity.setStack(clusterEntity.getDesiredStack()); - entity.setData(gson.toJson(getProperties())); - - if (null != getPropertiesAttributes()) { - entity.setAttributes(gson.toJson(getPropertiesAttributes())); - } - - clusterDAO.createConfig(entity); - clusterEntity.getClusterConfigEntities().add(entity); - - // save the entity, 
forcing a flush to ensure the refresh picks up the - // newest data - clusterDAO.merge(clusterEntity, true); - } else { - // only supporting changes to the properties - ClusterConfigEntity entity = null; - - // find the existing configuration to update - for (ClusterConfigEntity cfe : clusterEntity.getClusterConfigEntities()) { - if (getTag().equals(cfe.getTag()) && getType().equals(cfe.getType()) - && getVersion().equals(cfe.getVersion())) { - entity = cfe; - break; - } - } - - // if the configuration was found, then update it - if (null != entity) { - LOG.debug( - "Updating {} version {} with new configurations; a new version will not be created", - getType(), getVersion()); - - entity.setData(gson.toJson(getProperties())); - - // save the entity, forcing a flush to ensure the refresh picks up the - // newest data - clusterDAO.merge(clusterEntity, true); - } - } - } finally { - readWriteLock.writeLock().unlock(); - } + public void save() { + ClusterConfigEntity entity = clusterDAO.findConfig(configId); + ClusterEntity clusterEntity = clusterDAO.findById(entity.getClusterId()); - // re-load the entity associations for the cluster - cluster.refresh(); + // if the configuration was found, then update it + if (null != entity) { + LOG.debug("Updating {} version {} with new configurations; a new version will not be created", + getType(), getVersion()); - // broadcast the change event for the configuration - ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(), - getType(), getTag(), getVersion()); + entity.setData(gson.toJson(getProperties())); + + // save the entity, forcing a flush to ensure the refresh picks up the + // newest data + clusterDAO.merge(clusterEntity, true); + + // re-load the entity associations for the cluster + cluster.refresh(); + + // broadcast the change event for the configuration + ClusterConfigChangedEvent event = new ClusterConfigChangedEvent(cluster.getClusterName(), + getType(), getTag(), getVersion()); 
eventPublisher.publish(event); + } } } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java index 7bf24ce..649fe38 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/cluster/ClusterImpl.java @@ -326,8 +326,11 @@ public class ClusterImpl implements Cluster { loadStackVersion(); loadServices(); loadServiceHostComponents(); - loadConfigGroups(); + + // cache configurations before loading configuration groups cacheConfigurations(); + loadConfigGroups(); + loadRequestExecutions(); if (desiredStackVersion != null && !StringUtils.isEmpty(desiredStackVersion.getStackName()) && ! 
@@ -2566,7 +2569,6 @@ public class ClusterImpl implements Cluster { } } configGroup.setHosts(groupDesiredHosts); - configGroup.persist(); } else { throw new IllegalArgumentException("Config group {} doesn't exist"); } http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java index 1b29c9b..5a9c574 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroup.java @@ -18,13 +18,13 @@ package org.apache.ambari.server.state.configgroup; +import java.util.Map; + import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.controller.ConfigGroupResponse; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.Host; -import java.util.Map; - /** * Configuration group or Config group is a type of Ambari resource that * supports grouping of configuration resources and host resources for a @@ -80,29 +80,20 @@ public interface ConfigGroup { public void setDescription(String description); /** - * List of hosts to which configs are applied + * Gets an unmodifiable list of {@link Host}s. + * * @return */ public Map<Long, Host> getHosts(); /** - * List of @Config objects + * Gets an unmodifiable map of {@link Config}s. 
+ * * @return */ public Map<String, Config> getConfigurations(); /** - * Persist the Config group along with the related host and config mapping - * entities to the persistence store - */ - void persist(); - - /** - * Persist the host mapping entity to the persistence store - */ - void persistHostMapping(); - - /** * Delete config group and the related host and config mapping * entities from the persistence store */ @@ -116,13 +107,6 @@ public interface ConfigGroup { public void addHost(Host host) throws AmbariException; /** - * Add config to the config group - * @param config - * @throws AmbariException - */ - public void addConfiguration(Config config) throws AmbariException; - - /** * Return @ConfigGroupResponse for the config group * * @return @ConfigGroupResponse @@ -131,11 +115,6 @@ public interface ConfigGroup { public ConfigGroupResponse convertToResponse() throws AmbariException; /** - * Refresh Config group and the host and config mappings for the group - */ - public void refresh(); - - /** * Reassign the set of hosts associated with this config group * @param hosts */ http://git-wip-us.apache.org/repos/asf/ambari/blob/704170e4/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java index 9abadf3..906d948 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/state/configgroup/ConfigGroupFactory.java @@ -17,22 +17,38 @@ */ package org.apache.ambari.server.state.configgroup; -import com.google.inject.assistedinject.Assisted; +import java.util.Map; + import org.apache.ambari.server.orm.entities.ConfigGroupEntity; import 
org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.Host; -import org.apache.ambari.server.state.configgroup.ConfigGroup; -import java.util.Map; +import com.google.inject.assistedinject.Assisted; public interface ConfigGroupFactory { - ConfigGroup createNew(@Assisted("cluster") Cluster cluster, - @Assisted("name") String name, - @Assisted("tag") String tag, - @Assisted("description") String description, - @Assisted("configs") Map<String, Config> configs, - @Assisted("hosts") Map<Long, Host> hosts); + /** + * Creates and saves a new {@link ConfigGroup}. + * + * @param cluster + * @param name + * @param tag + * @param description + * @param configs + * @param hosts + * @param serviceName + * @return + */ + ConfigGroup createNew(@Assisted("cluster") Cluster cluster, @Assisted("name") String name, + @Assisted("tag") String tag, @Assisted("description") String description, + @Assisted("configs") Map<String, Config> configs, @Assisted("hosts") Map<Long, Host> hosts); + /** + * Instantiates a {@link ConfigGroup} from an existing, persisted entity. + * + * @param cluster + * @param entity + * @return + */ ConfigGroup createExisting(Cluster cluster, ConfigGroupEntity entity); }