This is an automated email from the ASF dual-hosted git repository.

wuzhiguo pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/ambari.git

The following commit(s) were added to refs/heads/trunk by this push:
     new c73a88e4d4 AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (#3448)
c73a88e4d4 is described below

commit c73a88e4d4a52b4185ff69546ec96b3827934b3c
Author: lucasbak <lucas.bakal...@gmail.com>
AuthorDate: Thu Oct 27 11:10:49 2022 +0200

    AMBARI-25386. dfs_ha_initial_* properties should be removed during upgrade. (#3448)
---
 .../ambari/server/upgrade/SchemaUpgradeHelper.java |   1 +
 .../ambari/server/upgrade/UpgradeCatalog275.java   | 120 +++++++++++++++++++++
 .../server/upgrade/UpgradeCatalog275Test.java      |  91 ++++++++++++++++
 3 files changed, 212 insertions(+)

diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
index 75639758c3..752d9d43a2 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/SchemaUpgradeHelper.java
@@ -191,6 +191,7 @@ public class SchemaUpgradeHelper {
       catalogBinder.addBinding().to(UpgradeCatalog270.class);
       catalogBinder.addBinding().to(UpgradeCatalog271.class);
       catalogBinder.addBinding().to(UpgradeCatalog272.class);
+      catalogBinder.addBinding().to(UpgradeCatalog275.class);
       catalogBinder.addBinding().to(UpgradeCatalog280.class);
       catalogBinder.addBinding().to(UpdateAlertScriptPaths.class);
       catalogBinder.addBinding().to(FinalUpgradeCatalog.class);
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java
new file mode 100644
index 0000000000..35bcc82d16
--- /dev/null
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog275.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.server.AmbariException;
+import org.apache.ambari.server.orm.dao.BlueprintDAO;
+import org.apache.ambari.server.orm.entities.BlueprintConfigEntity;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+
+import com.google.gson.Gson;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+
+/**
+ * The {@link UpgradeCatalog275} upgrades Ambari from 2.7.4 to 2.7.5.
+ */
+public class UpgradeCatalog275 extends AbstractUpgradeCatalog {
+
+  static final Gson GSON = new Gson();
+
+
+  @Inject
+  public UpgradeCatalog275(Injector injector) {
+    super(injector);
+  }
+
+  @Override
+  public String getSourceVersion() {
+    return "2.7.4";
+  }
+
+  @Override
+  public String getTargetVersion() {
+    return "2.7.5";
+  }
+
+  /**
+   * Perform database schema transformation. Can work only before persist service start
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executeDDLUpdates() throws AmbariException, SQLException {
+
+  }
+
+  /**
+   * Perform data insertion before running normal upgrade of data, requires started persist service
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executePreDMLUpdates() throws AmbariException, SQLException {
+    removeDfsHAInitial();
+  }
+
+  /**
+   * Performs normal data upgrade
+   *
+   * @throws AmbariException
+   * @throws SQLException
+   */
+  @Override
+  protected void executeDMLUpdates() throws AmbariException, SQLException {
+    addNewConfigurationsFromXml();
+  }
+
+  protected void removeDfsHAInitial() {
+    BlueprintDAO blueprintDAO = injector.getInstance(BlueprintDAO.class);
+    List<BlueprintEntity> blueprintEntityList = blueprintDAO.findAll();
+    List<BlueprintEntity> changedBlueprints = new ArrayList<>();
+    for (BlueprintEntity blueprintEntity : blueprintEntityList){
+      boolean changed = false;
+      Collection<BlueprintConfigEntity> blueprintConfigurations = blueprintEntity.getConfigurations();
+      for (BlueprintConfigEntity blueprintConfigEntity : blueprintConfigurations) {
+        if (blueprintConfigEntity.getType().equals("hadoop-env")) {
+          String configData = blueprintConfigEntity.getConfigData();
+
+          Map<String, String> typeProperties = GSON.<Map<String, String>>fromJson(
+            configData, Map.class);
+
+          typeProperties.remove("dfs_ha_initial_namenode_standby");
+          typeProperties.remove("dfs_ha_initial_namenode_active");
+
+          blueprintConfigEntity.setConfigData(GSON.toJson(typeProperties));
+          changed = true;
+        }
+      }
+      if (changed) {
+        changedBlueprints.add(blueprintEntity);
+      }
+    }
+    for (BlueprintEntity blueprintEntity : changedBlueprints) {
+      blueprintDAO.merge(blueprintEntity);
+    }
+  }
+}
\ No newline at end of file
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java
new file mode 100644
index 0000000000..93ee00edf3
--- /dev/null
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog275Test.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.ambari.server.upgrade;
+
+import static org.easymock.EasyMock.capture;
+import static org.easymock.EasyMock.createMock;
+import static org.easymock.EasyMock.createNiceMock;
+import static org.easymock.EasyMock.expect;
+import static org.easymock.EasyMock.replay;
+import static org.easymock.EasyMock.verify;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.server.orm.dao.BlueprintDAO;
+import org.apache.ambari.server.orm.entities.BlueprintConfigEntity;
+import org.apache.ambari.server.orm.entities.BlueprintEntity;
+import org.easymock.Capture;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.inject.Injector;
+
+public class UpgradeCatalog275Test {
+
+  @Test
+  public void testRemoveDfsHAInitial() {
+    Injector injector = createNiceMock(Injector.class);
+    BlueprintDAO blueprintDAO = createMock(BlueprintDAO.class);
+
+    BlueprintConfigEntity blueprintConfigEntity = new BlueprintConfigEntity();
+    blueprintConfigEntity.setType("hadoop-env");
+    blueprintConfigEntity.setConfigData("{\"dfs_ha_initial_namenode_standby\":\"%HOSTGROUP::master_2%\"," +
+      "\"dfs_ha_initial_namenode_active\":\"u1602.ambari.apache.org\"}");
+
+    List<BlueprintConfigEntity> blueprintConfigurations = Collections.singletonList(blueprintConfigEntity);
+
+    BlueprintEntity blueprintEntity = new BlueprintEntity();
+    blueprintEntity.setConfigurations(blueprintConfigurations);
+
+    List<BlueprintEntity> blueprintEntityList = Collections.singletonList(blueprintEntity);
+
+    expect(injector.getInstance(BlueprintDAO.class)).andReturn(blueprintDAO);
+    expect(blueprintDAO.findAll()).andReturn(blueprintEntityList);
+
+    Capture<BlueprintEntity> blueprintEntityCapture = Capture.newInstance();
+    expect(blueprintDAO.merge(capture(blueprintEntityCapture))).andReturn(null);
+
+    replay(injector, blueprintDAO);
+
+    UpgradeCatalog275 upgradeCatalog275 = new UpgradeCatalog275(injector);
+    upgradeCatalog275.removeDfsHAInitial();
+
+    verify(injector, blueprintDAO);
+
+    Assert.assertNotNull(blueprintEntityCapture.getValues());
+    Assert.assertEquals(1, blueprintEntityCapture.getValues().size());
+
+    BlueprintEntity blueprintEntityToMerge = blueprintEntityCapture.getValue();
+
+    Collection<BlueprintConfigEntity> resultConfigurations = blueprintEntityToMerge.getConfigurations();
+    for (BlueprintConfigEntity resultConfiguration : resultConfigurations) {
+      if (resultConfiguration.getType().equals("hadoop-env")) {
+        String configData = resultConfiguration.getConfigData();
+
+        Map<String, String> typeProperties = UpgradeCatalog275.GSON.<Map<String, String>>fromJson(
+          configData, Map.class);
+        Assert.assertEquals(0, typeProperties.size());
+        return;
+      }
+    }
+    Assert.fail("No \"hadoop-env\" config type was found in result configuration");
+  }
+}
\ No newline at end of file

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@ambari.apache.org
For additional commands, e-mail: commits-h...@ambari.apache.org