Repository: ambari
Updated Branches:
  refs/heads/trunk d658e6fd7 -> 33f24d575

AMBARI-11672. Ambari 2.0.1 server doesn't start after upgrade from HDP 2.1.7 to 2.2.4. (swagle)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/33f24d57
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/33f24d57
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/33f24d57

Branch: refs/heads/trunk
Commit: 33f24d57537c5055b896d5b011e535be96d31c8f
Parents: d658e6f
Author: Siddharth Wagle <swa...@hortonworks.com>
Authored: Wed Jun 3 18:30:50 2015 -0700
Committer: Siddharth Wagle <swa...@hortonworks.com>
Committed: Wed Jun 3 19:40:19 2015 -0700

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog210.java       | 94 +++++++++++++++++---
 .../server/upgrade/UpgradeCatalog210Test.java   | 81 +++++++++++++++++
 2 files changed, 164 insertions(+), 11 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/33f24d57/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index e331475..2e40461 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -31,14 +31,20 @@ import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.dao.AlertDefinitionDAO;
 import org.apache.ambari.server.orm.dao.DaoUtils;
+import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
 import org.apache.ambari.server.orm.dao.StackDAO;
 import org.apache.ambari.server.orm.entities.AlertDefinitionEntity;
+import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.HostComponentStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.stack.OsFamily;
+import org.apache.ambari.server.utils.VersionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.eclipse.persistence.internal.databaseaccess.FieldTypeDefinition;
 import org.slf4j.Logger;
@@ -46,6 +52,9 @@ import org.slf4j.LoggerFactory;
 
 import javax.persistence.EntityManager;
 import javax.persistence.Query;
+import javax.persistence.criteria.CriteriaBuilder;
+import javax.persistence.criteria.CriteriaDelete;
+import javax.persistence.criteria.Root;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.text.MessageFormat;
@@ -471,21 +480,21 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
       dbAccessor.executeQuery("ALTER TABLE " + SERVICE_CONFIG_HOSTS_TABLE + " DROP CONSTRAINT serviceconfighosts_pkey");
     }
     dbAccessor.executeQuery("ALTER TABLE " + CONFIG_GROUP_HOST_MAPPING_TABLE +
-      " ADD CONSTRAINT configgrouphostmapping_pkey PRIMARY KEY (config_group_id, host_id)");
+        " ADD CONSTRAINT configgrouphostmapping_pkey PRIMARY KEY (config_group_id, host_id)");
     dbAccessor.executeQuery("ALTER TABLE " + CLUSTER_HOST_MAPPING_TABLE +
-      " ADD CONSTRAINT clusterhostmapping_pkey PRIMARY KEY (cluster_id, host_id)");
+        " ADD CONSTRAINT clusterhostmapping_pkey PRIMARY KEY (cluster_id, host_id)");
     dbAccessor.executeQuery("ALTER TABLE " + HOST_CONFIG_MAPPING_TABLE +
-      " ADD CONSTRAINT hostconfigmapping_pkey PRIMARY KEY (cluster_id, host_id, type_name, create_timestamp)");
+        " ADD CONSTRAINT hostconfigmapping_pkey PRIMARY KEY (cluster_id, host_id, type_name, create_timestamp)");
     dbAccessor.executeQuery("ALTER TABLE " + HOST_COMPONENT_STATE_TABLE +
-      " ADD CONSTRAINT hostcomponentstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)");
+        " ADD CONSTRAINT hostcomponentstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)");
     dbAccessor.executeQuery("ALTER TABLE " + HOST_COMPONENT_DESIRED_STATE_TABLE +
-      " ADD CONSTRAINT hostcomponentdesiredstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)");
+        " ADD CONSTRAINT hostcomponentdesiredstate_pkey PRIMARY KEY (cluster_id, component_name, host_id, service_name)");
     dbAccessor.executeQuery("ALTER TABLE " + HOST_STATE_TABLE +
-      " ADD CONSTRAINT hoststate_pkey PRIMARY KEY (host_id)");
+        " ADD CONSTRAINT hoststate_pkey PRIMARY KEY (host_id)");
     dbAccessor.executeQuery("ALTER TABLE " + KERBEROS_PRINCIPAL_HOST_TABLE +
-      " ADD CONSTRAINT kerberos_principal_host_pkey PRIMARY KEY (principal_name, host_id)");
+        " ADD CONSTRAINT kerberos_principal_host_pkey PRIMARY KEY (principal_name, host_id)");
     dbAccessor.executeQuery("ALTER TABLE " + SERVICE_CONFIG_HOSTS_TABLE +
-      " ADD CONSTRAINT serviceconfighosts_pkey PRIMARY KEY (service_config_id, host_id)");
+        " ADD CONSTRAINT serviceconfighosts_pkey PRIMARY KEY (service_config_id, host_id)");
 
     // Finish by deleting the unnecessary host_name columns.
     dbAccessor.dropColumn(CONFIG_GROUP_HOST_MAPPING_TABLE, HOST_NAME_COL);
@@ -579,11 +588,11 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
     dbAccessor.createTable(STACK_TABLE, columns, "stack_id");
 
     dbAccessor.executeQuery("ALTER TABLE " + STACK_TABLE
-      + " ADD CONSTRAINT unq_stack UNIQUE (stack_name,stack_version)", false);
+        + " ADD CONSTRAINT unq_stack UNIQUE (stack_name,stack_version)", false);
 
     dbAccessor.executeQuery(
-      "INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('stack_id_seq', 0)",
-      false);
+        "INSERT INTO ambari_sequences(sequence_name, sequence_value) VALUES('stack_id_seq', 0)",
+        false);
 
     // create the new stack ID columns NULLABLE for now since we need to insert
     // data into them later on (we'll change them to NOT NULL after that)
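
The last hunk in this file (below) introduces the new upgrade step, gated on the cluster's current stack. A minimal sketch of that gate follows, assuming only that VersionUtils.compareVersions orders dotted version strings and returns a negative, zero, or positive int, as its use in the patch implies; the wrapper class and method names are illustrative, not part of the patch:

import org.apache.ambari.server.utils.VersionUtils;

public final class StackGateSketch {

  // Mirrors the condition in removeStormRestApiServiceComponent(): the
  // cleanup is meant to run only for HDP stacks at version 2.2 or later.
  static boolean needsStormRestApiCleanup(String stackName, String stackVersion) {
    return "HDP".equals(stackName)
        && VersionUtils.compareVersions(stackVersion, "2.2") >= 0;
  }

  public static void main(String[] args) {
    System.out.println(needsStormRestApiCleanup("HDP", "2.2.4"));   // expected: true
    System.out.println(needsStormRestApiCleanup("HDP", "2.1.7"));   // expected: false
    System.out.println(needsStormRestApiCleanup("OTHER", "2.2.4")); // expected: false
  }
}

This is exactly the upgrade scenario in the JIRA title: a cluster moved from HDP 2.1.7 (where STORM_REST_API existed) to 2.2.4 (where it does not).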
@@ -894,6 +903,69 @@ public class UpgradeCatalog210 extends AbstractUpgradeCatalog {
 
     addMissingConfigs();
     updateAlertDefinitions();
+    removeStormRestApiServiceComponent();
+  }
+
+  /**
+   * Delete the STORM_REST_API component if HDP was upgraded past 2.2 and the
+   * component still exists.
+   */
+  protected void removeStormRestApiServiceComponent() {
+    AmbariManagementController ambariManagementController = injector.getInstance(AmbariManagementController.class);
+    Clusters clusters = ambariManagementController.getClusters();
+
+    if (clusters != null) {
+      Map<String, Cluster> clusterMap = clusters.getClusters();
+      for (final Cluster cluster : clusterMap.values()) {
+        StackId stackId = cluster.getCurrentStackVersion();
+        if (stackId != null && stackId.getStackName().equals("HDP") &&
+          VersionUtils.compareVersions(stackId.getStackVersion(), "2.2") >= 0) {
+
+          executeInTransaction(new Runnable() {
+            @Override
+            public void run() {
+              ServiceComponentDesiredStateDAO dao = injector.getInstance(ServiceComponentDesiredStateDAO.class);
+              ServiceComponentDesiredStateEntityPK entityPK = new ServiceComponentDesiredStateEntityPK();
+              entityPK.setClusterId(cluster.getClusterId());
+              entityPK.setServiceName("STORM");
+              entityPK.setComponentName("STORM_REST_API");
+              ServiceComponentDesiredStateEntity entity = dao.findByPK(entityPK);
+              if (entity != null) {
+                EntityManager em = getEntityManagerProvider().get();
+                CriteriaBuilder cb = em.getCriteriaBuilder();
+
+                try {
+                  LOG.info("Deleting STORM_REST_API service component.");
+                  CriteriaDelete<HostComponentStateEntity> hcsDelete = cb.createCriteriaDelete(HostComponentStateEntity.class);
+                  CriteriaDelete<HostComponentDesiredStateEntity> hcdDelete = cb.createCriteriaDelete(HostComponentDesiredStateEntity.class);
+                  CriteriaDelete<ServiceComponentDesiredStateEntity> scdDelete = cb.createCriteriaDelete(ServiceComponentDesiredStateEntity.class);
+
+                  Root<HostComponentStateEntity> hcsRoot = hcsDelete.from(HostComponentStateEntity.class);
+                  Root<HostComponentDesiredStateEntity> hcdRoot = hcdDelete.from(HostComponentDesiredStateEntity.class);
+                  Root<ServiceComponentDesiredStateEntity> scdRoot = scdDelete.from(ServiceComponentDesiredStateEntity.class);
+
+                  hcsDelete.where(cb.equal(hcsRoot.get("componentName"), "STORM_REST_API"));
+                  hcdDelete.where(cb.equal(hcdRoot.get("componentName"), "STORM_REST_API"));
+                  scdDelete.where(cb.equal(scdRoot.get("componentName"), "STORM_REST_API"));
+
+                  em.createQuery(hcsDelete).executeUpdate();
+                  em.createQuery(hcdDelete).executeUpdate();
+                  em.createQuery(scdDelete).executeUpdate();
+                } catch (Exception e) {
+                  LOG.warn("Error deleting the STORM_REST_API service component. " +
+                    "This could result in issues with Ambari server start. " +
+                    "Please make sure the STORM_REST_API component is deleted " +
+                    "from the database by running the following commands:\n" +
+                    "delete from hostcomponentdesiredstate where component_name='STORM_REST_API';\n" +
+                    "delete from hostcomponentstate where component_name='STORM_REST_API';\n" +
+                    "delete from servicecomponentdesiredstate where component_name='STORM_REST_API';\n", e);
+                }
+              }
+            }
+          });
+        }
+      }
+    }
   }
 
   protected void updateAlertDefinitions() {

http://git-wip-us.apache.org/repos/asf/ambari/blob/33f24d57/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
index 356fb7d..8ea45b9 100644
--- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
+++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog210Test.java
@@ -24,14 +24,29 @@ import com.google.inject.Injector;
 import com.google.inject.Module;
 import com.google.inject.Provider;
 import com.google.inject.persist.PersistService;
+import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.controller.AmbariManagementController;
 import org.apache.ambari.server.orm.DBAccessor;
 import org.apache.ambari.server.orm.DBAccessor.DBColumnInfo;
 import org.apache.ambari.server.orm.GuiceJpaInitializer;
 import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
+import org.apache.ambari.server.orm.dao.ClusterDAO;
+import org.apache.ambari.server.orm.dao.ClusterStateDAO;
+import org.apache.ambari.server.orm.dao.HostComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.ServiceComponentDesiredStateDAO;
+import org.apache.ambari.server.orm.dao.StackDAO;
+import org.apache.ambari.server.orm.entities.ClusterEntity;
+import org.apache.ambari.server.orm.entities.ClusterServiceEntity;
+import org.apache.ambari.server.orm.entities.ClusterStateEntity;
+import org.apache.ambari.server.orm.entities.HostComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.HostEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntity;
+import org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityPK;
+import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.HostComponentAdminState;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.stack.OsFamily;
 import org.easymock.Capture;
@@ -71,6 +86,7 @@ public class UpgradeCatalog210Test {
   private Provider<EntityManager> entityManagerProvider = createStrictMock(Provider.class);
   private EntityManager entityManager = createNiceMock(EntityManager.class);
   private UpgradeCatalogHelper upgradeCatalogHelper;
+  private StackEntity desiredStackEntity;
 
   @Before
   public void init() {
@@ -81,6 +97,11 @@ public class UpgradeCatalog210Test {
     injector.getInstance(GuiceJpaInitializer.class);
 
     upgradeCatalogHelper = injector.getInstance(UpgradeCatalogHelper.class);
+    // inject AmbariMetaInfo to ensure that stacks get populated in the DB
+    injector.getInstance(AmbariMetaInfo.class);
+    // load the stack entity
+    StackDAO stackDAO = injector.getInstance(StackDAO.class);
+    desiredStackEntity = stackDAO.find("HDP", "2.2.0");
   }
 
   @After
@@ -215,6 +236,66 @@ public class UpgradeCatalog210Test {
     verify(controller, clusters, cluster);
   }
 
+  @Test
+  public void testDeleteStormRestApiServiceComponent() throws Exception {
+    ClusterEntity clusterEntity = upgradeCatalogHelper.createCluster(injector,
+      "c1", desiredStackEntity);
+    ClusterServiceEntity clusterServiceEntity = upgradeCatalogHelper.createService(
+      injector, clusterEntity, "STORM");
+    HostEntity hostEntity = upgradeCatalogHelper.createHost(injector,
+      clusterEntity, "h1");
+
+    // Set current stack version
+    ClusterDAO clusterDAO = injector.getInstance(ClusterDAO.class);
+    ClusterStateDAO clusterStateDAO = injector.getInstance(ClusterStateDAO.class);
+    ClusterStateEntity clusterStateEntity = new ClusterStateEntity();
+    clusterStateEntity.setClusterId(clusterEntity.getClusterId());
+    clusterStateEntity.setClusterEntity(clusterEntity);
+    clusterStateEntity.setCurrentStack(desiredStackEntity);
+
+    clusterStateDAO.create(clusterStateEntity);
+
+    clusterEntity.setClusterStateEntity(clusterStateEntity);
+    clusterDAO.merge(clusterEntity);
+
+    ServiceComponentDesiredStateEntity componentDesiredStateEntity = new ServiceComponentDesiredStateEntity();
+    componentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+    componentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    componentDesiredStateEntity.setClusterServiceEntity(clusterServiceEntity);
+    componentDesiredStateEntity.setComponentName("STORM_REST_API");
+    componentDesiredStateEntity.setDesiredStack(desiredStackEntity);
+
+    HostComponentDesiredStateDAO hostComponentDesiredStateDAO =
+      injector.getInstance(HostComponentDesiredStateDAO.class);
+
+    HostComponentDesiredStateEntity hostComponentDesiredStateEntity = new HostComponentDesiredStateEntity();
+
+    hostComponentDesiredStateEntity.setClusterId(clusterEntity.getClusterId());
+    hostComponentDesiredStateEntity.setComponentName("STORM_REST_API");
+    hostComponentDesiredStateEntity.setAdminState(HostComponentAdminState.INSERVICE);
+    hostComponentDesiredStateEntity.setServiceName(clusterServiceEntity.getServiceName());
+    hostComponentDesiredStateEntity.setServiceComponentDesiredStateEntity(componentDesiredStateEntity);
+    hostComponentDesiredStateEntity.setHostEntity(hostEntity);
+    hostComponentDesiredStateEntity.setDesiredStack(desiredStackEntity);
+
+    hostComponentDesiredStateDAO.create(hostComponentDesiredStateEntity);
+
+    HostComponentDesiredStateEntity entity = hostComponentDesiredStateDAO.findAll().get(0);
+
+    Assert.assertEquals(HostComponentAdminState.INSERVICE.name(), entity.getAdminState().name());
+
+    UpgradeCatalog210 upgradeCatalog210 = injector.getInstance(UpgradeCatalog210.class);
+    upgradeCatalog210.removeStormRestApiServiceComponent();
+
+    ServiceComponentDesiredStateDAO componentDesiredStateDAO =
+      injector.getInstance(ServiceComponentDesiredStateDAO.class);
+
+    ServiceComponentDesiredStateEntityPK entityPK = new ServiceComponentDesiredStateEntityPK();
+    entityPK.setClusterId(clusterEntity.getClusterId());
+    entityPK.setServiceName("STORM");
+    entityPK.setComponentName("STORM_REST_API");
+
+    Assert.assertNull(componentDesiredStateDAO.findByPK(entityPK));
+  }
+
   /**
    * @param dbAccessor
    * @return
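
If the automated cleanup fails, the warning added in this commit tells operators to run three DELETE statements by hand. A hedged sketch of issuing the same statements over JDBC; the connection URL, user, and password are placeholders that must be replaced with the actual Ambari database settings:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public final class ManualStormRestApiCleanup {
  public static void main(String[] args) throws Exception {
    // Placeholder connection settings; substitute the real Ambari DB URL,
    // credentials, and the matching JDBC driver for your database.
    try (Connection conn = DriverManager.getConnection(
             "jdbc:postgresql://localhost:5432/ambari", "ambari", "changeme");
         Statement stmt = conn.createStatement()) {
      conn.setAutoCommit(false);
      // Same statements, in the same order, as the new warning message:
      // host-level rows first, then the service component row they reference.
      stmt.executeUpdate(
          "delete from hostcomponentdesiredstate where component_name='STORM_REST_API'");
      stmt.executeUpdate(
          "delete from hostcomponentstate where component_name='STORM_REST_API'");
      stmt.executeUpdate(
          "delete from servicecomponentdesiredstate where component_name='STORM_REST_API'");
      conn.commit();
    }
  }
}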