Repository: ambari Updated Branches: refs/heads/trunk c774475b9 -> 7b67b01cb
AMBARI-21772. zeppelin proxy user settings are not configured in core-site.xml on upgraded cluster from Ambari 2.4.2 (rlevas) Project: http://git-wip-us.apache.org/repos/asf/ambari/repo Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/7b67b01c Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/7b67b01c Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/7b67b01c Branch: refs/heads/trunk Commit: 7b67b01cb7d6321debf0f67e4715418c021c0d62 Parents: c774475 Author: Robert Levas <rle...@hortonworks.com> Authored: Tue Sep 5 09:01:01 2017 -0400 Committer: Robert Levas <rle...@hortonworks.com> Committed: Tue Sep 5 09:01:01 2017 -0400 ---------------------------------------------------------------------- .../server/upgrade/UpgradeCatalog260.java | 72 ++++++++++++++++++++ .../ZEPPELIN/0.6.0.2.5/kerberos.json | 4 +- .../ZEPPELIN/0.6.0.3.0/kerberos.json | 4 +- .../HDP/2.5/services/ZEPPELIN/kerberos.json | 4 +- .../HDP/2.6/services/ZEPPELIN/kerberos.json | 4 +- .../server/upgrade/UpgradeCatalog260Test.java | 65 ++++++++++++++++++ 6 files changed, 141 insertions(+), 12 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java index 2bd0f93..de5d5ae 100644 --- a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java +++ b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog260.java @@ -17,9 +17,13 @@ */ package org.apache.ambari.server.upgrade; +import static org.apache.ambari.server.view.ViewContextImpl.CORE_SITE; + import java.sql.SQLException; import java.util.ArrayList; +import 
java.util.HashMap; import java.util.List; +import java.util.Map; import javax.persistence.EntityManager; import javax.persistence.Query; @@ -27,6 +31,10 @@ import javax.persistence.Query; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.orm.DBAccessor; import org.apache.ambari.server.orm.entities.ClusterConfigEntity; +import org.apache.ambari.server.state.Cluster; +import org.apache.ambari.server.state.Clusters; +import org.apache.ambari.server.state.Config; +import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -368,6 +376,7 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog { addNewConfigurationsFromXml(); setUnmappedForOrphanedConfigs(); removeSupersetFromDruid(); + ensureZeppelinProxyUserConfigs(); } public int getCurrentVersionID() throws AmbariException, SQLException { @@ -413,4 +422,67 @@ public class UpgradeCatalog260 extends AbstractUpgradeCatalog { dbAccessor.executeQuery(hostComponentStateRemoveSQL); dbAccessor.executeQuery(serviceComponentDesiredStateRemoveSQL); } + + /** + * If Zeppelin is installed, ensure that the proxyuser configurations are set in <code>core-site</code>. 
+ * <p> + * The following configurations will be added, if core-site exists and the properties are not in the + * set of core-site properties: + * <ul> + * <li><code>"hadoop.proxyuser.{zeppelin-env/zeppelin_user}.groups": "*"</code></li> + * <li><code>"hadoop.proxyuser.{zeppelin-env/zeppelin_user}.hosts": "*"</code></li> + * </ul> + */ + void ensureZeppelinProxyUserConfigs() throws AmbariException { + Clusters clusters = injector.getInstance(Clusters.class); + Map<String, Cluster> clusterMap = getCheckedClusterMap(clusters); + + if ((clusterMap != null) && !clusterMap.isEmpty()) { + for (final Cluster cluster : clusterMap.values()) { + Config zeppelinEnvConfig = cluster.getDesiredConfigByType("zeppelin-env"); + + if (zeppelinEnvConfig != null) { + // If zeppelin-env exists, then it is assumed that Zeppelin is installed + Map<String, String> zeppelinEnvProperties = zeppelinEnvConfig.getProperties(); + + String zeppelinUser = null; + if (zeppelinEnvProperties != null) { + zeppelinUser = zeppelinEnvProperties.get("zeppelin_user"); + } + + if (!StringUtils.isEmpty(zeppelinUser)) { + // If the zeppelin user is set, see if the proxyuser configs need to be set + + Config coreSiteConfig = cluster.getDesiredConfigByType(CORE_SITE); + if (coreSiteConfig != null) { + // If core-site exists, ensure the proxyuser configurations for Zeppelin are set. + // If they are not already set, set them to their default value. 
+ String proxyUserHostsName = String.format("hadoop.proxyuser.%s.hosts", zeppelinUser); + String proxyUserGroupsName = String.format("hadoop.proxyuser.%s.groups", zeppelinUser); + + Map<String, String> proxyUserProperties = new HashMap<>(); + proxyUserProperties.put(proxyUserHostsName, "*"); + proxyUserProperties.put(proxyUserGroupsName, "*"); + + Map<String, String> coreSiteConfigProperties = coreSiteConfig.getProperties(); + + if (coreSiteConfigProperties != null) { + if (coreSiteConfigProperties.containsKey(proxyUserHostsName)) { + proxyUserProperties.remove(proxyUserHostsName); + } + + if (coreSiteConfigProperties.containsKey(proxyUserGroupsName)) { + proxyUserProperties.remove(proxyUserGroupsName); + } + } + + if (!proxyUserProperties.isEmpty()) { + updateConfigurationPropertiesForCluster(cluster, CORE_SITE, proxyUserProperties, true, false); + } + } + } + } + } + } + } } http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json index 925215b..b605c9d 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.2.5/kerberos.json @@ -42,9 +42,7 @@ { "core-site": { "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.hosts": "*" + "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*" } } ] 
http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json index 925215b..b605c9d 100644 --- a/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json +++ b/ambari-server/src/main/resources/common-services/ZEPPELIN/0.6.0.3.0/kerberos.json @@ -42,9 +42,7 @@ { "core-site": { "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.hosts": "*" + "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*" } } ] http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/kerberos.json index 925215b..b605c9d 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/kerberos.json +++ b/ambari-server/src/main/resources/stacks/HDP/2.5/services/ZEPPELIN/kerberos.json @@ -42,9 +42,7 @@ { "core-site": { "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.hosts": "*" + "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*" } } ] 
http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/kerberos.json ---------------------------------------------------------------------- diff --git a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/kerberos.json b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/kerberos.json index 925215b..b605c9d 100644 --- a/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/kerberos.json +++ b/ambari-server/src/main/resources/stacks/HDP/2.6/services/ZEPPELIN/kerberos.json @@ -42,9 +42,7 @@ { "core-site": { "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.groups": "*", - "hadoop.proxyuser.${zeppelin-env/zeppelin_user}-${cluster_name|toLower()}.hosts": "*" + "hadoop.proxyuser.${zeppelin-env/zeppelin_user}.hosts": "*" } } ] http://git-wip-us.apache.org/repos/asf/ambari/blob/7b67b01c/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java ---------------------------------------------------------------------- diff --git a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java index d8e8171..4b0404d 100644 --- a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java +++ b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog260Test.java @@ -19,7 +19,9 @@ package org.apache.ambari.server.upgrade; import static org.easymock.EasyMock.anyObject; +import static org.easymock.EasyMock.anyString; import static org.easymock.EasyMock.capture; +import static org.easymock.EasyMock.createMock; import static org.easymock.EasyMock.eq; import static org.easymock.EasyMock.expect; import static 
org.easymock.EasyMock.expectLastCall; @@ -34,12 +36,17 @@ import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.List; +import java.util.Map; +import java.util.Set; import javax.persistence.EntityManager; +import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.actionmanager.ActionManager; import org.apache.ambari.server.configuration.Configuration; +import org.apache.ambari.server.controller.AmbariManagementController; import org.apache.ambari.server.controller.KerberosHelper; import org.apache.ambari.server.controller.MaintenanceStateHelper; import org.apache.ambari.server.orm.DBAccessor; @@ -48,6 +55,7 @@ import org.apache.ambari.server.state.Cluster; import org.apache.ambari.server.state.Clusters; import org.apache.ambari.server.state.Config; import org.apache.ambari.server.state.Service; +import org.apache.ambari.server.state.StackId; import org.apache.ambari.server.state.stack.OsFamily; import org.easymock.Capture; import org.easymock.EasyMockRunner; @@ -535,4 +543,61 @@ public class UpgradeCatalog260Test { Assert.assertEquals(UpgradeCatalog260.REPO_VERSION_HIDDEN_COLUMN, hiddenColumn.getName()); Assert.assertEquals(false, hiddenColumn.isNullable()); } + + @Test + public void testEnsureZeppelinProxyUserConfigs() throws AmbariException { + + final Clusters clusters = createMock(Clusters.class); + final Cluster cluster = createMock(Cluster.class); + final Config zeppelinEnvConf = createMock(Config.class); + final Config coreSiteConf = createMock(Config.class); + final Config coreSiteConfNew = createMock(Config.class); + final AmbariManagementController controller = createMock(AmbariManagementController.class); + + Capture<? 
extends Map<String, String>> captureCoreSiteConfProperties = newCapture(); + + Module module = new Module() { + @Override + public void configure(Binder binder) { + binder.bind(DBAccessor.class).toInstance(dbAccessor); + binder.bind(OsFamily.class).toInstance(osFamily); + binder.bind(EntityManager.class).toInstance(entityManager); + binder.bind(Configuration.class).toInstance(configuration); + binder.bind(Clusters.class).toInstance(clusters); + binder.bind(AmbariManagementController.class).toInstance(controller); + } + }; + + expect(clusters.getClusters()).andReturn(Collections.singletonMap("c1", cluster)).once(); + + expect(cluster.getClusterName()).andReturn("c1").atLeastOnce(); + expect(cluster.getDesiredStackVersion()).andReturn(new StackId("HDP-2.6")).atLeastOnce(); + expect(cluster.getDesiredConfigByType("zeppelin-env")).andReturn(zeppelinEnvConf).atLeastOnce(); + expect(cluster.getDesiredConfigByType("core-site")).andReturn(coreSiteConf).atLeastOnce(); + expect(cluster.getConfigsByType("core-site")).andReturn(Collections.singletonMap("tag1", coreSiteConf)).atLeastOnce(); + expect(cluster.getConfig(eq("core-site"), anyString())).andReturn(coreSiteConfNew).atLeastOnce(); + expect(cluster.getServiceByConfigType("core-site")).andReturn("HDFS").atLeastOnce(); + expect(cluster.addDesiredConfig(eq("ambari-upgrade"), anyObject(Set.class), anyString())).andReturn(null).atLeastOnce(); + + expect(zeppelinEnvConf.getProperties()).andReturn(Collections.singletonMap("zeppelin_user", "zeppelin_user")).once(); + + expect(coreSiteConf.getProperties()).andReturn(Collections.singletonMap("hadoop.proxyuser.zeppelin_user.hosts", "existing_value")).atLeastOnce(); + expect(coreSiteConf.getPropertiesAttributes()).andReturn(Collections.<String, Map<String, String>>emptyMap()).atLeastOnce(); + + expect(controller.createConfig(eq(cluster), anyObject(StackId.class), eq("core-site"), capture(captureCoreSiteConfProperties), anyString(), anyObject(Map.class))) + 
.andReturn(coreSiteConfNew) + .once(); + + replay(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller); + + Injector injector = Guice.createInjector(module); + UpgradeCatalog260 upgradeCatalog260 = injector.getInstance(UpgradeCatalog260.class); + upgradeCatalog260.ensureZeppelinProxyUserConfigs(); + + verify(clusters, cluster, zeppelinEnvConf, coreSiteConf, coreSiteConfNew, controller); + + Assert.assertTrue(captureCoreSiteConfProperties.hasCaptured()); + Assert.assertEquals("existing_value", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.hosts")); + Assert.assertEquals("*", captureCoreSiteConfProperties.getValue().get("hadoop.proxyuser.zeppelin_user.groups")); + } }