AMBARI-21798. Oozie server crashes post migration after regen kerberos keytabs 
and restart all services (rlevas)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/60632536
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/60632536
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/60632536

Branch: refs/heads/feature-branch-AMBARI-21307
Commit: 60632536aa6fbcee96580c24d6e2d7f67dd184fa
Parents: 65ddaed
Author: Robert Levas <rle...@hortonworks.com>
Authored: Thu Aug 24 09:06:06 2017 -0400
Committer: Robert Levas <rle...@hortonworks.com>
Committed: Thu Aug 24 09:06:06 2017 -0400

----------------------------------------------------------------------
 .../server/upgrade/UpgradeCatalog252.java       | 87 ++++++++++++++++----
 .../server/upgrade/UpgradeCatalog252Test.java   | 57 +++++++++++++
 2 files changed, 129 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/60632536/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
index b83ba64..1b706d6 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog252.java
@@ -19,6 +19,7 @@ package org.apache.ambari.server.upgrade;
 
 import java.sql.SQLException;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -37,9 +38,12 @@ import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.ConfigHelper;
 import org.apache.ambari.server.state.PropertyInfo;
 import 
org.apache.ambari.server.state.kerberos.AbstractKerberosDescriptorContainer;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosConfigurationDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
+import org.apache.ambari.server.state.kerberos.KerberosServiceDescriptor;
+import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -236,15 +240,52 @@ public class UpgradeCatalog252 extends 
AbstractUpgradeCatalog {
         final KerberosDescriptor kerberosDescriptor = new 
KerberosDescriptorFactory().createInstance(data);
 
         if (kerberosDescriptor != null) {
+          boolean updated = false;
+
           // Find and remove configuration specifications for 
<code>livy-conf/livy.superusers</code>
           // in SPARK since this logic has been moved to the relevant 
stack/service advisors
-          boolean updatedSpark = 
removeConfigurationSpecification(kerberosDescriptor.getService("SPARK"), 
"livy-conf", "livy.superusers");
+          
if(removeConfigurationSpecifications(kerberosDescriptor.getService("SPARK"),
+              Collections.<String, 
Collection<String>>singletonMap("livy-conf", 
Collections.singleton("livy.superusers")))) {
+            updated = true;
+          }
 
          // Find and remove configuration specifications for 
<code>livy2-conf/livy.superusers</code>
          // in SPARK2 since this logic has been moved to the relevant 
stack/service advisors
-          boolean updatedSpark2 = 
removeConfigurationSpecification(kerberosDescriptor.getService("SPARK2"), 
"livy2-conf", "livy.superusers");
+          
if(removeConfigurationSpecifications(kerberosDescriptor.getService("SPARK2"),
+              Collections.<String, 
Collection<String>>singletonMap("livy2-conf", 
Collections.singleton("livy.superusers")))) {
+            updated = true;
+          }
+
+          // Find and remove configuration specifications for the following 
configurations in KNOX/KNOX_GATEWAY
+          // since they are invalid due to static "knox" embedded in the 
property name:
+          // * oozie-site/oozie.service.ProxyUserService.proxyuser.knox.groups
+          // * oozie-site/oozie.service.ProxyUserService.proxyuser.knox.hosts
+          // * webhcat-site/webhcat.proxyuser.knox.groups
+          // * webhcat-site/webhcat.proxyuser.knox.hosts
+          // * core-site/hadoop.proxyuser.knox.groups
+          // * core-site/hadoop.proxyuser.knox.hosts
+          // * 
falcon-runtime.properties/*.falcon.service.ProxyUserService.proxyuser.knox.groups
+          // * 
falcon-runtime.properties/*.falcon.service.ProxyUserService.proxyuser.knox.hosts
+          KerberosServiceDescriptor knoxKerberosDescriptor = 
kerberosDescriptor.getService("KNOX");
+          if(knoxKerberosDescriptor != null) {
+            KerberosComponentDescriptor knoxGatewayKerberosDescriptor = 
knoxKerberosDescriptor.getComponent("KNOX_GATEWAY");
+            if (knoxGatewayKerberosDescriptor != null) {
+              Map<String, Collection<String>> configsToRemove = new 
HashMap<>();
+              configsToRemove.put("oozie-site",
+                  
Arrays.asList("oozie.service.ProxyUserService.proxyuser.knox.groups", 
"oozie.service.ProxyUserService.proxyuser.knox.hosts"));
+              configsToRemove.put("webhcat-site",
+                  Arrays.asList("webhcat.proxyuser.knox.groups", 
"webhcat.proxyuser.knox.hosts"));
+              configsToRemove.put("core-site",
+                  Arrays.asList("hadoop.proxyuser.knox.groups", 
"hadoop.proxyuser.knox.hosts"));
+              configsToRemove.put("falcon-runtime.properties",
+                  
Arrays.asList("*.falcon.service.ProxyUserService.proxyuser.knox.groups", 
"*.falcon.service.ProxyUserService.proxyuser.knox.hosts"));
+              if 
(removeConfigurationSpecifications(knoxGatewayKerberosDescriptor, 
configsToRemove)) {
+                updated = true;
+              }
+            }
+          }
 
-          if (updatedSpark || updatedSpark2) {
+          if (updated) {
             artifactEntity.setArtifactData(kerberosDescriptor.toMap());
             artifactDAO.merge(artifactEntity);
           }
@@ -354,24 +395,40 @@ public class UpgradeCatalog252 extends 
AbstractUpgradeCatalog {
   }
 
   /**
-   * Given an {@link AbstractKerberosDescriptorContainer}, attempts to remove 
the specified property
-   * (<code>configType/propertyName</code> from it.
+   * Given an {@link AbstractKerberosDescriptorContainer}, attempts to remove 
the specified
+   * configurations (<code>configType/propertyName</code>) from it.
    *
    * @param kerberosDescriptorContainer the container to update
-   * @param configType                  the configuration type
-   * @param propertyName                the property name
+   * @param configurations              a map of configuration types to sets 
of property names.
   * @return true if changes were made to the container; false otherwise
    */
-  private boolean 
removeConfigurationSpecification(AbstractKerberosDescriptorContainer 
kerberosDescriptorContainer, String configType, String propertyName) {
+  private boolean 
removeConfigurationSpecifications(AbstractKerberosDescriptorContainer 
kerberosDescriptorContainer, Map<String, Collection<String>> configurations) {
     boolean updated = false;
     if (kerberosDescriptorContainer != null) {
-      KerberosConfigurationDescriptor configurationDescriptor = 
kerberosDescriptorContainer.getConfiguration(configType);
-      if (configurationDescriptor != null) {
-        Map<String, String> properties = 
configurationDescriptor.getProperties();
-        if ((properties != null) && properties.containsKey(propertyName)) {
-          properties.remove(propertyName);
-          LOG.info("Removed {}/{} from the descriptor named {}", configType, 
propertyName, kerberosDescriptorContainer.getName());
-          updated = true;
+      if (!MapUtils.isEmpty(configurations)) {
+        for (Map.Entry<String, Collection<String>> entry : 
configurations.entrySet()) {
+          String configType = entry.getKey();
+
+          for (String propertyName : entry.getValue()) {
+            Map<String, KerberosConfigurationDescriptor> 
configurationDescriptors = kerberosDescriptorContainer.getConfigurations(false);
+            KerberosConfigurationDescriptor configurationDescriptor = 
(configurationDescriptors == null)
+                ? null
+                : configurationDescriptors.get(configType);
+            if (configurationDescriptor != null) {
+              Map<String, String> properties = 
configurationDescriptor.getProperties();
+              if ((properties != null) && 
properties.containsKey(propertyName)) {
+                properties.remove(propertyName);
+                LOG.info("Removed {}/{} from the descriptor named {}", 
configType, propertyName, kerberosDescriptorContainer.getName());
+                updated = true;
+
+                // If there are no more properties in the 
configurationDescriptor, remove it from the container.
+                if (properties.isEmpty()) {
+                  configurationDescriptors.remove(configType);
+                  
kerberosDescriptorContainer.setConfigurations(configurationDescriptors);
+                }
+              }
+            }
+          }
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/ambari/blob/60632536/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
index d7df68c..d936c6d 100644
--- 
a/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
+++ 
b/ambari-server/src/test/java/org/apache/ambari/server/upgrade/UpgradeCatalog252Test.java
@@ -55,6 +55,7 @@ import org.apache.ambari.server.state.Clusters;
 import org.apache.ambari.server.state.Config;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
+import org.apache.ambari.server.state.kerberos.KerberosComponentDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptor;
 import org.apache.ambari.server.state.kerberos.KerberosDescriptorFactory;
 import org.apache.ambari.server.state.stack.OsFamily;
@@ -294,6 +295,48 @@ public class UpgradeCatalog252Test {
         "      ]" +
         "    }," +
         "    {" +
+        "      \"name\": \"KNOX\"," +
+        "      \"components\": [" +
+        "        {" +
+        "          \"name\": \"KNOX_GATEWAY\"," +
+        "          \"configurations\": [" +
+        "            {" +
+        "              \"core-site\": {" +
+        "                \"property1\": \"true\"," +
+        "                \"property2\": \"true\"," +
+        "                \"hadoop.proxyuser.knox.groups\": \"somevalue\"," +
+        "                \"hadoop.proxyuser.knox.hosts\": \"somevalue\"" +
+        "              }" +
+        "            }," +
+        "            {" +
+        "              \"webhcat-site\": {" +
+        "                \"webhcat.proxyuser.knox.groups\": \"somevalue\"," +
+        "                \"webhcat.proxyuser.knox.hosts\": \"somevalue\"" +
+        "              }" +
+        "            }," +
+        "            {" +
+        "              \"oozie-site\": {" +
+        "                
\"oozie.service.ProxyUserService.proxyuser.knox.groups\": \"somevalue\"," +
+        "                
\"oozie.service.ProxyUserService.proxyuser.knox.hosts\": \"somevalue\"" +
+        "              }" +
+        "            }," +
+        "            {" +
+        "              \"falcon-runtime.properties\": {" +
+        "                
\"*.falcon.service.ProxyUserService.proxyuser.knox.groups\": \"somevalue\"," +
+        "                
\"*.falcon.service.ProxyUserService.proxyuser.knox.hosts\": \"somevalue\"" +
+        "              }" +
+        "            }," +
+        "            {" +
+        "              \"some-env\": {" +
+        "                \"groups\": \"${hadoop-env/proxyuser_group}\"," +
+        "                \"hosts\": 
\"${clusterHostInfo/existing_service_master_hosts}\"" +
+        "              }" +
+        "            }" +
+        "          ]" +
+        "        }" +
+        "      ]" +
+        "    }," +
+        "    {" +
         "      \"name\": \"NOT_SPARK\"," +
         "      \"configurations\": [" +
         "        {" +
@@ -365,5 +408,19 @@ public class UpgradeCatalog252Test {
     
Assert.assertNotNull(result.getService("NOT_SPARK").getConfiguration("not-livy-conf"));
     
Assert.assertNotNull(result.getService("NOT_SPARK").getConfiguration("not-livy-conf").getProperties());
     
Assert.assertTrue(result.getService("NOT_SPARK").getConfiguration("not-livy-conf").getProperties().containsKey("livy.superusers"));
+
+    Assert.assertNotNull(result.getService("KNOX"));
+
+    KerberosComponentDescriptor knoxGateway = 
result.getService("KNOX").getComponent("KNOX_GATEWAY");
+    Assert.assertNotNull(knoxGateway);
+    Assert.assertNotNull(knoxGateway.getConfiguration("core-site"));
+    
Assert.assertNotNull(knoxGateway.getConfiguration("core-site").getProperties());
+    
Assert.assertTrue(knoxGateway.getConfiguration("core-site").getProperties().containsKey("property1"));
+    
Assert.assertFalse(knoxGateway.getConfiguration("core-site").getProperties().containsKey("hadoop.proxyuser.knox.groups"));
+    
Assert.assertFalse(knoxGateway.getConfiguration("core-site").getProperties().containsKey("hadoop.proxyuser.knox.hosts"));
+    Assert.assertNull(knoxGateway.getConfiguration("oozie-site"));
+    Assert.assertNull(knoxGateway.getConfiguration("webhcat-site"));
+    
Assert.assertNull(knoxGateway.getConfiguration("falcon-runtime.properties"));
+    Assert.assertNotNull(knoxGateway.getConfiguration("some-env"));
   }
 }

Reply via email to