Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 25e531d37 -> aec3e708f


AMBARI-11913. Hive, HBase have required empty properties after Ambari-only 
upgrade to 2.1.0. (vbrodetskyi)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/aec3e708
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/aec3e708
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/aec3e708

Branch: refs/heads/branch-2.1
Commit: aec3e708fd6d9d842a99eb78776c4c22ea800846
Parents: 25e531d
Author: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Authored: Mon Jun 15 02:03:06 2015 +0300
Committer: Vitaly Brodetskyi <vbrodets...@hortonworks.com>
Committed: Mon Jun 15 02:03:06 2015 +0300

----------------------------------------------------------------------
 .../server/upgrade/AbstractUpgradeCatalog.java  | 43 ++++++++++++--------
 .../server/upgrade/UpgradeCatalog210.java       | 38 +++++++++++++++++
 2 files changed, 63 insertions(+), 18 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/aec3e708/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
index 3b62c76..3fec278 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/AbstractUpgradeCatalog.java
@@ -17,19 +17,11 @@
  */
 package org.apache.ambari.server.upgrade;
 
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import javax.persistence.EntityManager;
-
+import com.google.common.collect.Maps;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Provider;
+import com.google.inject.persist.Transactional;
 import org.apache.ambari.server.AmbariException;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
@@ -48,11 +40,17 @@ import org.apache.ambari.server.utils.VersionUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Maps;
-import com.google.inject.Inject;
-import com.google.inject.Injector;
-import com.google.inject.Provider;
-import com.google.inject.persist.Transactional;
+import javax.persistence.EntityManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
 
 public abstract class AbstractUpgradeCatalog implements UpgradeCatalog {
   @Inject
@@ -249,6 +247,10 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
             String configType = 
ConfigHelper.fileNameToConfigType(property.getFilename());
             Config clusterConfigs = cluster.getDesiredConfigByType(configType);
             if(clusterConfigs == null || 
!clusterConfigs.getProperties().containsKey(property.getName())) {
+              if (!checkAccordingToStackAdvisor(property, cluster)) {
+                continue;
+              }
+
               LOG.info("Config " + property.getName() + " from " + configType 
+ " from xml configurations" +
                   " is not found on the cluster. Adding it...");
 
@@ -269,6 +271,11 @@ public abstract class AbstractUpgradeCatalog implements 
UpgradeCatalog {
     }
   }
 
+  protected boolean checkAccordingToStackAdvisor(PropertyInfo property, 
Cluster cluster) {
+    //TODO: in future, we can add here general filters
+    return true;
+  }
+
   /**
    * Create a new cluster scoped configuration with the new properties added
    * with the values from the coresponding xml files.

http://git-wip-us.apache.org/repos/asf/ambari/blob/aec3e708/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
index 9a22aa6..22e80a3 100644
--- 
a/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
+++ 
b/ambari-server/src/main/java/org/apache/ambari/server/upgrade/UpgradeCatalog210.java
@@ -41,6 +41,8 @@ import 
org.apache.ambari.server.orm.entities.ServiceComponentDesiredStateEntityP
 import org.apache.ambari.server.orm.entities.StackEntity;
 import org.apache.ambari.server.state.Cluster;
 import org.apache.ambari.server.state.Clusters;
+import org.apache.ambari.server.state.Config;
+import org.apache.ambari.server.state.PropertyInfo;
 import org.apache.ambari.server.state.Service;
 import org.apache.ambari.server.state.StackId;
 import org.apache.ambari.server.state.alert.AlertDefinitionFactory;
@@ -109,6 +111,8 @@ public class UpgradeCatalog210 extends 
AbstractUpgradeCatalog {
   private static final String TOPOLOGY_HOST_REQUEST_TABLE = 
"topology_host_request";
   private static final String TOPOLOGY_HOST_TASK_TABLE = "topology_host_task";
   private static final String TOPOLOGY_LOGICAL_TASK_TABLE = 
"topology_logical_task";
+  private static final String CONFIGURATION_TYPE_HIVE_SITE = "hive-site";
+  private static final String PROPERTY_HIVE_SERVER2_AUTHENTICATION = 
"hive.server2.authentication";
 
   // constants for stack table changes
   private static final String STACK_ID_COLUMN_NAME = "stack_id";
@@ -120,6 +124,11 @@ public class UpgradeCatalog210 extends 
AbstractUpgradeCatalog {
   private static final DBColumnInfo CURRENT_STACK_ID_COLUMN = new 
DBColumnInfo(CURRENT_STACK_ID_COLUMN_NAME, Long.class, null, null, true);
   private static final DBColumnInfo STACK_ID_COLUMN = new 
DBColumnInfo(STACK_ID_COLUMN_NAME, Long.class, null, null, true);
 
+  // map and list with constants, for filtration like in stack advisor
+  Map<String,List<String>> hiveAuthPropertyValueDependencies = new 
HashMap<String, List<String>>();
+  List<String> allHiveAuthPropertyValueDependecies = new ArrayList<String>();
+
+
   @Inject
   DaoUtils daoUtils;
 
@@ -161,6 +170,17 @@ public class UpgradeCatalog210 extends 
AbstractUpgradeCatalog {
 
     daoUtils = injector.getInstance(DaoUtils.class);
     osFamily = injector.getInstance(OsFamily.class);
+
+    hiveAuthPropertyValueDependencies.put("ldap", 
Arrays.asList("hive.server2.authentication.ldap.url",
+            "hive.server2.authentication.ldap.baseDN"));
+    hiveAuthPropertyValueDependencies.put("kerberos", 
Arrays.asList("hive.server2.authentication.kerberos.keytab",
+            "hive.server2.authentication.kerberos.principal"));
+    hiveAuthPropertyValueDependencies.put("pam", 
Arrays.asList("hive.server2.authentication.pam.services"));
+    hiveAuthPropertyValueDependencies.put("custom", 
Arrays.asList("hive.server2.custom.authentication.class"));
+
+    for (List<String> dependencies : 
hiveAuthPropertyValueDependencies.values()) {
+      allHiveAuthPropertyValueDependecies.addAll(dependencies);
+    }
   }
 
   // ----- AbstractUpgradeCatalog --------------------------------------------
@@ -928,6 +948,24 @@ public class UpgradeCatalog210 extends 
AbstractUpgradeCatalog {
     removeStormRestApiServiceComponent();
   }
 
+  @Override
+  protected boolean checkAccordingToStackAdvisor(PropertyInfo property, 
Cluster cluster) {
+    if (allHiveAuthPropertyValueDependecies.contains(property.getName())) {
+      Config hiveSite = 
cluster.getDesiredConfigByType(CONFIGURATION_TYPE_HIVE_SITE);
+      if (hiveSite != null) {
+        String hiveAuthValue = 
hiveSite.getProperties().get(PROPERTY_HIVE_SERVER2_AUTHENTICATION);
+        if (hiveAuthValue != null) {
+          List<String> dependencies = 
hiveAuthPropertyValueDependencies.get(hiveAuthValue.toLowerCase());
+          if (dependencies != null) {
+            return dependencies.contains(property.getName());
+          }
+        }
+      }
+      return false;
+    }
+    return true;
+  }
+
   /**
    * Delete STORM_REST_API component if HDP is upgraded past 2.2 and the
    * Component still exists.

Reply via email to