Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 192acc5d5 -> f3575b1b0


AMBARI-15929. Client START while suspended puts entire host to wrong version (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f3575b1b
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f3575b1b
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f3575b1b

Branch: refs/heads/branch-2.2
Commit: f3575b1b000c18b1ad85ccd7f177d8d34582d821
Parents: 192acc5
Author: Nate Cole <nc...@hortonworks.com>
Authored: Fri Apr 15 19:28:46 2016 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Fri Apr 15 19:35:30 2016 -0400

----------------------------------------------------------------------
 .../AmbariManagementControllerImpl.java         |   9 +
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |   2 +
 .../scripts/shared_initialization.py            |  10 +-
 .../hooks/after-INSTALL/test_after_install.py   | 235 +++++++++++++++++++
 4 files changed, 251 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f3575b1b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
index 40c9a12..20da126 100644
--- a/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
+++ b/ambari-server/src/main/java/org/apache/ambari/server/controller/AmbariManagementControllerImpl.java
@@ -46,6 +46,7 @@ import org.apache.ambari.server.actionmanager.RequestFactory;
 import org.apache.ambari.server.actionmanager.Stage;
 import org.apache.ambari.server.actionmanager.StageFactory;
 import org.apache.ambari.server.agent.ExecutionCommand;
+import org.apache.ambari.server.agent.ExecutionCommand.KeyNames;
 import org.apache.ambari.server.api.services.AmbariMetaInfo;
 import org.apache.ambari.server.configuration.Configuration;
 import org.apache.ambari.server.configuration.Configuration.DatabaseType;
@@ -2092,6 +2093,14 @@ public class AmbariManagementControllerImpl implements AmbariManagementControlle
     execCmd.setHostLevelParams(hostParams);
 
     Map<String, String> roleParams = new TreeMap<String, String>();
+
+    // !!! consistent with where custom commands put variables
+    // !!! after-INSTALL hook checks this such that the stack selection tool won't
+    // select-all to a version that is not being upgraded, breaking RU
+    if (cluster.isUpgradeSuspended()) {
+      roleParams.put(KeyNames.UPGRADE_SUSPENDED, Boolean.TRUE.toString().toLowerCase());
+    }
+
     execCmd.setRoleParams(roleParams);
 
     if ((execCmd != null) && (execCmd.getConfigurationTags().containsKey("cluster-env"))) {

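For context on the wire format: the flag rides in the roleParams map of the execution command, and the after-INSTALL hook reads it back at "/roleParams/upgrade_suspended" (params.py below), so KeyNames.UPGRADE_SUSPENDED presumably resolves to the key "upgrade_suspended". A minimal sketch of the resulting command payload, assuming the usual JSON serialization of roleParams; the key name and the lowercase "true" are taken from the hunks in this commit:

# Hypothetical fragment of the execution command JSON an agent would
# receive for a client START issued while an upgrade is suspended.
command_fragment = {
    "roleParams": {
        "upgrade_suspended": "true",  # Boolean.TRUE.toString().toLowerCase()
    },
}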
http://git-wip-us.apache.org/repos/asf/ambari/blob/f3575b1b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index 580ab72..63e31e1 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -97,3 +97,5 @@ if has_namenode or dfs_type == 'HCFS':
 
 link_configs_lock_file = os.path.join(tmp_dir, "link_configs_lock_file")
 hdp_select_lock_file = os.path.join(tmp_dir, "hdp_select_lock_file")
+
+upgrade_suspended = default("/roleParams/upgrade_suspended", False)

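A note on the lookup above: default() is the resource_management helper that walks the command JSON by "/"-separated path and returns the fallback (here False) when the path is absent, so commands issued outside a suspended upgrade keep the old behavior. A simplified stand-in for the assumed semantics (not the library's actual implementation):

# Simplified stand-in for resource_management's default(): walk the
# command JSON by "/"-separated path, returning the fallback if any
# segment is missing along the way.
def default_lookup(config, path, fallback):
    node = config
    for segment in path.strip("/").split("/"):
        if not isinstance(node, dict) or segment not in node:
            return fallback
        node = node[segment]
    return node

suspended = {"roleParams": {"upgrade_suspended": "true"}}
print(default_lookup(suspended, "/roleParams/upgrade_suspended", False))  # "true"
print(default_lookup({}, "/roleParams/upgrade_suspended", False))         # False

Note the value arrives as the string "true" rather than a boolean (the test below sets exactly that string); either way it is truthy, which is all the guard in shared_initialization.py needs.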
http://git-wip-us.apache.org/repos/asf/ambari/blob/f3575b1b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 1c53b51..182bc11 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -43,10 +43,10 @@ def setup_hdp_symlinks():
     # which would only be during an intial cluster installation
     version = params.current_version if params.current_version is not None else params.stack_version_unformatted
 
-    # On parallel command execution this should be executed by a single process at a time.
-    with FcntlBasedProcessLock(params.hdp_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
-      hdp_select.select_all(version)
-
+    if not params.upgrade_suspended:
+      # On parallel command execution this should be executed by a single process at a time.
+      with FcntlBasedProcessLock(params.hdp_select_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
+        hdp_select.select_all(version)
 
 def setup_config():
   import params
@@ -104,4 +104,4 @@ def link_configs(struct_out_file):
   # On parallel command execution this should be executed by a single process at a time.
   with FcntlBasedProcessLock(params.link_configs_lock_file, enabled = params.is_parallel_execution_enabled, skip_fcntl_failures = True):
     for k, v in conf_select.PACKAGE_DIRS.iteritems():
-      conf_select.convert_conf_directories_to_symlinks(k, json_version, v)
\ No newline at end of file
+      conf_select.convert_conf_directories_to_symlinks(k, json_version, v)

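The net effect of the hunk above: a client START during a suspended upgrade still links configurations but never runs "hdp-select set all", so the host keeps pointing at the version the suspended upgrade owns instead of being flipped wholesale, which would break RU. A rough sketch of the guarded flow, with the locking elided and dependencies injected (names mirror the hook, but this is not the exact hook code):

# Sketch of the new control flow in setup_hdp_symlinks(): skip the
# host-wide "select all" while an upgrade is suspended.
def setup_hdp_symlinks_sketch(params, hdp_select):
    version = params.current_version if params.current_version is not None else params.stack_version_unformatted
    if params.upgrade_suspended:    # "true" (truthy) while suspended
        return                      # leave the hdp-select pointer alone
    hdp_select.select_all(version)  # normal install path, unchanged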
http://git-wip-us.apache.org/repos/asf/ambari/blob/f3575b1b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 0a487a3..171a66a 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -601,3 +601,238 @@ class TestHookAfterInstall(RMFTestCase):
 
     self.assertResourceCalled('Execute', 'ambari-sudo.sh /usr/bin/hdp-select set all `ambari-python-wrap /usr/bin/hdp-select versions | grep ^2.3.0.0-1234 | tail -1`',
       only_if = 'ls -d /usr/hdp/2.3.0.0-1234*')
+
+
+
+
+  @patch("shared_initialization.load_version", new = 
MagicMock(return_value="2.3.0.0-1243"))
+  @patch("resource_management.libraries.functions.conf_select.create")
+  @patch("resource_management.libraries.functions.conf_select.select")
+  @patch("os.symlink")
+  @patch("shutil.rmtree")
+  def test_hook_default_conf_select_suspended(self, rmtree_mock, symlink_mock, conf_select_select_mock, conf_select_create_mock):
+
+    def mocked_conf_select(arg1, arg2, arg3, dry_run = False):
+      return "/etc/{0}/{1}/0".format(arg2, arg3)
+
+    conf_select_create_mock.side_effect = mocked_conf_select
+
+    config_file = self.get_src_folder() + "/test/python/stacks/2.0.6/configs/default.json"
+    with open(config_file, "r") as f:
+      json_content = json.load(f)
+
+    version = '2.3.0.0-1234'
+    json_content['commandParams']['version'] = version
+    json_content['hostLevelParams']['stack_version'] = "2.3"
+    json_content['roleParams']['upgrade_suspended'] = "true"
+
+    self.executeScript("2.0.6/hooks/after-INSTALL/scripts/hook.py",
+                       classname="AfterInstallHook",
+                       command="hook",
+                       config_dict = json_content)
+
+    # same assertions as test_hook_default_conf_select, but skip hdp-select set all
+
+    self.assertResourceCalled('XmlConfig', 'core-site.xml',
+      owner = 'hdfs',
+      group = 'hadoop',
+      conf_dir = "/usr/hdp/current/hadoop-client/conf",
+      configurations = self.getConfig()['configurations']['core-site'],
+      configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
+      only_if="ls /usr/hdp/current/hadoop-client/conf")
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.backup'),
+        not_if = 'test -e /etc/ranger/kms/conf.backup',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
+        to = '/usr/hdp/current/ranger-kms/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.backup'),
+        not_if = 'test -e /etc/zookeeper/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
+        to = '/usr/hdp/current/zookeeper-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.backup'),
+        not_if = 'test -e /etc/pig/conf.backup',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/pig/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/pig/conf',
+        to = '/usr/hdp/current/pig-client/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.backup'),
+        not_if = 'test -e /etc/tez/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/tez/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/tez/conf',
+        to = '/usr/hdp/current/tez-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.backup'),
+        not_if = 'test -e /etc/hive-webhcat/conf.backup',
+        sudo = True,)
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.backup'),
+        not_if = 'test -e /etc/hive-hcatalog/conf.backup',
+        sudo = True,)
+
+    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
+        to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
+
+    self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
+        to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.backup'),
+        not_if = 'test -e /etc/hbase/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/hbase/conf',
+        to = '/usr/hdp/current/hbase-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.backup'),
+        not_if = 'test -e /etc/knox/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/knox/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/knox/conf',
+        to = '/usr/hdp/current/knox-server/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.backup'),
+        not_if = 'test -e /etc/ranger/usersync/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
+        to = '/usr/hdp/current/ranger-usersync/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.backup'),
+        not_if = 'test -e /etc/hadoop/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/hadoop/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/hadoop/conf',
+        to = '/usr/hdp/current/hadoop-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.backup'),
+        not_if = 'test -e /etc/mahout/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/mahout/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/mahout/conf',
+        to = '/usr/hdp/current/mahout-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.backup'),
+        not_if = 'test -e /etc/storm/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/storm/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/storm/conf',
+        to = '/usr/hdp/current/storm-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.backup'),
+        not_if = 'test -e /etc/ranger/admin/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
+        to = '/usr/hdp/current/ranger-admin/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.backup'),
+        not_if = 'test -e /etc/flume/conf.backup',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/flume/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/flume/conf',
+        to = '/usr/hdp/current/flume-server/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.backup'),
+        not_if = 'test -e /etc/sqoop/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/sqoop/conf',
+        to = '/usr/hdp/current/sqoop-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.backup'),
+        not_if = 'test -e /etc/accumulo/conf.backup',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/accumulo/conf',
+        to = '/usr/hdp/current/accumulo-client/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.backup'),
+        not_if = 'test -e /etc/phoenix/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/phoenix/conf',
+        to = '/usr/hdp/current/phoenix-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.backup'),
+        not_if = 'test -e /etc/storm-slider-client/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
+        to = '/usr/hdp/current/storm-slider-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.backup'),
+        not_if = 'test -e /etc/slider/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/slider/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/slider/conf',
+        to = '/usr/hdp/current/slider-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.backup'),
+        not_if = 'test -e /etc/oozie/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/oozie/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/oozie/conf',
+        to = '/usr/hdp/current/oozie-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.backup'),
+        not_if = 'test -e /etc/falcon/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/falcon/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/falcon/conf',
+        to = '/usr/hdp/current/falcon-client/conf')
+
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.backup'),
+        not_if = 'test -e /etc/spark/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/spark/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/spark/conf',
+        to = '/usr/hdp/current/spark-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.backup'),
+        not_if = 'test -e /etc/kafka/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/kafka/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/kafka/conf',
+        to = '/usr/hdp/current/kafka-broker/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.backup'),
+        not_if = 'test -e /etc/hive/conf.backup',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/hive/conf',
+        to = '/usr/hdp/current/hive-client/conf')
+
+    self.assertNoMoreResources()
