Repository: ambari
Updated Branches:
  refs/heads/trunk f0e829973 -> e5d17bc75


AMBARI-19293. Component install and after-install should not run in parallel (Attila Doroszlai via magyari_sandor)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e5d17bc7
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e5d17bc7
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e5d17bc7

Branch: refs/heads/trunk
Commit: e5d17bc75c3d47b9f2979e26b4a011dd9e494398
Parents: f0e8299
Author: Attila Doroszlai <adorosz...@hortonworks.com>
Authored: Fri Jan 13 13:46:06 2017 +0100
Committer: Sandor Magyari <smagy...@hortonworks.com>
Committed: Fri Jan 13 13:46:06 2017 +0100

----------------------------------------------------------------------
 .../libraries/script/script.py                  | 26 ++++++++++++++++++++
 .../2.0.6/hooks/after-INSTALL/scripts/params.py |  3 ++-
 .../3.0/hooks/after-INSTALL/scripts/params.py   |  3 ++-
 3 files changed, 30 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/e5d17bc7/ambari-common/src/main/python/resource_management/libraries/script/script.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/script/script.py b/ambari-common/src/main/python/resource_management/libraries/script/script.py
index ccb09c7..6eec3cc 100644
--- a/ambari-common/src/main/python/resource_management/libraries/script/script.py
+++ b/ambari-common/src/main/python/resource_management/libraries/script/script.py
@@ -56,6 +56,7 @@ from contextlib import closing
 from resource_management.libraries.functions.stack_features import check_stack_feature
 from resource_management.libraries.functions.constants import StackFeature
 from resource_management.libraries.functions.show_logs import show_logs
+from resource_management.libraries.functions.fcntl_based_process_lock import FcntlBasedProcessLock
 
 import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
 
@@ -95,7 +96,32 @@ def get_path_from_configuration(name, configuration):
 
   return configuration
 
+def get_config_lock_file():
+  return os.path.join(Script.get_tmp_dir(), "link_configs_lock_file")
+
+class LockedConfigureMeta(type):
+  '''
+  This metaclass ensures that Script.configure() is invoked with a fcntl-based process lock
+  if necessary (when Ambari Agent is configured to execute tasks concurrently) for all subclasses.
+  '''
+  def __new__(meta, classname, supers, classdict):
+    if 'configure' in classdict:
+      original_configure = classdict['configure']
+
+      def locking_configure(obj, *args, **kw):
+        # local import to avoid circular dependency (default imports Script)
+        from resource_management.libraries.functions.default import default
+        parallel_execution_enabled = int(default("/agentConfigParams/agent/parallel_execution", 0)) == 1
+        lock = FcntlBasedProcessLock(get_config_lock_file(), skip_fcntl_failures = True, enabled = parallel_execution_enabled)
+        with lock:
+          original_configure(obj, *args, **kw)
+
+      classdict['configure'] = locking_configure
+
+    return type.__new__(meta, classname, supers, classdict)
+
 class Script(object):
+  __metaclass__ = LockedConfigureMeta
 
   instance = None
 

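For illustration (not part of this commit), the sketch below shows how a metaclass like LockedConfigureMeta transparently wraps a subclass's configure() method. ProcessLock and MyServiceScript are hypothetical stand-ins for FcntlBasedProcessLock and a real service script, and the lock file path is chosen only for the example.

----------------------------------------------------------------------
import os
import tempfile

class ProcessLock(object):
  """Hypothetical stand-in for FcntlBasedProcessLock: a real implementation
  would fcntl.flock() the lock file in __enter__ when locking is enabled."""
  def __init__(self, lock_file, enabled):
    self.lock_file = lock_file
    self.enabled = enabled
  def __enter__(self):
    return self
  def __exit__(self, exc_type, exc_value, traceback):
    return False

class LockedConfigureMeta(type):
  """Wraps any configure() defined in a class body so it runs under the lock."""
  def __new__(meta, classname, supers, classdict):
    if 'configure' in classdict:
      original_configure = classdict['configure']
      def locking_configure(obj, *args, **kw):
        lock_file = os.path.join(tempfile.gettempdir(), "link_configs_lock_file")
        with ProcessLock(lock_file, enabled=True):
          original_configure(obj, *args, **kw)
      classdict['configure'] = locking_configure
    return type.__new__(meta, classname, supers, classdict)

class Script(object):
  __metaclass__ = LockedConfigureMeta  # Python 2 syntax, as in the patched file

class MyServiceScript(Script):         # metaclass is inherited from Script
  def configure(self, env):
    print("configure() runs while the lock is held")

if __name__ == "__main__":
  MyServiceScript().configure(env=None)  # goes through locking_configure()
----------------------------------------------------------------------
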
http://git-wip-us.apache.org/repos/asf/ambari/blob/e5d17bc7/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
index ed34217..1782298 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/params.py
@@ -21,6 +21,7 @@ import os
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.script import Script
+from resource_management.libraries.script.script import get_config_lock_file
 from resource_management.libraries.functions import default
 from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import stack_select
@@ -96,7 +97,7 @@ has_namenode = not len(namenode_host) == 0
 if has_namenode or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 
-link_configs_lock_file = os.path.join(tmp_dir, "link_configs_lock_file")
+link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")
 
 upgrade_suspended = default("/roleParams/upgrade_suspended", False)

http://git-wip-us.apache.org/repos/asf/ambari/blob/e5d17bc7/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
index 566f5b3..5dcd39b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/3.0/hooks/after-INSTALL/scripts/params.py
@@ -21,6 +21,7 @@ import os
 
 from ambari_commons.constants import AMBARI_SUDO_BINARY
 from resource_management.libraries.script import Script
+from resource_management.libraries.script.script import get_config_lock_file
 from resource_management.libraries.functions import default
 from resource_management.libraries.functions import format
 from resource_management.libraries.functions import conf_select
@@ -91,7 +92,7 @@ has_namenode = not len(namenode_host) == 0
 if has_namenode or dfs_type == 'HCFS':
   hadoop_conf_dir = conf_select.get_hadoop_conf_dir(force_latest_on_upgrade=True)
 
-link_configs_lock_file = os.path.join(tmp_dir, "link_configs_lock_file")
+link_configs_lock_file = get_config_lock_file()
 stack_select_lock_file = os.path.join(tmp_dir, "stack_select_lock_file")
 
 upgrade_suspended = default("/roleParams/upgrade_suspended", False)
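
For context on the locking itself (again, not part of the commit): both the install task's configure() and the after-INSTALL hook now derive the same lock file path from get_config_lock_file(), so they contend on one fcntl lock. The sketch below is a minimal illustration of such a lock; SimpleFcntlLock is a hypothetical name, not Ambari's FcntlBasedProcessLock, and it omits that class's skip_fcntl_failures handling.

----------------------------------------------------------------------
import fcntl

class SimpleFcntlLock(object):
  """Illustrative cross-process lock built on fcntl.flock()."""
  def __init__(self, lock_file_path, enabled=True):
    self.lock_file_path = lock_file_path
    self.enabled = enabled
    self.lock_file = None

  def __enter__(self):
    if self.enabled:
      # Open (or create) the shared lock file and block until we own it exclusively.
      self.lock_file = open(self.lock_file_path, "a")
      fcntl.flock(self.lock_file, fcntl.LOCK_EX)
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    if self.lock_file is not None:
      fcntl.flock(self.lock_file, fcntl.LOCK_UN)
      self.lock_file.close()
      self.lock_file = None
    return False

# Usage, mirroring locking_configure() above:
# with SimpleFcntlLock("/tmp/link_configs_lock_file", enabled=parallel_execution_enabled):
#   ... run the configure / link-configs work ...
----------------------------------------------------------------------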
