Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 494f954d6 -> 1d84518f8


AMBARI-12341. RU: invalid symlinks on upgraded cluster (ncole)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1d84518f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1d84518f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1d84518f

Branch: refs/heads/branch-2.1
Commit: 1d84518f8a1835fcdd70702c9850fab0b083d61f
Parents: 494f954
Author: Nate Cole <nc...@hortonworks.com>
Authored: Wed Jul 8 17:07:23 2015 -0400
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Wed Jul 8 17:47:44 2015 -0400

----------------------------------------------------------------------
 .../libraries/functions/conf_select.py          | 255 +++++++++-----
 .../1.4.0.2.0/package/scripts/flume_handler.py  |   2 +
 .../0.12.0.2.0/package/scripts/pig_client.py    |   3 +-
 .../custom_actions/scripts/ru_set_all.py        |   7 +-
 .../scripts/shared_initialization.py            |  94 ++++--
 .../python/stacks/2.0.6/PIG/test_pig_client.py  |  14 +-
 .../hooks/after-INSTALL/test_after_install.py   | 337 ++++++++++---------
 7 files changed, 415 insertions(+), 297 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
index 8b3f930..9c91497 100644
--- a/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
+++ b/ambari-common/src/main/python/resource_management/libraries/functions/conf_select.py
@@ -30,82 +30,150 @@ from resource_management.core.logger import Logger
 from resource_management.core.resources.system import Directory
 
 PACKAGE_DIRS = {
-  "accumulo": {
-    "conf_dir": "/etc/accumulo/conf",
-    "current_dir": "/usr/hdp/current/accumulo-client/conf"
-  },
-  "falcon": {
-    "conf_dir": "/etc/falcon/conf",
-    "current_dir": "/usr/hdp/current/falcon-client/conf"
-  },
-  "hadoop": {
-    "conf_dir": "/etc/hadoop/conf",
-    "current_dir": "/usr/hdp/current/hadoop-client/conf"
-  },
-  "hbase": {
-    "conf_dir": "/etc/hbase/conf",
-    "current_dir": "/usr/hdp/current/hbase-client/conf"
-  },
-  "hive": {
-    "conf_dir": "/etc/hive/conf",
-    "current_dir": "/usr/hdp/current/hive-client/conf"
-  },
-  "kafka": {
-    "conf_dir": "/etc/kafka/conf",
-    "current_dir": "/usr/hdp/current/kafka-broker/conf"
-  },
-  "knox": {
-    "conf_dir": "/etc/knox/conf",
-    "current_dir": "/usr/hdp/current/knox-server/conf"
-  },
-  "mahout": {
-    "conf_dir": "/etc/mahout/conf",
-    "current_dir": "/usr/hdp/current/mahout-client/conf"
-  },
-  "oozie": {
-    "conf_dir": "/etc/oozie/conf",
-    "current_dir": "/usr/hdp/current/oozie-client/conf"
-  },
-  "phoenix": {
-    "conf_dir": "/etc/phoenix/conf",
-    "current_dir": "/usr/hdp/current/phoenix-client/conf"
-  },
-  "ranger-admin": {
-    "conf_dir": "/etc/ranger/admin/conf",
-    "current_dir": "/usr/hdp/current/ranger-admin/conf"
-  },
-  "ranger-kms": {
-    "conf_dir": "/etc/ranger/kms/conf",
-    "current_dir": "/usr/hdp/current/ranger-kms/conf"
-  },
-  "ranger-usersync": {
-    "conf_dir": "/etc/ranger/kms/usersync",
-    "current_dir": "/usr/hdp/current/ranger-usersync/conf"
-  },
-  "slider": {
-    "conf_dir": "/etc/slider/conf",
-    "current_dir": "/usr/hdp/current/slider-client/conf"
-  },
-  "spark": {
-    "conf_dir": "/etc/spark/conf",
-    "current_dir": "/usr/hdp/current/spark-client/conf"
-  },
-  "sqoop": {
-    "conf_dir": "/etc/sqoop/conf",
-    "current_dir": "/usr/hdp/current/sqoop-client/conf"
-  },
-  "storm": {
-    "conf_dir": "/etc/storm/conf",
-    "current_dir": "/usr/hdp/current/storm-client/conf"
-  },
-  "tez": {
-    "conf_dir": "/etc/tez/conf",
-    "current_dir": "/usr/hdp/current/tez-client/conf"
-  },
-  "zookeeper": {
-    "conf_dir": "/etc/zookeeper/conf",
-    "current_dir": "/usr/hdp/current/zookeeper-client/conf"
-  }
+  "accumulo": [
+    {
+      "conf_dir": "/etc/accumulo/conf",
+      "current_dir": "/usr/hdp/current/accumulo-client/conf"
+    }
+  ],
+  "falcon": [
+    {
+      "conf_dir": "/etc/falcon/conf",
+      "current_dir": "/usr/hdp/current/falcon-client/conf"
+    }
+  ],
+  "hadoop": [
+    {
+      "conf_dir": "/etc/hadoop/conf",
+      "current_dir": "/usr/hdp/current/hadoop-client/conf"
+    }
+  ],
+  "hbase": [
+    {
+      "conf_dir": "/etc/hbase/conf",
+      "current_dir": "/usr/hdp/current/hbase-client/conf"
+    }
+  ],
+  "hive": [
+    {
+      "conf_dir": "/etc/hive/conf",
+      "current_dir": "/usr/hdp/current/hive-client/conf"
+    }
+  ],
+  "kafka": [
+    {
+      "conf_dir": "/etc/kafka/conf",
+      "current_dir": "/usr/hdp/current/kafka-broker/conf"
+    }
+  ],
+  "knox": [
+    {
+      "conf_dir": "/etc/knox/conf",
+      "current_dir": "/usr/hdp/current/knox-server/conf"
+    }
+  ],
+  "mahout": [
+    {
+      "conf_dir": "/etc/mahout/conf",
+      "current_dir": "/usr/hdp/current/mahout-client/conf"
+    }
+  ],
+  "oozie": [
+    {
+      "conf_dir": "/etc/oozie/conf",
+      "current_dir": "/usr/hdp/current/oozie-client/conf"
+    }
+  ],
+  "phoenix": [
+    {
+      "conf_dir": "/etc/phoenix/conf",
+      "current_dir": "/usr/hdp/current/phoenix-client/conf"
+    }
+  ],
+  "ranger-admin": [
+    {
+      "conf_dir": "/etc/ranger/admin/conf",
+      "current_dir": "/usr/hdp/current/ranger-admin/conf"
+    }
+  ],
+  "ranger-kms": [
+    {
+      "conf_dir": "/etc/ranger/kms/conf",
+      "current_dir": "/usr/hdp/current/ranger-kms/conf"
+    }
+  ],
+  "ranger-usersync": [
+    {
+      "conf_dir": "/etc/ranger/usersync/conf",
+      "current_dir": "/usr/hdp/current/ranger-usersync/conf"
+    }
+  ],
+  "slider": [
+    {
+      "conf_dir": "/etc/slider/conf",
+      "current_dir": "/usr/hdp/current/slider-client/conf"
+    }
+  ],
+  "spark": [
+    {
+      "conf_dir": "/etc/spark/conf",
+      "current_dir": "/usr/hdp/current/spark-client/conf"
+    }
+  ],
+  "sqoop": [
+    {
+      "conf_dir": "/etc/sqoop/conf",
+      "current_dir": "/usr/hdp/current/sqoop-client/conf"
+    }
+  ],
+  "storm": [
+    {
+      "conf_dir": "/etc/storm/conf",
+      "current_dir": "/usr/hdp/current/storm-client/conf"
+    }
+  ],
+  "tez": [
+    {
+      "conf_dir": "/etc/tez/conf",
+      "current_dir": "/usr/hdp/current/tez-client/conf"
+    }
+  ],
+  "zookeeper": [
+    {
+      "conf_dir": "/etc/zookeeper/conf",
+      "current_dir": "/usr/hdp/current/zookeeper-client/conf"
+    }
+  ],
+  "pig": [
+    {
+      "conf_dir": "/etc/pig/conf",
+      "current_dir": "/usr/hdp/current/pig-client/conf"
+    }
+  ],
+  "flume": [
+    {
+      "conf_dir": "/etc/flume/conf",
+      "current_dir": "/usr/hdp/current/flume-server/conf"
+    }
+  ],
+  "storm-slider-client": [
+    {
+      "conf_dir": "/etc/storm-slider-client/conf",
+      "current_dir": "/usr/hdp/current/storm-slider-client/conf"
+    }
+  ],
+  "hive-hcatalog": [
+    {
+      "conf_dir": "/etc/hive-webhcat/conf",
+      "prefix": "/etc/hive-webhcat",
+      "current_dir": "/usr/hdp/current/hive-webhcat/etc/webhcat"
+    },
+    {
+      "conf_dir": "/etc/hive-hcatalog/conf",
+      "prefix": "/etc/hive-hcatalog",
+      "current_dir": "/usr/hdp/current/hive-webhcat/etc/hcatalog"
+    }
+  ]
 }
 
 def get_cmd(command, package, version):
@@ -136,15 +204,22 @@ def create(stack_name, package, version, dry_run = False):
 
  code, stdout = shell.call(get_cmd(command, package, version), logoutput=False, quiet=True, sudo=True)
 
+  # conf-select can set more than one directory
+  # per package, so return that list, especially for dry_run
+  dirs = []
+  if 0 == code and stdout is not None: # just be sure we have a stdout
+    for line in stdout.splitlines():
+      dirs.append(line.rstrip('\n'))
+
   # take care of permissions
   if not code and stdout and command == "create-conf-dir":
-    Directory(stdout,
-        mode=0755,
-        cd_access='a',
-        recursive=True,
-    )
+    for d in dirs:
+      Directory(d,
+          mode=0755,
+          cd_access='a',
+          recursive=True)
 
-  return stdout
+  return dirs
 
 
 def select(stack_name, package, version, try_create=True):
@@ -234,10 +309,16 @@ def create_config_links(stack_id, stack_version):
     return
 
   for k, v in PACKAGE_DIRS.iteritems():
-    if os.path.exists(v['conf_dir']):
-      new_conf_dir = create(args[0], k, stack_version, dry_run = True)
-
-      if not os.path.exists(new_conf_dir):
-        Logger.info("Creating conf {0} for {1}".format(new_conf_dir, k))
+    dirs = create(args[0], k, stack_version, dry_run = True)
+    if 0 == len(dirs):
+      Logger.debug("Package {0} is not installed".format(k))
+    else:
+      need = False
+      for new_conf_dir in dirs:
+        if not os.path.exists(new_conf_dir):
+          need = True
+
+      if need:
+        Logger.info("Creating conf dirs {0} for {1}".format(",".join(dirs), k))
         select(args[0], k, stack_version)
 

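For context on the structural change above: PACKAGE_DIRS now maps each
package to a list of directory definitions rather than a single dict, so one
package (e.g. hive-hcatalog) can own several conf symlinks, and create() now
returns the list of directories reported by conf-select instead of raw
stdout. A minimal, illustrative sketch of how a consumer walks the new shape
(the PACKAGE_DIRS literal below is a trimmed copy for illustration, not an
import from conf_select):

    # Illustrative only: a trimmed copy of the new PACKAGE_DIRS shape.
    PACKAGE_DIRS = {
      "hadoop": [
        {"conf_dir": "/etc/hadoop/conf",
         "current_dir": "/usr/hdp/current/hadoop-client/conf"}
      ],
      "hive-hcatalog": [
        {"conf_dir": "/etc/hive-webhcat/conf",
         "prefix": "/etc/hive-webhcat",
         "current_dir": "/usr/hdp/current/hive-webhcat/etc/webhcat"},
        {"conf_dir": "/etc/hive-hcatalog/conf",
         "prefix": "/etc/hive-hcatalog",
         "current_dir": "/usr/hdp/current/hive-webhcat/etc/hcatalog"}
      ],
    }

    for package, dir_defs in PACKAGE_DIRS.items():
      # every value is a list now, so callers always iterate
      for dir_def in dir_defs:
        print("%s: %s -> %s" % (package, dir_def["conf_dir"], dir_def["current_dir"]))
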
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
index 5d2eea5..a305ce7 100644
--- a/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
+++ b/ambari-server/src/main/resources/common-services/FLUME/1.4.0.2.0/package/scripts/flume_handler.py
@@ -23,6 +23,7 @@ from flume import flume
 from flume import get_desired_state
 
 from resource_management import *
+from resource_management.libraries.functions import conf_select
 from resource_management.libraries.functions import hdp_select
 from resource_management.libraries.functions.flume_agent_helper import find_expected_agent_names
 from resource_management.libraries.functions.flume_agent_helper import get_flume_status
@@ -120,6 +121,7 @@ class FlumeHandler(Script):
       return
 
     Logger.info("Executing Flume Rolling Upgrade pre-restart")
+    conf_select.select(params.stack_name, "flume", params.version)
     hdp_select.select("flume-server", params.version)
     flume_upgrade.pre_start_restore()
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
index 7dc57a4..15fb40c 100644
--- a/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
+++ b/ambari-server/src/main/resources/common-services/PIG/0.12.0.2.0/package/scripts/pig_client.py
@@ -47,8 +47,9 @@ class PigClientLinux(PigClient):
     env.set_params(params)
 
     if params.version and compare_versions(format_hdp_stack_version(params.version), '2.2.0.0') >= 0:
+      conf_select.select(params.stack_name, "pig", params.version)
       conf_select.select(params.stack_name, "hadoop", params.version)
-      hdp_select.select("hadoop-client", params.version)
+      hdp_select.select("hadoop-client", params.version) # includes pig-client
 
   def install(self, env):
     self.install_packages(env)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
index a6f7c28..f868002 100644
--- a/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
+++ b/ambari-server/src/main/resources/custom_actions/scripts/ru_set_all.py
@@ -64,7 +64,8 @@ class UpgradeSetAll(Script):
       if compare_versions(real_ver, format_hdp_stack_version("2.3")) >= 0:
        # backup the old and symlink /etc/[component]/conf to /usr/hdp/current/[component]
         for k, v in conf_select.PACKAGE_DIRS.iteritems():
-          link_config(v['conf_dir'], v['current_dir'])
+          for dir_def in v:
+            link_config(dir_def['conf_dir'], dir_def['current_dir'])
 
 def link_config(old_conf, link_conf):
   """
@@ -95,9 +96,7 @@ def link_config(old_conf, link_conf):
   shutil.rmtree(old_conf, ignore_errors=True)
 
   # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
-  Link(old_conf,
-    to = link_conf,
-  )
+  Link(old_conf, to = link_conf)
 
 if __name__ == "__main__":
   UpgradeSetAll().execute()

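For readers unfamiliar with link_config(old_conf, link_conf): it is what
converts each physical /etc/[component]/conf directory into a symlink.
Outside Ambari's resource framework, its effect is roughly the following
plain-Python sketch (an approximation; per the diff above, the real function
also backs up the old directory first and uses the Link resource rather than
os.symlink directly):

    import os
    import shutil

    def link_config_sketch(old_conf, link_conf):
      # nothing to do if it is already a link or does not exist
      if os.path.islink(old_conf) or not os.path.isdir(old_conf):
        return
      # drop the old physical directory ...
      shutil.rmtree(old_conf, ignore_errors=True)
      # ... and repoint it at the versioned config
      os.symlink(link_conf, old_conf)
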
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
index 24b3ec8..2e3f282 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/after-INSTALL/scripts/shared_initialization.py
@@ -18,16 +18,17 @@ limitations under the License.
 """
 import os
 import shutil
+
 import ambari_simplejson as json
-from resource_management.core.shell import as_sudo
 from resource_management.core.logger import Logger
+from resource_management.core.resources.system import Directory, Link
 from resource_management.core.resources.system import Execute
-from resource_management.libraries.functions.version import compare_versions
-from resource_management.libraries.functions.format import format
+from resource_management.core.shell import as_sudo
 from resource_management.libraries.functions import conf_select
+from resource_management.libraries.functions.format import format
+from resource_management.libraries.functions.version import compare_versions
 from resource_management.libraries.resources.xml_config import XmlConfig
 from resource_management.libraries.script import Script
-from resource_management.core.resources.system import Directory, Link
 
 
 def setup_hdp_install_directory():
@@ -87,54 +88,79 @@ def link_configs(struct_out_file):
     return
 
   for k, v in conf_select.PACKAGE_DIRS.iteritems():
-    _link_configs(k, json_version, v['conf_dir'], v['current_dir'])
+    _link_configs(k, json_version, v)
 
-def _link_configs(package, version, old_conf, link_conf):
+def _link_configs(package, version, dirs):
   """
   Link a specific package's configuration directory
   """
+  bad_dirs = []
+  for dir_def in dirs:
+    if not os.path.exists(dir_def['conf_dir']):
+      bad_dirs.append(dir_def['conf_dir'])
 
-  if not os.path.exists(old_conf):
-    Logger.debug("Skipping {0} as it does not exist.".format(old_conf))
+  if len(bad_dirs) > 0:
+    Logger.debug("Skipping {0} as it does not 
exist.".format(",".join(bad_dirs)))
     return
 
-  # check if conf is a link to the target already
-  if os.path.islink(old_conf):
-    Logger.debug("{0} is already a link to {1}".format(old_conf, 
os.path.realpath(old_conf)))
+  bad_dirs = []
+  for dir_def in dirs:
+    # check if conf is a link already
+    old_conf = dir_def['conf_dir']
+    if os.path.islink(old_conf):
+      Logger.debug("{0} is a link to {1}".format(old_conf, 
os.path.realpath(old_conf)))
+      bad_dirs.append(old_conf)
+
+  if len(bad_dirs) > 0:
     return
 
   # make backup dir and copy everything in case configure() was called after install()
-  old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
-  old_conf_copy = os.path.join(old_parent, "conf.install")
-  Execute(("cp", "-R", "-p", old_conf, old_conf_copy),
-          not_if = format("test -e {old_conf_copy}"),
-          sudo = True,
-  )
+  for dir_def in dirs:
+    old_conf = dir_def['conf_dir']
+    old_parent = os.path.abspath(os.path.join(old_conf, os.pardir))
+    old_conf_copy = os.path.join(old_parent, "conf.install")
+    Execute(("cp", "-R", "-p", old_conf, old_conf_copy),
+      not_if = format("test -e {old_conf_copy}"), sudo = True)
+
+  # we're already in the HDP stack
+  versioned_confs = conf_select.create("HDP", package, version, dry_run = True)
 
-  versioned_conf = conf_select.create("HDP", package, version, dry_run = True)
+  Logger.info("New conf directories: {0}".format(", ".join(versioned_confs)))
 
-  Logger.info("New conf directory is {0}".format(versioned_conf))
+  need_dirs = []
+  for d in versioned_confs:
+    if not os.path.exists(d):
+      need_dirs.append(d)
 
-  # make new conf dir and copy everything in case configure() was called after install()
-  if not os.path.exists(versioned_conf):
+  if len(need_dirs) > 0:
     conf_select.create("HDP", package, version)
-    Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
-            only_if = format("ls {old_conf}/*")
-    )
-    
-  # make /usr/hdp/<version>/hadoop/conf point to the versioned config.
+
+    # find the matching definition and back it up (not the most efficient way) ONLY if there is more than one directory
+    if len(dirs) > 1:
+      for need_dir in need_dirs:
+        for dir_def in dirs:
+          if 'prefix' in dir_def and need_dir.startswith(dir_def['prefix']):
+            old_conf = dir_def['conf_dir']
+            versioned_conf = need_dir
+            Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
+              only_if = format("ls {old_conf}/*"))
+    elif 1 == len(dirs) and 1 == len(need_dirs):
+      old_conf = dirs[0]['conf_dir']
+      versioned_conf = need_dirs[0]
+      Execute(as_sudo(["cp", "-R", "-p", os.path.join(old_conf, "*"), versioned_conf], auto_escape=False),
+        only_if = format("ls {old_conf}/*"))
+
+
+  # make /usr/hdp/[version]/[component]/conf point to the versioned config.
   # /usr/hdp/current is already set
   conf_select.select("HDP", package, version)
 
   # no more references to /etc/[component]/conf
-  Directory(old_conf,
-    action="delete",
-  )
-
-  # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
-  Link(old_conf,
-    to = link_conf
-  )
+  for dir_def in dirs:
+    Directory(dir_def['conf_dir'], action="delete")
+
+    # link /etc/[component]/conf -> /usr/hdp/current/[component]-client/conf
+    Link(dir_def['conf_dir'], to = dir_def['current_dir'])
       
   # should conf.install be removed?
 

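To make the prefix-matching step in _link_configs concrete: when a package
owns more than one conf directory, each missing versioned directory returned
by the dry run is paired with the definition whose 'prefix' it starts with,
and only that source directory is copied into it. A simplified,
self-contained sketch of the matching (the versioned path below is a
hypothetical example, not taken from the patch):

    dirs = [
      {"conf_dir": "/etc/hive-webhcat/conf", "prefix": "/etc/hive-webhcat"},
      {"conf_dir": "/etc/hive-hcatalog/conf", "prefix": "/etc/hive-hcatalog"},
    ]
    need_dirs = ["/etc/hive-hcatalog/2.3.0.0-1234/0"]  # hypothetical

    for need_dir in need_dirs:
      for dir_def in dirs:
        # pair each new versioned dir with the definition that owns it
        if "prefix" in dir_def and need_dir.startswith(dir_def["prefix"]):
          print("copy %s/* -> %s" % (dir_def["conf_dir"], need_dir))
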
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
index 8825248..2f42520 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/PIG/test_pig_client.py
@@ -170,11 +170,17 @@ class TestPigClient(RMFTestCase):
                              ('hdp-select', 'set', 'hadoop-client', version), sudo=True)
     self.assertNoMoreResources()
 
-    self.assertEquals(1, mocks_dict['call'].call_count)
-    self.assertEquals(1, mocks_dict['checked_call'].call_count)
+    self.assertEquals(2, mocks_dict['call'].call_count)
+    self.assertEquals(2, mocks_dict['checked_call'].call_count)
     self.assertEquals(
-      ('conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
+      ('conf-select', 'set-conf-dir', '--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['checked_call'].call_args_list[0][0][0])
     self.assertEquals(
-      ('conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
+      ('conf-select', 'set-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
+       mocks_dict['checked_call'].call_args_list[1][0][0])
+    self.assertEquals(
+      ('conf-select', 'create-conf-dir', '--package', 'pig', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
       mocks_dict['call'].call_args_list[0][0][0])
+    self.assertEquals(
+      ('conf-select', 'create-conf-dir', '--package', 'hadoop', '--stack-version', '2.3.0.0-1234', '--conf-version', '0'),
+       mocks_dict['call'].call_args_list[1][0][0])

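The doubled call counts asserted above follow directly from the pig_client
change: each conf_select.select() issues one 'set-conf-dir' (a checked_call)
and, via its try_create path, one 'create-conf-dir' (a call), so selecting
both "pig" and "hadoop" yields two of each, in that order:

    # Expected conf-select invocations after the change (abridged):
    #   checked_call: set-conf-dir    --package pig      <- select("pig")
    #   checked_call: set-conf-dir    --package hadoop   <- select("hadoop")
    #   call:         create-conf-dir --package pig
    #   call:         create-conf-dir --package hadoop
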
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d84518f/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
index 572cd11..48c7306 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/after-INSTALL/test_after_install.py
@@ -83,196 +83,199 @@ class TestHookAfterInstall(RMFTestCase):
       configurations = self.getConfig()['configurations']['core-site'],
      configuration_attributes = self.getConfig()['configuration_attributes']['core-site'],
       only_if="ls /usr/hdp/current/hadoop-client/conf")
-  
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.install'),
-        not_if = 'test -e /etc/zookeeper/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
-        to = '/usr/hdp/current/zookeeper-client/conf',
-    )
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/conf', '/etc/ranger/kms/conf.install'),
         not_if = 'test -e /etc/ranger/kms/conf.install',
-        sudo = True,
-    )
+        sudo = True,)
     self.assertResourceCalled('Directory', '/etc/ranger/kms/conf',
-        action = ['delete'],
-    )
+        action = ['delete'],)
     self.assertResourceCalled('Link', '/etc/ranger/kms/conf',
-        to = '/usr/hdp/current/ranger-kms/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.install'),
-        not_if = 'test -e /etc/accumulo/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/accumulo/conf',
-        to = '/usr/hdp/current/accumulo-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.install'),
-        not_if = 'test -e /etc/phoenix/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/phoenix/conf',
-        to = '/usr/hdp/current/phoenix-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.install'),
-        not_if = 'test -e /etc/sqoop/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/sqoop/conf',
-        to = '/usr/hdp/current/sqoop-client/conf',
-    )
+        to = '/usr/hdp/current/ranger-kms/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/zookeeper/conf', '/etc/zookeeper/conf.install'),
+        not_if = 'test -e /etc/zookeeper/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/zookeeper/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/zookeeper/conf',
+        to = '/usr/hdp/current/zookeeper-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/pig/conf', '/etc/pig/conf.install'),
+        not_if = 'test -e /etc/pig/conf.install',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/pig/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/pig/conf',
+        to = '/usr/hdp/current/pig-client/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.install'),
+        not_if = 'test -e /etc/tez/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/tez/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/tez/conf',
+        to = '/usr/hdp/current/tez-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-webhcat/conf', '/etc/hive-webhcat/conf.install'),
+        not_if = 'test -e /etc/hive-webhcat/conf.install',
+        sudo = True,)
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive-hcatalog/conf', '/etc/hive-hcatalog/conf.install'),
+        not_if = 'test -e /etc/hive-hcatalog/conf.install',
+        sudo = True,)
+
+    self.assertResourceCalled('Directory', '/etc/hive-webhcat/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/hive-webhcat/conf',
+        to = '/usr/hdp/current/hive-webhcat/etc/webhcat',)
+
+    self.assertResourceCalled('Directory', '/etc/hive-hcatalog/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/hive-hcatalog/conf',
+        to = '/usr/hdp/current/hive-webhcat/etc/hcatalog',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.install'),
+        not_if = 'test -e /etc/hbase/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/hbase/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/hbase/conf',
+        to = '/usr/hdp/current/hbase-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.install'),
+        not_if = 'test -e /etc/knox/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/knox/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/knox/conf',
+        to = '/usr/hdp/current/knox-server/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/usersync/conf', '/etc/ranger/usersync/conf.install'),
+        not_if = 'test -e /etc/ranger/usersync/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/ranger/usersync/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/ranger/usersync/conf',
+        to = '/usr/hdp/current/ranger-usersync/conf')
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hadoop/conf', '/etc/hadoop/conf.install'),
         not_if = 'test -e /etc/hadoop/conf.install',
-        sudo = True,
-    )
+        sudo = True)
     self.assertResourceCalled('Directory', '/etc/hadoop/conf',
-        action = ['delete'],
-    )
+        action = ['delete'])
     self.assertResourceCalled('Link', '/etc/hadoop/conf',
-        to = '/usr/hdp/current/hadoop-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.install'),
-        not_if = 'test -e /etc/hive/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/hive/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/hive/conf',
-        to = '/usr/hdp/current/hive-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.install'),
-        not_if = 'test -e /etc/kafka/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/kafka/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/kafka/conf',
-        to = '/usr/hdp/current/kafka-broker/conf',
-    )
+        to = '/usr/hdp/current/hadoop-client/conf')
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/mahout/conf', '/etc/mahout/conf.install'),
         not_if = 'test -e /etc/mahout/conf.install',
-        sudo = True,
-    )
+        sudo = True)
     self.assertResourceCalled('Directory', '/etc/mahout/conf',
-        action = ['delete'],
-    )
+        action = ['delete'])
     self.assertResourceCalled('Link', '/etc/mahout/conf',
-        to = '/usr/hdp/current/mahout-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.install'),
-        not_if = 'test -e /etc/slider/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/slider/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/slider/conf',
-        to = '/usr/hdp/current/slider-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/tez/conf', '/etc/tez/conf.install'),
-        not_if = 'test -e /etc/tez/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/tez/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/tez/conf',
-        to = '/usr/hdp/current/tez-client/conf',
-    )
+        to = '/usr/hdp/current/mahout-client/conf')
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm/conf', '/etc/storm/conf.install'),
         not_if = 'test -e /etc/storm/conf.install',
-        sudo = True,
-    )
+        sudo = True)
     self.assertResourceCalled('Directory', '/etc/storm/conf',
-        action = ['delete'],
-    )
+        action = ['delete'])
     self.assertResourceCalled('Link', '/etc/storm/conf',
-        to = '/usr/hdp/current/storm-client/conf',
-    )
+        to = '/usr/hdp/current/storm-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.install'),
+        not_if = 'test -e /etc/ranger/admin/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
+        to = '/usr/hdp/current/ranger-admin/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/flume/conf', '/etc/flume/conf.install'),
+        not_if = 'test -e /etc/flume/conf.install',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/flume/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/flume/conf',
+        to = '/usr/hdp/current/flume-server/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/sqoop/conf', '/etc/sqoop/conf.install'),
+        not_if = 'test -e /etc/sqoop/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/sqoop/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/sqoop/conf',
+        to = '/usr/hdp/current/sqoop-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/accumulo/conf', '/etc/accumulo/conf.install'),
+        not_if = 'test -e /etc/accumulo/conf.install',
+        sudo = True,)
+    self.assertResourceCalled('Directory', '/etc/accumulo/conf',
+        action = ['delete'],)
+    self.assertResourceCalled('Link', '/etc/accumulo/conf',
+        to = '/usr/hdp/current/accumulo-client/conf',)
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/phoenix/conf', '/etc/phoenix/conf.install'),
+        not_if = 'test -e /etc/phoenix/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/phoenix/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/phoenix/conf',
+        to = '/usr/hdp/current/phoenix-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/storm-slider-client/conf', '/etc/storm-slider-client/conf.install'),
+        not_if = 'test -e /etc/storm-slider-client/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/storm-slider-client/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/storm-slider-client/conf',
+        to = '/usr/hdp/current/storm-slider-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/slider/conf', '/etc/slider/conf.install'),
+        not_if = 'test -e /etc/slider/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/slider/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/slider/conf',
+        to = '/usr/hdp/current/slider-client/conf')
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/oozie/conf', '/etc/oozie/conf.install'),
         not_if = 'test -e /etc/oozie/conf.install',
-        sudo = True,
-    )
+        sudo = True)
     self.assertResourceCalled('Directory', '/etc/oozie/conf',
-        action = ['delete'],
-    )
+        action = ['delete'])
     self.assertResourceCalled('Link', '/etc/oozie/conf',
-        to = '/usr/hdp/current/oozie-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.install'),
-        not_if = 'test -e /etc/spark/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/spark/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/spark/conf',
-        to = '/usr/hdp/current/spark-client/conf',
-    )
+        to = '/usr/hdp/current/oozie-client/conf')
+
     self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/falcon/conf', '/etc/falcon/conf.install'),
         not_if = 'test -e /etc/falcon/conf.install',
-        sudo = True,
-    )
+        sudo = True)
     self.assertResourceCalled('Directory', '/etc/falcon/conf',
-        action = ['delete'],
-    )
+        action = ['delete'])
     self.assertResourceCalled('Link', '/etc/falcon/conf',
-        to = '/usr/hdp/current/falcon-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hbase/conf', '/etc/hbase/conf.install'),
-        not_if = 'test -e /etc/hbase/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/hbase/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/hbase/conf',
-        to = '/usr/hdp/current/hbase-client/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/kms/usersync', '/etc/ranger/kms/conf.install'),
-        not_if = 'test -e /etc/ranger/kms/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/ranger/kms/usersync',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/ranger/kms/usersync',
-        to = '/usr/hdp/current/ranger-usersync/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/knox/conf', '/etc/knox/conf.install'),
-        not_if = 'test -e /etc/knox/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/knox/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/knox/conf',
-        to = '/usr/hdp/current/knox-server/conf',
-    )
-    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/ranger/admin/conf', '/etc/ranger/admin/conf.install'),
-        not_if = 'test -e /etc/ranger/admin/conf.install',
-        sudo = True,
-    )
-    self.assertResourceCalled('Directory', '/etc/ranger/admin/conf',
-        action = ['delete'],
-    )
-    self.assertResourceCalled('Link', '/etc/ranger/admin/conf',
-        to = '/usr/hdp/current/ranger-admin/conf',
-    )
+        to = '/usr/hdp/current/falcon-client/conf')
+
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/spark/conf', '/etc/spark/conf.install'),
+        not_if = 'test -e /etc/spark/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/spark/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/spark/conf',
+        to = '/usr/hdp/current/spark-client/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/kafka/conf', '/etc/kafka/conf.install'),
+        not_if = 'test -e /etc/kafka/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/kafka/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/kafka/conf',
+        to = '/usr/hdp/current/kafka-broker/conf')
+
+    self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/etc/hive/conf', '/etc/hive/conf.install'),
+        not_if = 'test -e /etc/hive/conf.install',
+        sudo = True)
+    self.assertResourceCalled('Directory', '/etc/hive/conf',
+        action = ['delete'])
+    self.assertResourceCalled('Link', '/etc/hive/conf',
+        to = '/usr/hdp/current/hive-client/conf')
 
     self.assertNoMoreResources()
