AMBARI-14646. Streamline the usage pattern for kinit'ing in code (aonishuk)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/1d96f61a
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/1d96f61a
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/1d96f61a

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 1d96f61a78146b7c382709fc9acb06d54201e3f5
Parents: 61e81e6
Author: Andrew Onishuk <aonis...@hortonworks.com>
Authored: Wed Jan 13 11:36:40 2016 +0200
Committer: Nate Cole <nc...@hortonworks.com>
Committed: Thu Jan 14 11:43:27 2016 -0500

----------------------------------------------------------------------
 .../TestExecuteHadoopResource.py                | 147 +------------------
 .../libraries/providers/execute_hadoop.py       |  10 --
 .../libraries/resources/execute_hadoop.py       |   6 -
 .../2.1.0.2.0/package/scripts/hdfs_namenode.py  |   3 -
 .../package/scripts/hcat_service_check.py       |  27 ++--
 .../python/stacks/2.0.6/HDFS/test_namenode.py   |   7 +-
 .../2.0.6/HIVE/test_hive_service_check.py       |  10 +-
 7 files changed, 15 insertions(+), 195 deletions(-)
----------------------------------------------------------------------
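
With this change ExecuteHadoop no longer runs kinit internally; call sites that
need a Kerberos ticket obtain one explicitly with Execute first. A minimal
sketch of the before/after calling convention (the params.* names mirror the
hcat_service_check.py diff below and are illustrative, not a verbatim copy):

    # Before: ExecuteHadoop kinit'ed on the caller's behalf
    ExecuteHadoop(test_cmd,
                  user=params.hdfs_user,
                  conf_dir=params.hadoop_conf_dir,
                  security_enabled=params.security_enabled,
                  kinit_path_local=params.kinit_path_local,
                  keytab=params.hdfs_user_keytab,
                  principal=params.hdfs_principal_name)

    # After: the caller kinits explicitly when security is enabled
    if params.security_enabled:
      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
              user=params.hdfs_user)
    ExecuteHadoop(test_cmd,
                  user=params.hdfs_user,
                  conf_dir=params.hadoop_conf_dir)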


http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
----------------------------------------------------------------------
diff --git a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
index 264398b..2938795 100644
--- a/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
+++ b/ambari-agent/src/test/python/resource_management/TestExecuteHadoopResource.py
@@ -51,62 +51,6 @@ class TestExecuteHadoopResource(TestCase):
                         'path': [],
                         'environment': {}})
 
-
-  @patch("resource_management.core.providers.system.ExecuteProvider")
-  def test_run_unknown_conf(self, execute_mock):
-    '''
-    Test when UnknownConfiguration passed
-    '''
-    with Environment() as env:
-      ExecuteHadoop("command",
-                    kinit_path_local=UnknownConfiguration(name="kinit_path_local"),
-                    conf_dir="conf_dir",
-                    user="user",
-                    keytab=UnknownConfiguration(name="keytab"),
-                    security_enabled=False,
-                    principal=UnknownConfiguration(name="principal")
-                    )
-      self.assertEqual(execute_mock.call_count, 1)
-      self.assertEqual(execute_mock.call_args[0][0].command,'hadoop --config conf_dir command')
-      self.assertEqual(execute_mock.call_args[0][0].arguments,
-                       {'logoutput': None,
-                        'tries': 1,
-                        'user': 'user',
-                        'try_sleep': 0,
-                        'path': [],
-                        'environment': {}})
-
-
-  @patch("resource_management.core.providers.system.ExecuteProvider")
-  def test_run_defined_args(self, execute_mock):
-    '''
-    Test if defined arguments are passed to Execute
-    '''
-    with Environment("/") as env:
-      ExecuteHadoop("command",
-                    action="run",
-                    kinit_path_local="path",
-                    conf_dir="conf_dir",
-                    user="user",
-                    tries=2,
-                    keytab="keytab",
-                    security_enabled=False,
-                    kinit_override=False,
-                    try_sleep=2,
-                    logoutput=True,
-                    principal="principal"
-      )
-      self.assertEqual(execute_mock.call_count, 1)
-      self.assertEqual(execute_mock.call_args[0][0].command,'hadoop --config conf_dir command')
-      self.assertEqual(execute_mock.call_args[0][0].arguments,
-                       {'logoutput': True,
-                        'tries': 2,
-                        'user': 'user',
-                        'try_sleep': 2,
-                        'path': [],
-                        'environment': {}})
-
-
   @patch("resource_management.core.providers.system.ExecuteProvider")
   def test_run_command_list(self, execute_mock):
     '''
@@ -115,10 +59,8 @@ class TestExecuteHadoopResource(TestCase):
     with Environment("/") as env:
       ExecuteHadoop(["command1","command2"],
                     action="run",
-                    kinit_path_local="path",
                     conf_dir="conf_dir",
                     user="user",
-                    keytab="keytab"
       )
       self.assertEqual(execute_mock.call_count, 2)
       self.assertEqual(execute_mock.call_args_list[0][0][0].command,
@@ -149,96 +91,9 @@ class TestExecuteHadoopResource(TestCase):
     with Environment("/") as env:
       ExecuteHadoop(("command1","command2","command3"),
                     action="run",
-                    kinit_path_local="path",
                     conf_dir="conf_dir",
                     user="user",
-                    keytab="keytab"
       )
       self.assertEqual(execute_mock.call_count, 1)
       self.assertEqual(execute_mock.call_args[0][0].command,
-                       'hadoop --config conf_dir command1 command2 command3')
-
-
-  @patch("resource_management.core.providers.system.ExecuteProvider")
-  def test_run_secured(self, execute_mock):
-    '''
-    Test security_enabled=True behaviour
-    '''
-    with Environment("/") as env:
-      ExecuteHadoop("command",
-                    action="run",
-                    kinit_path_local="path",
-                    conf_dir="conf_dir",
-                    user="user",
-                    principal="principal",
-                    tries=1,
-                    keytab="keytab",
-                    security_enabled=True,
-                    kinit_override=False,
-                    try_sleep=0,
-                    logoutput=True
-      )
-      self.assertEqual(execute_mock.call_count, 2)
-      self.assertEqual(str(execute_mock.call_args_list[0][0][0]),
-                       'Execute[\'path -kt keytab principal\']')
-      self.assertEqual(execute_mock.call_args_list[0][0][0].command,
-                       'path -kt keytab principal')
-      self.assertEqual(execute_mock.call_args_list[0][0][0].arguments,
-                       {'path': ['/bin'], 'user': 'user'})
-      self.assertEqual(execute_mock.call_args_list[1][0][0].command,
-                       'hadoop --config conf_dir command')
-      self.assertEqual(execute_mock.call_args_list[1][0][0].arguments,
-                       {'logoutput': True,
-                        'tries': 1,
-                        'user': 'user',
-                        'try_sleep': 0,
-                        'path': [],
-                        'environment': {}})
-
-
-  @patch("resource_management.core.providers.system.ExecuteProvider")
-  def test_run_secured_kinit_override(self, execute_mock):
-    '''
-    Test security_enabled=True and kinit_override=True behaviour
-    '''
-    with Environment("/") as env:
-      ExecuteHadoop("command",
-                    action="run",
-                    kinit_path_local="path",
-                    conf_dir="conf_dir",
-                    user="user",
-                    tries=1,
-                    keytab="keytab",
-                    security_enabled=True,
-                    kinit_override=True,
-                    try_sleep=0,
-                    logoutput=True
-      )
-      self.assertEqual(execute_mock.call_count, 1)
-      self.assertEqual(execute_mock.call_args_list[0][0][0].command,
-                       'hadoop --config conf_dir command')
-
-
-  @patch("resource_management.core.providers.system.ExecuteProvider")
-  def test_run_secured_principal(self, execute_mock):
-    '''
-    Test with "principal" argument
-    '''
-    with Environment("/") as env:
-      ExecuteHadoop("command",
-                    action="run",
-                    kinit_path_local="path",
-                    conf_dir="conf_dir",
-                    user="user",
-                    tries=1,
-                    keytab="keytab",
-                    security_enabled=True,
-                    kinit_override=False,
-                    try_sleep=0,
-                    logoutput=True,
-                    principal="principal")
-      self.assertEqual(execute_mock.call_count, 2)
-      self.assertEqual(execute_mock.call_args_list[0][0][0].command,
-                       'path -kt keytab principal')
-      self.assertEqual(execute_mock.call_args_list[1][0][0].command,
-                       'hadoop --config conf_dir command')
+                       'hadoop --config conf_dir command1 command2 command3')
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
index 7b9fdab..7c1a49f 100644
--- a/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/providers/execute_hadoop.py
@@ -29,23 +29,13 @@ from resource_management.core.shell import quote_bash_args
 
 class ExecuteHadoopProvider(Provider):
   def action_run(self):
-    kinit__path_local = self.resource.kinit_path_local
-    keytab = self.resource.keytab
     conf_dir = self.resource.conf_dir
     command = self.resource.command
-    principal = self.resource.principal
     
     if isinstance(command, (list, tuple)):
       command = ' '.join(quote_bash_args(x) for x in command)
     
     with Environment.get_instance_copy() as env:
-      if self.resource.security_enabled and not self.resource.kinit_override:
-        Execute (format("{kinit__path_local} -kt {keytab} {principal}"),
-          path = ['/bin'],
-          user = self.resource.user
-        )
-
-
       Execute (format("hadoop --config {conf_dir} {command}"),
         user        = self.resource.user,
         tries       = self.resource.tries,

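With the security branch removed, action_run reduces to building the command
string and delegating to Execute. A condensed sketch of the remaining body
(the trailing keyword arguments are inferred from the resource definition and
the test expectations; the file may format them differently):

    def action_run(self):
      conf_dir = self.resource.conf_dir
      command = self.resource.command

      # Multiple commands collapse into a single quoted hadoop invocation
      if isinstance(command, (list, tuple)):
        command = ' '.join(quote_bash_args(x) for x in command)

      with Environment.get_instance_copy() as env:
        Execute(format("hadoop --config {conf_dir} {command}"),
                user        = self.resource.user,
                tries       = self.resource.tries,
                try_sleep   = self.resource.try_sleep,
                logoutput   = self.resource.logoutput,
                path        = self.resource.bin_dir,
                environment = self.resource.environment)
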
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
----------------------------------------------------------------------
diff --git a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
index 73dd1a3..2678eb7 100644
--- a/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
+++ b/ambari-common/src/main/python/resource_management/libraries/resources/execute_hadoop.py
@@ -26,20 +26,14 @@ from resource_management.core.base import Resource, ForcedListArgument, Resource
 class ExecuteHadoop(Resource):
   action = ForcedListArgument(default="run")
   command = ResourceArgument(default=lambda obj: obj.name)
-  kinit_override = BooleanArgument(default=False)
   tries = ResourceArgument(default=1)
   try_sleep = ResourceArgument(default=0) # seconds
   user = ResourceArgument()
   logoutput = ResourceArgument()
-  principal = ResourceArgument()
   bin_dir = ResourceArgument(default=[]) # appended to $PATH
   environment = ResourceArgument(default={})
   
   conf_dir = ResourceArgument()
   
-  security_enabled = BooleanArgument(default=False)
-  keytab = ResourceArgument()
-  kinit_path_local = ResourceArgument()
-  
   actions = Resource.actions + ["run"]
   

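A call against the slimmed-down resource now carries only the command and its
execution details, matching what the updated tests below assert, e.g.:

    ExecuteHadoop('fs -test -e /apps/hive/warehouse/hcatsmoke',
                  user='hdfs',
                  conf_dir='/etc/hadoop/conf',
                  bin_dir='/bin:/usr/lib/hive/bin:/usr/bin',
                  logoutput=True)
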
http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
index 12ca1ad..2b417ac 100644
--- a/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
+++ b/ambari-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_namenode.py
@@ -290,7 +290,6 @@ def format_namenode(force=None):
   if not params.dfs_ha_enabled:
     if force:
       ExecuteHadoop('namenode -format',
-                    kinit_override=True,
                     bin_dir=params.hadoop_bin_dir,
                     conf_dir=hadoop_conf_dir)
     else:
@@ -310,7 +309,6 @@ def format_namenode(force=None):
       # only format the "active" namenode in an HA deployment
       if force:
         ExecuteHadoop('namenode -format',
-                      kinit_override=True,
                       bin_dir=params.hadoop_bin_dir,
                       conf_dir=hadoop_conf_dir)
       else:
@@ -406,7 +404,6 @@ def decommission():
     ExecuteHadoop(nn_refresh_cmd,
                   user=hdfs_user,
                   conf_dir=conf_dir,
-                  kinit_override=True,
                   bin_dir=params.hadoop_bin_dir)
 
 @OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
index dcca88e..27ff29a 100644
--- a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
+++ b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hcat_service_check.py
@@ -65,26 +65,17 @@ def hcat_service_check():
             logoutput=True)
 
     if params.security_enabled:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab,
-                    principal=params.hdfs_principal_name,
-                    bin_dir=params.execute_path)
-    else:
-      ExecuteHadoop(test_cmd,
-                    user=params.hdfs_user,
-                    logoutput=True,
-                    conf_dir=params.hadoop_conf_dir,
-                    security_enabled=params.security_enabled,
-                    kinit_path_local=params.kinit_path_local,
-                    keytab=params.hdfs_user_keytab,
-                    bin_dir=params.execute_path
+      Execute (format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
+               user = params.hdfs_user,
       )
 
+    ExecuteHadoop(test_cmd,
+                  user=params.hdfs_user,
+                  logoutput=True,
+                  conf_dir=params.hadoop_conf_dir,
+                  bin_dir=params.execute_path
+    )
+
    cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup {purge_tables}")
 
     Execute(cleanup_cmd,

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
index 36d9133..39244ff 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HDFS/test_namenode.py
@@ -1021,8 +1021,7 @@ class TestNamenode(RMFTestCase):
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
                               user = 'hdfs',
                               conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin',
-                              kinit_override = True)
+                              bin_dir = '/usr/bin')
     self.assertNoMoreResources()
 
   def test_decommission_update_exclude_file_only(self):
@@ -1058,8 +1057,7 @@ class TestNamenode(RMFTestCase):
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
                               user = 'hdfs',
                               conf_dir = '/etc/hadoop/conf',
-                              bin_dir = '/usr/bin',
-                              kinit_override = True)
+                              bin_dir = '/usr/bin')
     self.assertNoMoreResources()
 
 
@@ -1082,7 +1080,6 @@ class TestNamenode(RMFTestCase):
    self.assertResourceCalled('ExecuteHadoop', 'dfsadmin -fs hdfs://c6401.ambari.apache.org:8020 -refreshNodes',
         bin_dir = '/usr/bin',
         conf_dir = '/etc/hadoop/conf',
-        kinit_override = True,
         user = 'hdfs',
     )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/1d96f61a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index b755d46..ea17c27 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -62,11 +62,8 @@ class TestServiceCheck(RMFTestCase):
         try_sleep = 5,
     )
    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-        security_enabled = False,
-        keytab = UnknownConfigurationMock(),
         conf_dir = '/etc/hadoop/conf',
         logoutput = True,
-        kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         bin_dir = '/bin:/usr/lib/hive/bin:/usr/bin',
     )
@@ -172,15 +169,14 @@ class TestServiceCheck(RMFTestCase):
         user = 'ambari-qa',
         try_sleep = 5,
     )
+    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/hdfs.headless.keytab hdfs',
+        user = 'hdfs',
+    )
    self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
-        security_enabled = True,
-        keytab = '/etc/security/keytabs/hdfs.headless.keytab',
         conf_dir = '/etc/hadoop/conf',
         logoutput = True,
-        kinit_path_local = '/usr/bin/kinit',
         user = 'hdfs',
         bin_dir = '/bin:/usr/lib/hive/bin:/usr/bin',
-        principal = 'hdfs',
     )
    self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari...@example.com;  /tmp/hcatSmoke.sh hcatsmoke cleanup false',
         logoutput = True,
