[1/5] ambari git commit: AMBARI-11034. Implement HdfsResources and its usage (aonishuk)
Repository: ambari Updated Branches: refs/heads/trunk 6f6c38310 -> c2f35d489 http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py -- diff --git a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py index 60d7924..4e8ec42 100644 --- a/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py +++ b/ambari-server/src/test/python/stacks/2.2/SPARK/test_job_history_server.py @@ -93,6 +93,28 @@ class TestJobHistoryServer(RMFTestCase): self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/spark.service.keytab spark/localh...@example.com; ', user = 'spark', ) +self.assertResourceCalled('HdfsResource', 'hdfs:///hdp/apps/2.2.0.0/tez//tez.tar.gz', +security_enabled = True, +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', +keytab = UnknownConfigurationMock(), +source = '/usr/hdp/current/tez-client/lib/tez.tar.gz', +kinit_path_local = '/usr/bin/kinit', +user = UnknownConfigurationMock(), +owner = UnknownConfigurationMock(), +group = 'hadoop', +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', +type = 'file', +action = ['create_on_execute'], +) +self.assertResourceCalled('HdfsResource', None, +security_enabled = True, +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', +keytab = UnknownConfigurationMock(), +kinit_path_local = '/usr/bin/kinit', +user = UnknownConfigurationMock(), +action = ['execute'], +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', +) self.assertResourceCalled('Execute', '/usr/hdp/current/spark-client/sbin/start-history-server.sh', environment = {'JAVA_HOME': u'/usr/jdk64/jdk1.7.0_45'}, not_if = 'ls /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid >/dev/null 2>&1 && ps -p `cat /var/run/spark/spark-spark-org.apache.spark.deploy.history.HistoryServer-1.pid` >/dev/null 2>&1', @@ -128,16 
+150,26 @@ class TestJobHistoryServer(RMFTestCase): group = 'hadoop', recursive = True, ) -self.assertResourceCalled('HdfsDirectory', '/user/spark', +self.assertResourceCalled('HdfsResource', '/user/spark', security_enabled = False, +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', keytab = UnknownConfigurationMock(), -conf_dir = '/usr/hdp/current/hadoop-client/conf', -hdfs_user = 'hdfs', kinit_path_local = '/usr/bin/kinit', -mode = 0775, +user = 'hdfs', owner = 'spark', -bin_dir = '/usr/hdp/current/hadoop-client/bin', -action = ['create'], +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', +type = 'directory', +action = ['create_on_execute'], +mode = 0775, +) +self.assertResourceCalled('HdfsResource', None, +security_enabled = False, +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', +keytab = UnknownConfigurationMock(), +kinit_path_local = '/usr/bin/kinit', +user = 'hdfs', +action = ['execute'], +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', ) self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/conf/spark-defaults.conf', key_value_delimiter = ' ', @@ -175,16 +207,26 @@ class TestJobHistoryServer(RMFTestCase): group = 'hadoop', recursive = True, ) -self.assertResourceCalled('HdfsDirectory', '/user/spark', +self.assertResourceCalled('HdfsResource', '/user/spark', security_enabled = True, +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', keytab = UnknownConfigurationMock(), -conf_dir = '/usr/hdp/current/hadoop-client/conf', -hdfs_user = UnknownConfigurationMock(), kinit_path_local = '/usr/bin/kinit', -mode = 0775, +user = UnknownConfigurationMock(), owner = 'spark', -bin_dir = '/usr/hdp/current/hadoop-client/bin', -action = ['create'], +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', +type = 'directory', +action = ['create_on_execute'], +mode = 0775, +) +self.assertResourceCalled('HdfsResource', None, +security_enabled = True, +hadoop_bin_dir = '/usr/hdp/current/hadoop-client/bin', +keytab = 
UnknownConfigurationMock(), +kinit_path_local = '/usr/bin/kinit', +user = UnknownConfigurationMock(), +action = ['execute'], +hadoop_conf_dir = '/usr/hdp/current/hadoop-client/conf', ) self.assertResourceCalled('PropertiesFile', '/usr/hdp/current/spark-client/con
[1/5] ambari git commit: AMBARI-11034. Implement HdfsResources and its usage (aonishuk)
Repository: ambari Updated Branches: refs/heads/trunk 92da024a5 -> 6e8dce443 http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.2/configs/default.json -- diff --git a/ambari-server/src/test/python/stacks/2.2/configs/default.json b/ambari-server/src/test/python/stacks/2.2/configs/default.json index c5a6ae7..8188928 100644 --- a/ambari-server/src/test/python/stacks/2.2/configs/default.json +++ b/ambari-server/src/test/python/stacks/2.2/configs/default.json @@ -185,7 +185,18 @@ "ignore_groupsusers_create": "false", "smokeuser": "ambari-qa", "kerberos_domain": "EXAMPLE.COM", -"user_group": "hadoop" +"user_group": "hadoop", + "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", + "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", + "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", + "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", + "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", + "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", + "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", + "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", + "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", + "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", + "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz" }, "ranger-knox-plugin-properties": { "POLICY_MGR_URL": "{{policymgr_mgr_url}}", http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.2/configs/secured.json -- diff --git a/ambari-server/src/test/python/stacks/2.2/configs/secured.json b/ambari-server/src/test/python/stacks/2.2/configs/secured.json index 5bd8814..e224ebc 100644 --- a/ambari-server/src/test/python/stacks/2.2/configs/secured.json +++ 
b/ambari-server/src/test/python/stacks/2.2/configs/secured.json @@ -173,7 +173,18 @@ "user_group": "hadoop", "smokeuser_keytab": "/etc/security/keytabs/smokeuser.headless.keytab", "smokeuser_principal_name": "ambari...@example.com", -"kinit_path_local": "/usr/bin" +"kinit_path_local": "/usr/bin", + "hive_tar_source": "/usr/hdp/current/hive-client/hive.tar.gz", + "hadoop-streaming_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", + "pig_tar_source": "/usr/hdp/current/pig-client/pig.tar.gz", + "hive_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/hive/", + "tez_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/tez/", + "mapreduce_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/mapreduce/", + "tez_tar_source": "/usr/hdp/current/tez-client/lib/tez.tar.gz", + "pig_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/pig/", + "mapreduce_tar_source": "/usr/hdp/current/hadoop-client/mapreduce.tar.gz", + "sqoop_tar_destination_folder": "hdfs:///hdp/apps/{{ hdp_stack_version }}/sqoop/", + "sqoop_tar_source": "/usr/hdp/current/sqoop-client/sqoop.tar.gz" }, "webhcat-site": { "templeton.jar": "/usr/hdp/current/hive-webhcat/share/webhcat/svr/lib/hive-webhcat-*.jar", http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py -- diff --git a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py index 0d943c4..665119f 100644 --- a/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py +++ b/ambari-server/src/test/python/stacks/2.3/MAHOUT/test_mahout_service_check.py @@ -33,35 +33,42 @@ class TestMahoutClient(RMFTestCase): target = RMFTestCase.TARGET_COMMON_SERVICES ) -self.assertResourceCalled('ExecuteHadoop', 'fs -rm -r -f /user/ambari-qa/mahoutsmokeoutput 
/user/ambari-qa/mahoutsmokeinput', - security_enabled = False, - keytab = UnknownConfigurationMock(), - conf_dir = '/usr/hdp/current/hadoop-client/conf', -