Repository: ambari
Updated Branches:
  refs/heads/trunk f1117c25d -> be6b6b872


AMBARI-18700. Add HDFS resources for HBase, Spark, Spark2, Zeppelin to AmbariPreupload script. (Attila Doroszlai via stoader).


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/be6b6b87
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/be6b6b87
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/be6b6b87

Branch: refs/heads/trunk
Commit: be6b6b872861ee4b1daa8f755f175996c95334f5
Parents: f1117c2
Author: Attila Doroszlai <adorosz...@hortonworks.com>
Authored: Thu Nov 3 11:53:05 2016 +0100
Committer: Toader, Sebastian <stoa...@hortonworks.com>
Committed: Thu Nov 3 11:55:03 2016 +0100

----------------------------------------------------------------------
 .../2.0.0/package/scripts/spark_service.py       |  9 +++++----
 .../main/resources/scripts/Ambaripreupload.py    | 19 ++++++++++++++++++-
 2 files changed, 23 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/be6b6b87/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
index 1cbca8b..12988c8 100755
--- a/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
+++ b/ambari-server/src/main/resources/common-services/SPARK2/2.0.0/package/scripts/spark_service.py
@@ -57,10 +57,11 @@ def spark_service(name, upgrade_type=None, action=None):
 
    if name == 'jobhistoryserver' and effective_version and check_stack_feature(StackFeature.SPARK_16PLUS, effective_version):
       # create & copy spark2-hdp-yarn-archive.tar.gz to hdfs
-      source_dir=params.spark_home+"/jars"
-      tmp_archive_file=get_tarball_paths("spark2")[1]
-      make_tarfile(tmp_archive_file, source_dir)
-      copy_to_hdfs("spark2", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
+      if not params.sysprep_skip_copy_tarballs_hdfs:
+          source_dir=params.spark_home+"/jars"
+          tmp_archive_file=get_tarball_paths("spark2")[1]
+          make_tarfile(tmp_archive_file, source_dir)
+          copy_to_hdfs("spark2", params.user_group, params.hdfs_user, skip=params.sysprep_skip_copy_tarballs_hdfs)
       # create spark history directory
       params.HdfsResource(params.spark_history_dir,
                           type="directory",

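The hunk above wraps the Spark2 tarball packaging and upload in a sysprep guard, so hosts built from a sysprepped image (where the archive is already in HDFS) no longer build and push it on every start. A minimal standalone sketch of that guard, with an illustrative function name and bare parameters rather than Ambari's real resource-management API:

import tarfile

def upload_spark2_archive(spark_home, tmp_archive_file, skip_copy_tarballs_hdfs):
    # Hypothetical, simplified rendering of the guarded block above.
    if skip_copy_tarballs_hdfs:
        # Sysprepped host: the archive is assumed to be in HDFS already,
        # so skip both the local tar creation and the upload.
        return
    # Pack $SPARK_HOME/jars into the temporary archive...
    with tarfile.open(tmp_archive_file, "w:gz") as tar:
        tar.add(spark_home + "/jars", arcname="jars")
    # ...then hand it to the cluster's copy-to-HDFS routine; in the patch
    # this is copy_to_hdfs("spark2", ...), stubbed out here.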
http://git-wip-us.apache.org/repos/asf/ambari/blob/be6b6b87/ambari-server/src/main/resources/scripts/Ambaripreupload.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 1082b5e..c04424f 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -243,6 +243,7 @@ with Environment() as env:
    return _copy_files(source_and_dest_pairs, file_owner, group_owner, kinit_if_needed)
   
   def createHdfsResources():
+    print "Creating hdfs directories..."
    params.HdfsResource(format('{hdfs_path_prefix}/atshistory'), user='hdfs', change_permissions_for_parents=True, owner='yarn', group='hadoop', type='directory', action= ['create_on_execute'], mode=0755)
    params.HdfsResource(format('{hdfs_path_prefix}/user/hcat'), owner='hcat', type='directory', action=['create_on_execute'], mode=0755)
    params.HdfsResource(format('{hdfs_path_prefix}/hive/warehouse'), owner='hive', type='directory', action=['create_on_execute'], mode=0777)
@@ -261,8 +262,23 @@ with Environment() as env:
    params.HdfsResource(format('{hdfs_path_prefix}/amshbase/staging'), owner='ams', type='directory', action=['create_on_execute'], mode=0711)
    params.HdfsResource(format('{hdfs_path_prefix}/user/ams/hbase'), owner='ams', type='directory', action=['create_on_execute'], mode=0775)
    params.HdfsResource(format('{hdfs_path_prefix}/hdp'), owner='hdfs', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/spark'), owner='spark', group='hadoop', type='directory', action=['create_on_execute'], mode=0775)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/livy'), owner='livy', group='hadoop', type='directory', action=['create_on_execute'], mode=0775)
    params.HdfsResource(format('{hdfs_path_prefix}/hdp/spark-events'), owner='spark', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource(format('{hdfs_path_prefix}/hdp/spark2-events'), owner='spark', group='hadoop', type='directory', action=['create_on_execute'], mode=0777)
+    params.HdfsResource(format('{hdfs_path_prefix}/hbase'), owner='hbase', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/apps/hbase/staging'), owner='hbase', type='directory', action=['create_on_execute'], mode=0711)
+    params.HdfsResource(format('{hdfs_path_prefix}/user/hbase'), owner='hbase', type='directory', action=['create_on_execute'], mode=0755)
+    params.HdfsResource(format('{hdfs_path_prefix}/apps/zeppelin'), owner='zeppelin', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/user/zeppelin'), owner='zeppelin', group='hadoop', type='directory', action=['create_on_execute'])
+    params.HdfsResource(format('{hdfs_path_prefix}/user/zeppelin/test'), owner='zeppelin', group='hadoop', type='directory', action=['create_on_execute'])
 
+  def copy_zeppelin_dependencies_to_hdfs(file_pattern):
+    spark_deps_full_path = glob.glob(file_pattern)
+    if spark_deps_full_path and os.path.exists(spark_deps_full_path[0]):
+      copy_tarballs_to_hdfs(spark_deps_full_path[0], hdfs_path_prefix+'/apps/zeppelin/', 'hadoop-mapreduce-historyserver', params.hdfs_user, 'zeppelin', 'zeppelin')
+    else:
+      Logger.info('zeppelin-spark-dependencies not found at %s.' % file_pattern)
 
   def putCreatedHdfsResourcesToIgnore(env):
     if not 'hdfs_files' in env.config:
@@ -397,8 +413,9 @@ with Environment() as env:
   copy_tarballs_to_hdfs(format("/usr/hdp/{stack_version}/pig/pig.tar.gz"), 
hdfs_path_prefix+"/hdp/apps/{{ stack_version_formatted }}/pig/", 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   
copy_tarballs_to_hdfs(format("/usr/hdp/{stack_version}/hadoop-mapreduce/hadoop-streaming.jar"),
 hdfs_path_prefix+"/hdp/apps/{{ stack_version_formatted }}/mapreduce/", 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(format("/usr/hdp/{stack_version}/sqoop/sqoop.tar.gz"), 
hdfs_path_prefix+"/hdp/apps/{{ stack_version_formatted }}/sqoop/", 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
-  print "Creating hdfs directories..."
+  
   createHdfsResources()
+  copy_zeppelin_dependencies_to_hdfs(format("/usr/hdp/{stack_version}/zeppelin/interpreter/spark/dep/zeppelin-spark-dependencies-*.jar"))
   putSQLDriverToOozieShared()
   putCreatedHdfsResourcesToIgnore(env)
 
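Two notes on the Ambaripreupload.py hunks: the mode values (0755, 0775, 0777, 0711) are Python 2 octal literals for the HDFS permission bits, and the new copy_zeppelin_dependencies_to_hdfs helper resolves the versioned zeppelin-spark-dependencies jar with a glob before handing it to copy_tarballs_to_hdfs. A minimal sketch of that lookup, assuming an illustrative helper name and wildcard pattern rather than Ambari's own code:

import glob
import os

def find_zeppelin_spark_dependency(file_pattern):
    # glob expands the version wildcards in the path; like the patch,
    # take the first match if one exists.
    matches = glob.glob(file_pattern)
    if matches and os.path.exists(matches[0]):
        return matches[0]
    return None

# Example pattern; the stack version segment is also left as a wildcard here.
jar = find_zeppelin_spark_dependency(
    "/usr/hdp/*/zeppelin/interpreter/spark/dep/zeppelin-spark-dependencies-*.jar")
if jar is None:
    print("zeppelin-spark-dependencies not found, nothing to upload")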
