[4/5] ambari git commit: AMBARI-11034. Implement HdfsResources and its usage (aonishuk)

2015-05-12 Thread aonishuk
http://git-wip-us.apache.org/repos/asf/ambari/blob/c2f35d48/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index bfd4e74..6edca7d 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,6 +22,7 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
+import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -81,18 +82,96 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-params.HdfsDirectory(params.hive_apps_whs_dir,
- action="create_delayed",
- owner=params.hive_user,
- mode=0777
+if params.hdp_stack_version_major != "" and 
compare_versions(params.hdp_stack_version_major, '2.2') >=0:
+  
params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+  type="file",
+  action="create_on_execute",
+  source=params.mapreduce_tar_source,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+
+if params.hdp_stack_version_major != "" and 
compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+  params.HdfsResource(params.webhcat_apps_dir,
+   type="directory",
+   action="create_on_execute",
+   owner=params.webhcat_user,
+   mode=0755
+  )
+  
+if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+  params.HdfsResource(params.hcat_hdfs_user_dir,
+   type="directory",
+   action="create_on_execute",
+   owner=params.hcat_user,
+   mode=params.hcat_hdfs_user_mode
+  )
+params.HdfsResource(params.webhcat_hdfs_user_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.webhcat_user,
+ mode=params.webhcat_hdfs_user_mode
+)
+  
+for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
+  src_filename = os.path.basename(src_filepath)
+  
params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content()
 + '/' + src_filename,
+  type="file",
+  action="create_on_execute",
+  source=src_filepath,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+if (os.path.isfile(params.pig_tar_source)):
+  
params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
+  type="file",
+  action="create_on_execute",
+  source=params.pig_tar_source,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+
params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
+type="file",
+action="create_on_execute",
+source=params.hive_tar_source,
+group=params.user_group,
+mode=params.tarballs_mode
+)
+ 
+for src_filepath in glob.glob(params.sqoop_tar_source):
+  src_filename = os.path.basename(src_filepath)
+  
params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content()
 + '/' + src_filename,
+  type="file",
+  action="create_on_execute",
+  source=src_filepath,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+params.HdfsResource(params.hive_apps_whs_dir,
+ type="directory",
+  action="create_on_execute",
+  owner=params.hive_user,
+  mode=0777
 )
-params.HdfsDirectory(params.hive_hdfs_user_dir,
- action="create_delayed",
- owner=params.hive_user,
- mode=params.hive_hdfs_user_mode
+params.HdfsResource(params.hive_hdfs_user_dir,
+ type="directory",
+  ac

[4/5] ambari git commit: AMBARI-11034. Implement HdfsResources and its usage (aonishuk)

2015-05-08 Thread aonishuk
http://git-wip-us.apache.org/repos/asf/ambari/blob/6e8dce44/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
--
diff --git 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
index bfd4e74..6edca7d 100644
--- 
a/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
+++ 
b/ambari-server/src/main/resources/common-services/HIVE/0.12.0.2.0/package/scripts/hive.py
@@ -22,6 +22,7 @@ from resource_management import *
 from resource_management.libraries import functions
 import sys
 import os
+import glob
 from ambari_commons.os_family_impl import OsFamilyFuncImpl, OsFamilyImpl
 from ambari_commons import OSConst
 from urlparse import urlparse
@@ -81,18 +82,96 @@ def hive(name=None):
 
   if name == 'hiveserver2':
 
-params.HdfsDirectory(params.hive_apps_whs_dir,
- action="create_delayed",
- owner=params.hive_user,
- mode=0777
+if params.hdp_stack_version_major != "" and 
compare_versions(params.hdp_stack_version_major, '2.2') >=0:
+  
params.HdfsResource(InlineTemplate(params.mapreduce_tar_destination).get_content(),
+  type="file",
+  action="create_on_execute",
+  source=params.mapreduce_tar_source,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+
+if params.hdp_stack_version_major != "" and 
compare_versions(params.hdp_stack_version_major, "2.2.0.0") < 0:
+  params.HdfsResource(params.webhcat_apps_dir,
+   type="directory",
+   action="create_on_execute",
+   owner=params.webhcat_user,
+   mode=0755
+  )
+  
+if params.hcat_hdfs_user_dir != params.webhcat_hdfs_user_dir:
+  params.HdfsResource(params.hcat_hdfs_user_dir,
+   type="directory",
+   action="create_on_execute",
+   owner=params.hcat_user,
+   mode=params.hcat_hdfs_user_mode
+  )
+params.HdfsResource(params.webhcat_hdfs_user_dir,
+ type="directory",
+ action="create_on_execute",
+ owner=params.webhcat_user,
+ mode=params.webhcat_hdfs_user_mode
+)
+  
+for src_filepath in glob.glob(params.hadoop_streaming_tar_source):
+  src_filename = os.path.basename(src_filepath)
+  
params.HdfsResource(InlineTemplate(params.hadoop_streaming_tar_destination_dir).get_content()
 + '/' + src_filename,
+  type="file",
+  action="create_on_execute",
+  source=src_filepath,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+if (os.path.isfile(params.pig_tar_source)):
+  
params.HdfsResource(InlineTemplate(params.pig_tar_destination).get_content(),
+  type="file",
+  action="create_on_execute",
+  source=params.pig_tar_source,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+
params.HdfsResource(InlineTemplate(params.hive_tar_destination).get_content(),
+type="file",
+action="create_on_execute",
+source=params.hive_tar_source,
+group=params.user_group,
+mode=params.tarballs_mode
+)
+ 
+for src_filepath in glob.glob(params.sqoop_tar_source):
+  src_filename = os.path.basename(src_filepath)
+  
params.HdfsResource(InlineTemplate(params.sqoop_tar_destination_dir).get_content()
 + '/' + src_filename,
+  type="file",
+  action="create_on_execute",
+  source=src_filepath,
+  group=params.user_group,
+  mode=params.tarballs_mode
+  )
+  
+params.HdfsResource(params.hive_apps_whs_dir,
+ type="directory",
+  action="create_on_execute",
+  owner=params.hive_user,
+  mode=0777
 )
-params.HdfsDirectory(params.hive_hdfs_user_dir,
- action="create_delayed",
- owner=params.hive_user,
- mode=params.hive_hdfs_user_mode
+params.HdfsResource(params.hive_hdfs_user_dir,
+ type="directory",
+  ac