[1/2] ambari git commit: AMBARI-11747. Modify resource pre-upload script to handle recent changes to stack and HdfsResource (smohanty)

2015-06-06 Thread smohanty
Repository: ambari
Updated Branches:
  refs/heads/branch-2.1 8c663f5ae -> c53f3d09b


AMBARI-11747. Modify resource pre-upload script to handle recent changes to 
stack and HdfsResource (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/317f0c76
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/317f0c76
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/317f0c76

Branch: refs/heads/branch-2.1
Commit: 317f0c761cedecccfcaea71f9d71e6424e6b4a7e
Parents: 8c663f5
Author: Sumit Mohanty smoha...@hortonworks.com
Authored: Fri Jun 5 18:43:45 2015 -0700
Committer: Sumit Mohanty smoha...@hortonworks.com
Committed: Sat Jun 6 07:37:32 2015 -0700

--
 .../main/resources/scripts/Ambaripreupload.py   | 37 +++-
 1 file changed, 28 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/317f0c76/ambari-server/src/main/resources/scripts/Ambaripreupload.py
--
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py 
b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 548983c..fd72f24 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -63,20 +63,21 @@ def getPropertyValueFromConfigXMLFile(xmlfile, name, 
defaultValue=None):
 if len(node.childNodes) > 0:
   return node.childNodes[0].nodeValue
 else:
-  return ''
+  return defaultValue
   return defaultValue
 
 def get_fs_root(fsdefaultName=None):
-  return getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/core-site.xml, 
fs.defaultFS)
-  if fsdefaultName is None:
-fsdefaultName = fake
+  fsdefaultName = fake

   while (not fsdefaultName.startswith("wasb://")):
 fsdefaultName =  
getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/core-site.xml, 
fs.defaultFS)
 if fsdefaultName is None:
   fsdefaultName = fake
+print "Waiting to read appropriate value of fs.defaultFS from /etc/hadoop/conf/core-site.xml ..."
 time.sleep(10)
-  
+pass
+
+  print "Returning fs.defaultFS - " + fsdefaultName
   return fsdefaultName
  
 # These values must be the suffix of the properties in cluster-env.xml
@@ -94,9 +95,20 @@ class params:
   execute_path = /usr/hdp/current/hadoop-client/bin
   ambari_libs_dir = /var/lib/ambari-agent/lib
   hdfs_site = ConfigDictionary({'dfs.webhdfs.enabled':False, 
-'dfs.namenode.http-address': 
getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/hdfs-site.xml, 
dfs.namenode.http-address)
   })
   fs_default = get_fs_root()
+  oozie_env_sh_template = \
+'''
+#!/bin/bash
+
+export OOZIE_CONFIG=${OOZIE_CONFIG:-/usr/hdp/current/oozie/conf}
+export OOZIE_DATA=${OOZIE_DATA:-/var/lib/oozie/data}
+export OOZIE_LOG=${OOZIE_LOG:-/var/log/oozie}
+export 
CATALINA_BASE=${CATALINA_BASE:-/usr/hdp/current/oozie-server/oozie-server}
+export CATALINA_TMPDIR=${CATALINA_TMPDIR:-/var/tmp/oozie}
+export CATALINA_PID=${CATALINA_PID:-/var/run/oozie/oozie.pid}
+export OOZIE_CATALINA_HOME=/usr/lib/bigtop-tomcat
+'''
   
   HdfsResource = functools.partial(
 HdfsResource,
@@ -213,7 +225,12 @@ no_op_test = "ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run
 
 with Environment() as env:
   env.set_params(params)
-  
+
+  File(/etc/oozie/conf/oozie-env.sh,
+   owner=params.oozie_user,
+   content=params.oozie_env_sh_template
+  )
+
   hashcode_file = format({oozie_home}/.hashcode)
   hashcode = 
hashlib.md5(format('{oozie_home}/oozie-sharelib.tar.gz')).hexdigest()
   skip_recreate_sharelib = format("test -f {hashcode_file} && test -d {oozie_home}/share && [[ `cat {hashcode_file}` == '{hashcode}' ]]")
@@ -250,13 +267,14 @@ with Environment() as env:
 source = oozie_shared_lib,
   )
 
+  print "Copying tarballs..."
   copy_tarballs_to_hdfs(/usr/hdp/current/hadoop-client/mapreduce.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/mapreduce/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/tez-client/lib/tez.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/tez/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/hive-client/hive.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/hive/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/pig-client/pig.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/pig/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   

ambari git commit: AMBARI-11747. Modify resource pre-upload script to handle recent changes to stack and HdfsResource (smohanty)

2015-06-05 Thread smohanty
Repository: ambari
Updated Branches:
  refs/heads/trunk 89ba4a4bb -> e973a534f


AMBARI-11747. Modify resource pre-upload script to handle recent changes to 
stack and HdfsResource (smohanty)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/e973a534
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/e973a534
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/e973a534

Branch: refs/heads/trunk
Commit: e973a534f0fcb331dc8d71bdb59843189fdfc2b5
Parents: 89ba4a4
Author: Sumit Mohanty smoha...@hortonworks.com
Authored: Fri Jun 5 18:43:45 2015 -0700
Committer: Sumit Mohanty smoha...@hortonworks.com
Committed: Fri Jun 5 18:43:45 2015 -0700

--
 .../main/resources/scripts/Ambaripreupload.py   | 37 +++-
 1 file changed, 28 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/e973a534/ambari-server/src/main/resources/scripts/Ambaripreupload.py
--
diff --git a/ambari-server/src/main/resources/scripts/Ambaripreupload.py 
b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
index 548983c..fd72f24 100644
--- a/ambari-server/src/main/resources/scripts/Ambaripreupload.py
+++ b/ambari-server/src/main/resources/scripts/Ambaripreupload.py
@@ -63,20 +63,21 @@ def getPropertyValueFromConfigXMLFile(xmlfile, name, 
defaultValue=None):
 if len(node.childNodes) > 0:
   return node.childNodes[0].nodeValue
 else:
-  return ''
+  return defaultValue
   return defaultValue
 
 def get_fs_root(fsdefaultName=None):
-  return getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/core-site.xml, 
fs.defaultFS)
-  if fsdefaultName is None:
-fsdefaultName = fake
+  fsdefaultName = fake

   while (not fsdefaultName.startswith("wasb://")):
 fsdefaultName =  
getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/core-site.xml, 
fs.defaultFS)
 if fsdefaultName is None:
   fsdefaultName = fake
+print "Waiting to read appropriate value of fs.defaultFS from /etc/hadoop/conf/core-site.xml ..."
 time.sleep(10)
-  
+pass
+
+  print "Returning fs.defaultFS - " + fsdefaultName
   return fsdefaultName
  
 # These values must be the suffix of the properties in cluster-env.xml
@@ -94,9 +95,20 @@ class params:
   execute_path = /usr/hdp/current/hadoop-client/bin
   ambari_libs_dir = /var/lib/ambari-agent/lib
   hdfs_site = ConfigDictionary({'dfs.webhdfs.enabled':False, 
-'dfs.namenode.http-address': 
getPropertyValueFromConfigXMLFile(/etc/hadoop/conf/hdfs-site.xml, 
dfs.namenode.http-address)
   })
   fs_default = get_fs_root()
+  oozie_env_sh_template = \
+'''
+#!/bin/bash
+
+export OOZIE_CONFIG=${OOZIE_CONFIG:-/usr/hdp/current/oozie/conf}
+export OOZIE_DATA=${OOZIE_DATA:-/var/lib/oozie/data}
+export OOZIE_LOG=${OOZIE_LOG:-/var/log/oozie}
+export 
CATALINA_BASE=${CATALINA_BASE:-/usr/hdp/current/oozie-server/oozie-server}
+export CATALINA_TMPDIR=${CATALINA_TMPDIR:-/var/tmp/oozie}
+export CATALINA_PID=${CATALINA_PID:-/var/run/oozie/oozie.pid}
+export OOZIE_CATALINA_HOME=/usr/lib/bigtop-tomcat
+'''
   
   HdfsResource = functools.partial(
 HdfsResource,
@@ -213,7 +225,12 @@ no_op_test = "ls /var/run/oozie/oozie.pid >/dev/null 2>&1 && ps -p `cat /var/run
 
 with Environment() as env:
   env.set_params(params)
-  
+
+  File(/etc/oozie/conf/oozie-env.sh,
+   owner=params.oozie_user,
+   content=params.oozie_env_sh_template
+  )
+
   hashcode_file = format({oozie_home}/.hashcode)
   hashcode = 
hashlib.md5(format('{oozie_home}/oozie-sharelib.tar.gz')).hexdigest()
   skip_recreate_sharelib = format("test -f {hashcode_file} && test -d {oozie_home}/share && [[ `cat {hashcode_file}` == '{hashcode}' ]]")
@@ -250,13 +267,14 @@ with Environment() as env:
 source = oozie_shared_lib,
   )
 
+  print "Copying tarballs..."
   copy_tarballs_to_hdfs(/usr/hdp/current/hadoop-client/mapreduce.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/mapreduce/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/tez-client/lib/tez.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/tez/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/hive-client/hive.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/hive/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)
   copy_tarballs_to_hdfs(/usr/hdp/current/pig-client/pig.tar.gz, 
hdfs_path_prefix+/hdp/apps/{{ hdp_stack_version }}/pig/, 
'hadoop-mapreduce-historyserver', params.mapred_user, params.hdfs_user, 
params.user_group)