[2/2] git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

2014-11-06 Thread swagle
AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command 
line. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b5ed4413
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b5ed4413
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b5ed4413

Branch: refs/heads/branch-1.7.0
Commit: b5ed44138a9781acc6eea877eb71a670f4adaa90
Parents: ceb4549
Author: Siddharth Wagle swa...@hortonworks.com
Authored: Wed Nov 5 18:58:37 2014 -0800
Committer: Siddharth Wagle swa...@hortonworks.com
Committed: Thu Nov 6 09:29:56 2014 -0800

--
 .../2.0.6/hooks/before-ANY/scripts/params.py|  3 ++
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 
 .../services/HDFS/package/scripts/params.py |  2 +
 .../services/HDFS/package/scripts/utils.py  | 12 -
 .../services/HDFS/configuration/hadoop-env.xml  |  7 +++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 57 
 6 files changed, 101 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index fa3b118..6389f33 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,3 +136,6 @@ if has_nagios:
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
+
+if security_enabled :
+  dn_proc_user=hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index 25c1067..c192682 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,3 +81,25 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
 Package(params.lzo_packages_for_current_host)
+
+def setup_hadoop_env(replace=False):
+  import params
+
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_empty_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  Link(params.hadoop_conf_dir,
+       to=params.hadoop_conf_empty_dir,
+       not_if=format("ls {hadoop_conf_dir}")
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=InlineTemplate(params.hadoop_env_sh_template),
+       replace=replace
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 22ce519..7e446f1 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,3 +253,5 @@ ttnode_heapsize = "1024m"
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+dn_proc_user=hdfs_user
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/b5ed4413/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 14251cd..36fbc86 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ 

git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

2014-11-06 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk e50d850ce - a4b8c9b30


AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command 
line. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/a4b8c9b3
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/a4b8c9b3
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/a4b8c9b3

Branch: refs/heads/trunk
Commit: a4b8c9b30f2756cb24ee372e5741d76db7b517a1
Parents: e50d850
Author: Siddharth Wagle swa...@hortonworks.com
Authored: Thu Nov 6 09:35:53 2014 -0800
Committer: Siddharth Wagle swa...@hortonworks.com
Committed: Thu Nov 6 09:35:53 2014 -0800

--
 .../2.0.6/hooks/before-ANY/scripts/params.py|  2 +
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 
 .../services/HDFS/package/scripts/params.py |  2 +
 .../services/HDFS/package/scripts/utils.py  | 12 -
 .../services/HDFS/configuration/hadoop-env.xml  |  7 +++
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 57 
 6 files changed, 100 insertions(+), 2 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/a4b8c9b3/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
index fa3b118..ddb2b0b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-ANY/scripts/params.py
@@ -136,3 +136,5 @@ if has_nagios:
 
 user_list = json.loads(config['hostLevelParams']['user_list'])
 group_list = json.loads(config['hostLevelParams']['group_list'])
+
+dn_proc_user=hdfs_user

http://git-wip-us.apache.org/repos/asf/ambari/blob/a4b8c9b3/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index 25c1067..c192682 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,3 +81,25 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
 Package(params.lzo_packages_for_current_host)
+
+def setup_hadoop_env(replace=False):
+  import params
+
+  if params.security_enabled:
+    tc_owner = "root"
+  else:
+    tc_owner = params.hdfs_user
+  Directory(params.hadoop_conf_empty_dir,
+            recursive=True,
+            owner='root',
+            group='root'
+  )
+  Link(params.hadoop_conf_dir,
+       to=params.hadoop_conf_empty_dir,
+       not_if=format("ls {hadoop_conf_dir}")
+  )
+  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
+       owner=tc_owner,
+       content=InlineTemplate(params.hadoop_env_sh_template),
+       replace=replace
+  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/a4b8c9b3/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 22ce519..7e446f1 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -253,3 +253,5 @@ ttnode_heapsize = "1024m"
 dtnode_heapsize = config['configurations']['hadoop-env']['dtnode_heapsize']
 mapred_pid_dir_prefix = default("/configurations/mapred-env/mapred_pid_dir_prefix","/var/run/hadoop-mapreduce")
 mapred_log_dir_prefix = default("/configurations/mapred-env/mapred_log_dir_prefix","/var/log/hadoop-mapreduce")
+
+dn_proc_user=hdfs_user
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/a4b8c9b3/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index a52dc77..08358f5 100644
--- 

ambari git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

2014-11-06 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/branch-1.7.0 9502f0c9d - b7f10831d


AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command 
line. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/b7f10831
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/b7f10831
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/b7f10831

Branch: refs/heads/branch-1.7.0
Commit: b7f10831d48de70052bc308596237775c12494a4
Parents: 9502f0c
Author: Siddharth Wagle swa...@hortonworks.com
Authored: Thu Nov 6 19:10:05 2014 -0800
Committer: Siddharth Wagle swa...@hortonworks.com
Committed: Thu Nov 6 19:10:15 2014 -0800

--
 .../python/resource_management/core/source.py   |  3 +-
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 ---
 .../services/HDFS/package/scripts/utils.py  | 18 --
 .../services/HDFS/configuration/hadoop-env.xml  |  7 +--
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 64 +++-
 5 files changed, 27 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f10831/ambari-common/src/main/python/resource_management/core/source.py
--
diff --git a/ambari-common/src/main/python/resource_management/core/source.py 
b/ambari-common/src/main/python/resource_management/core/source.py
index 22e1c6d..bff22f3 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -114,7 +114,8 @@ else:
   self.context = variables.copy() if variables else {}
   if not hasattr(self, 'template_env'):
     self.template_env = JinjaEnvironment(loader=TemplateLoader(self.env),
-                                         autoescape=False, undefined=StrictUndefined, trim_blocks=True)
+                                         autoescape=False, undefined=StrictUndefined,
+                                         trim_blocks=True)
 
   self.template = self.template_env.get_template(self.name) 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f10831/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index c192682..25c1067 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,25 +81,3 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
 Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
-  import params
-
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template),
-       replace=replace
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/b7f10831/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index c177d9c..28a9ccb 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -103,11 +103,22 @@ def service(action=None, name=None, user=None, 
create_pid_dir=False,
 pass
   pass
 
+  service_is_up = check_process if action == "start" else None
+
   # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
   # in secure mode.
-  if name == 'datanode' and user == 'root':
-    params.dn_proc_user = 'root'
-    hdfs.setup_hadoop_env(replace=True)
+  set_secure_dn_user_cmd="sed -i 's/export HADOOP_SECURE_DN_USER=.*/export  \
+HADOOP_SECURE_DN_USER=\"{0}\"/' {1}"
+  if name == 'datanode' and action == 'start':
+    if user == 'root':
+      secure_dn_user = params.hdfs_user
+    else:
+      secure_dn_user = ""
+    pass
+
+

ambari git commit: AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command line. (swagle)

2014-11-06 Thread swagle
Repository: ambari
Updated Branches:
  refs/heads/trunk 41cf385ff - d256ab8f9


AMBARI-8174. Ambari-deployed cluster can't start datanode as root from command 
line. (swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/d256ab8f
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/d256ab8f
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/d256ab8f

Branch: refs/heads/trunk
Commit: d256ab8f94e9dc1292c9ce69e76235b215ca8dc0
Parents: 41cf385
Author: Siddharth Wagle swa...@hortonworks.com
Authored: Thu Nov 6 19:11:39 2014 -0800
Committer: Siddharth Wagle swa...@hortonworks.com
Committed: Thu Nov 6 19:11:39 2014 -0800

--
 .../python/resource_management/core/source.py   |  3 +-
 .../2.0.6/services/HDFS/package/scripts/hdfs.py | 22 ---
 .../services/HDFS/package/scripts/utils.py  | 18 --
 .../services/HDFS/configuration/hadoop-env.xml  |  7 +--
 .../python/stacks/2.0.6/HDFS/test_datanode.py   | 64 +++-
 5 files changed, 27 insertions(+), 87 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/ambari/blob/d256ab8f/ambari-common/src/main/python/resource_management/core/source.py
--
diff --git a/ambari-common/src/main/python/resource_management/core/source.py 
b/ambari-common/src/main/python/resource_management/core/source.py
index 22e1c6d..bff22f3 100644
--- a/ambari-common/src/main/python/resource_management/core/source.py
+++ b/ambari-common/src/main/python/resource_management/core/source.py
@@ -114,7 +114,8 @@ else:
   self.context = variables.copy() if variables else {}
   if not hasattr(self, 'template_env'):
     self.template_env = JinjaEnvironment(loader=TemplateLoader(self.env),
-                                         autoescape=False, undefined=StrictUndefined, trim_blocks=True)
+                                         autoescape=False, undefined=StrictUndefined,
+                                         trim_blocks=True)
 
   self.template = self.template_env.get_template(self.name) 
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/d256ab8f/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
index c192682..25c1067 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs.py
@@ -81,25 +81,3 @@ def hdfs(name=None):
   
   if params.lzo_enabled:
 Package(params.lzo_packages_for_current_host)
-
-def setup_hadoop_env(replace=False):
-  import params
-
-  if params.security_enabled:
-    tc_owner = "root"
-  else:
-    tc_owner = params.hdfs_user
-  Directory(params.hadoop_conf_empty_dir,
-            recursive=True,
-            owner='root',
-            group='root'
-  )
-  Link(params.hadoop_conf_dir,
-       to=params.hadoop_conf_empty_dir,
-       not_if=format("ls {hadoop_conf_dir}")
-  )
-  File(os.path.join(params.hadoop_conf_dir, 'hadoop-env.sh'),
-       owner=tc_owner,
-       content=InlineTemplate(params.hadoop_env_sh_template),
-       replace=replace
-  )

http://git-wip-us.apache.org/repos/asf/ambari/blob/d256ab8f/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
--
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
index 150d0a4..0983c3f 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/utils.py
@@ -103,11 +103,22 @@ def service(action=None, name=None, user=None, 
create_pid_dir=False,
 pass
   pass
 
+  service_is_up = check_process if action == "start" else None
+
   # Set HADOOP_SECURE_DN_USER correctly in hadoop-env if DN is running as root
   # in secure mode.
-  if name == 'datanode' and user == 'root':
-    params.dn_proc_user = 'root'
-    hdfs.setup_hadoop_env(replace=True)
+  set_secure_dn_user_cmd="sed -i 's/export HADOOP_SECURE_DN_USER=.*/export  \
+HADOOP_SECURE_DN_USER=\"{0}\"/' {1}"
+  if name == 'datanode' and action == 'start':
+    if user == 'root':
+      secure_dn_user = params.hdfs_user
+    else:
+      secure_dn_user = ""
+    pass
+
+