Repository: ambari
Updated Branches:
  refs/heads/trunk ee275bf4d -> 66ce3ed8b


AMBARI-6496. Ambari fails to start services because it still tries to kinit for 
hdfs_user when hdfs_principal_name is customized. (Alejandro Fernandez via 
swagle)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/66ce3ed8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/66ce3ed8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/66ce3ed8

Branch: refs/heads/trunk
Commit: 66ce3ed8baaf1e939959809ffdd0b7f7e934ffbf
Parents: ee275bf
Author: Siddharth Wagle <swa...@hortonworks.com>
Authored: Fri Aug 22 16:59:58 2014 -0700
Committer: Siddharth Wagle <swa...@hortonworks.com>
Committed: Fri Aug 22 16:59:58 2014 -0700

----------------------------------------------------------------------
 .../libraries/providers/hdfs_directory.py       |  2 +-
 .../services/HBASE/package/scripts/params.py    |  1 +
 .../HDFS/package/scripts/hdfs_namenode.py       |  2 +-
 .../services/HDFS/package/scripts/params.py     |  1 +
 .../HIVE/package/scripts/hcat_service_check.py  | 27 ++++++++++++++------
 .../services/HIVE/package/scripts/params.py     |  1 +
 .../MAPREDUCE/package/scripts/params.py         |  1 +
 .../services/OOZIE/package/scripts/params.py    |  1 +
 .../services/WEBHCAT/package/scripts/params.py  |  1 +
 .../services/HBASE/package/scripts/params.py    |  1 +
 .../HDFS/package/scripts/hdfs_namenode.py       |  2 +-
 .../services/HDFS/package/scripts/params.py     |  1 +
 .../HIVE/package/scripts/hcat_service_check.py  | 27 ++++++++++++++------
 .../HIVE/package/scripts/install_jars.py        |  2 +-
 .../services/HIVE/package/scripts/params.py     |  1 +
 .../services/OOZIE/package/scripts/params.py    |  1 +
 .../services/PIG/package/scripts/params.py      |  1 +
 .../services/WEBHCAT/package/scripts/params.py  |  1 +
 .../services/WEBHCAT/package/scripts/webhcat.py |  2 +-
 .../services/YARN/package/scripts/params.py     |  1 +
 .../services/FALCON/package/scripts/params.py   |  1 +
 .../services/YARN/package/scripts/params.py     |  1 +
 .../services/FALCON/package/scripts/params.py   |  1 +
 .../1.3.2/HIVE/test_hive_service_check.py       |  3 ++-
 .../python/stacks/1.3.2/configs/secured.json    |  3 ++-
 .../2.0.6/HIVE/test_hive_service_check.py       |  3 ++-
 .../python/stacks/2.0.6/configs/secured.json    |  5 ++--
 .../test/python/stacks/2.1/configs/secured.json |  5 ++--
 28 files changed, 71 insertions(+), 28 deletions(-)
----------------------------------------------------------------------
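
The change itself is narrow: every kinit invocation and ExecuteHadoop call that authenticates as HDFS now passes the configurable hdfs_principal_name instead of the OS-level hdfs_user. A minimal sketch of the corrected pattern follows, reusing the same Execute/format helpers the stack scripts import from resource_management; the variable values are illustrative, not taken from the patch.

    # Sketch only: why the Kerberos principal, not the OS user, must go to kinit.
    from resource_management import *  # Execute, format -- as the stack scripts do

    kinit_path_local = "/usr/bin/kinit"
    hdfs_user = "hdfs"                                          # OS account running the command
    hdfs_user_keytab = "/etc/security/keytabs/hdfs.headless.keytab"
    hdfs_principal_name = "hdfs-cluster1@EXAMPLE.COM"           # customized principal (illustrative)

    # Before AMBARI-6496 the last token was {hdfs_user}; the keytab only holds the
    # principal, so kinit failed whenever the principal name was customized.
    Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
            user=hdfs_user)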


http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
----------------------------------------------------------------------
diff --git 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
index 08ac9cd..6a40b6d 100644
--- 
a/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
+++ 
b/ambari-common/src/main/python/resource_management/libraries/providers/hdfs_directory.py
@@ -90,7 +90,7 @@ class HdfsDirectoryProvider(Provider):
         chown_commands.append(format("hadoop fs -chown {recursive} {chown} {chown_dirs_str}"))
 
     if secured:
-        Execute(format("{kinit_path} -kt {keytab_file} {hdp_hdfs_user}"),
+        Execute(format("{kinit_path} -kt {keytab_file} {hdfs_principal_name}"),
                 user=hdp_hdfs_user)
     #create all directories in one 'mkdir' call
     dir_list_str = ' '.join(directories_list)

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
index 1b7a154..becee80 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HBASE/package/scripts/params.py
@@ -100,6 +100,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
index 5395255..cb6195b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/hdfs_namenode.py
@@ -39,7 +39,7 @@ def namenode(action=None, do_format=True):
 
     namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
     if params.security_enabled:
-      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
               user = params.hdfs_user)
     Execute(namenode_safe_mode_off,
             tries=40,

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
index df40ae2..58f01f8 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HDFS/package/scripts/params.py
@@ -128,6 +128,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
index 434af16..26de0d5 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/hcat_service_check.py
@@ -48,14 +48,25 @@ def hcat_service_check():
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
             logoutput=True)
 
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  security_enabled=params.security_enabled,
-                  kinit_path_local=params.kinit_path_local,
-                  keytab=params.hdfs_user_keytab
-    )
+    if params.security_enabled:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    principal=params.hdfs_principal_name
+      )
+    else:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab
+      )
 
     cleanup_cmd = format("{kinit_cmd}sh {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
 

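The hunk above duplicates the whole ExecuteHadoop call just to add one keyword argument in the secured case. A sketch of an equivalent, non-duplicated form (not what was committed; it only rearranges the arguments already shown above, and relies on the same params, test_cmd, and ExecuteHadoop names from the surrounding script):

    # Sketch: pass the principal only when security is enabled, without repeating the call.
    hadoop_kwargs = dict(
        user=params.hdfs_user,
        logoutput=True,
        conf_dir=params.hadoop_conf_dir,
        security_enabled=params.security_enabled,
        kinit_path_local=params.kinit_path_local,
        keytab=params.hdfs_user_keytab,
    )
    if params.security_enabled:
        hadoop_kwargs["principal"] = params.hdfs_principal_name
    ExecuteHadoop(test_cmd, **hadoop_kwargs)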
http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
index c1f33de..ef26418 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/HIVE/package/scripts/params.py
@@ -155,6 +155,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
index a7e79d5..ddcb48b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/MAPREDUCE/package/scripts/params.py
@@ -63,6 +63,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 hadoop_pid_dir_prefix = config['configurations']['hadoop-env']['hadoop_pid_dir_prefix']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 import functools
 #create partial functions with common arguments for every HdfsDirectory call
 #to create hdfs directory we need to call params.HdfsDirectory in code

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
index 3c261b6..70aee3b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/OOZIE/package/scripts/params.py
@@ -105,6 +105,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
index a40b074..94d905c 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/WEBHCAT/package/scripts/params.py
@@ -64,6 +64,7 @@ security_param = "true" if security_enabled else "false"
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
index e706769..ac90882 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HBASE/package/scripts/params.py
@@ -107,6 +107,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
index f8850c2..c4b48c6 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py
@@ -52,7 +52,7 @@ def namenode(action=None, do_format=True):
     namenode_safe_mode_off = format("su - {hdfs_user} -c 'hadoop dfsadmin -safemode get' | grep 'Safe mode is OFF'")
 
     if params.security_enabled:
-      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user}"),
+      Execute(format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name}"),
               user = params.hdfs_user)
     Execute(namenode_safe_mode_off,
             tries=40,

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
index 69b916d..5c2f792 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HDFS/package/scripts/params.py
@@ -94,6 +94,7 @@ hive_user = config['configurations']['hive-env']['hive_user']
 smoke_user =  config['configurations']['hadoop-env']['smokeuser']
 mapred_user = config['configurations']['mapred-env']['mapred_user']
 hdfs_user = status_params.hdfs_user
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
index d7bde73..ec8faa9 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/hcat_service_check.py
@@ -47,14 +47,25 @@ def hcat_service_check():
             path=['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
             logoutput=True)
 
-    ExecuteHadoop(test_cmd,
-                  user=params.hdfs_user,
-                  logoutput=True,
-                  conf_dir=params.hadoop_conf_dir,
-                  security_enabled=params.security_enabled,
-                  kinit_path_local=params.kinit_path_local,
-                  keytab=params.hdfs_user_keytab
-    )
+    if params.security_enabled:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab,
+                    principal=params.hdfs_principal_name
+      )
+    else:
+      ExecuteHadoop(test_cmd,
+                    user=params.hdfs_user,
+                    logoutput=True,
+                    conf_dir=params.hadoop_conf_dir,
+                    security_enabled=params.security_enabled,
+                    kinit_path_local=params.kinit_path_local,
+                    keytab=params.hdfs_user_keytab
+      )
 
     cleanup_cmd = format("{kinit_cmd} {tmp_dir}/hcatSmoke.sh hcatsmoke{unique} cleanup")
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
index c02dd6e..b6d542d 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/install_jars.py
@@ -39,7 +39,7 @@ def install_tez_jars():
     params.HdfsDirectory(None, action="create")
 
     if params.security_enabled:
-      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+      kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
     else:
       kinit_if_needed = ""
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
index c492f04..8a82717 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/HIVE/package/scripts/params.py
@@ -168,6 +168,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 
 # Tez libraries

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
index 5b9df78..ebcc984 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/OOZIE/package/scripts/params.py
@@ -97,6 +97,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
index 941860b..570401d 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/PIG/package/scripts/params.py
@@ -28,6 +28,7 @@ tmp_dir = Script.get_tmp_dir()
 pig_conf_dir = "/etc/pig/conf"
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 smokeuser = config['configurations']['hadoop-env']['smokeuser']
 user_group = config['configurations']['hadoop-env']['user_group']
 _authentication = config['configurations']['core-site']['hadoop.security.authentication']

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
index dc088e2..fa9460b 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/params.py
@@ -69,6 +69,7 @@ hadoop_conf_dir = "/etc/hadoop/conf"
 security_param = "true" if security_enabled else "false"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
index 841a946..3092735 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/WEBHCAT/package/scripts/webhcat.py
@@ -74,7 +74,7 @@ def webhcat():
   )
 
   if params.security_enabled:
-    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_user};")
+    kinit_if_needed = format("{kinit_path_local} -kt {hdfs_user_keytab} {hdfs_principal_name};")
   else:
     kinit_if_needed = ""
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
index 8d668d6..e3be1f4 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/YARN/package/scripts/params.py
@@ -123,6 +123,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
index 576980d..3372675 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/FALCON/package/scripts/params.py
@@ -56,6 +56,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
+hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
index f96d2e9..1dda724 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.1.GlusterFS/services/YARN/package/scripts/params.py
@@ -117,6 +117,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['global']['hdfs_user_keytab']
 hdfs_user = config['configurations']['global']['hdfs_user']
+hdfs_principal_name = config['configurations']['global']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path([default("kinit_path_local",None), "/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
index 93e292d..1e8fce9 100644
--- 
a/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
+++ 
b/ambari-server/src/main/resources/stacks/HDP/2.1/services/FALCON/package/scripts/params.py
@@ -57,6 +57,7 @@ hostname = config["hostname"]
 hadoop_conf_dir = "/etc/hadoop/conf"
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
+hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
 kinit_path_local = functions.get_kinit_path(["/usr/bin", "/usr/kerberos/bin", "/usr/sbin"])
 import functools
 #create partial functions with common arguments for every HdfsDirectory call

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py 
b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
index bd2388b..9443e16 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/HIVE/test_hive_service_check.py
@@ -86,7 +86,8 @@ class TestServiceCheck(RMFTestCase):
                              conf_dir = '/etc/hadoop/conf',
                              keytab='/etc/security/keytabs/hdfs.headless.keytab',
                               kinit_path_local='/usr/bin/kinit',
-                              security_enabled=True
+                              security_enabled=True,
+                              principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; sh /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json 
b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
index c364841..9800632 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
@@ -541,7 +541,8 @@
             "namenode_opt_newsize": "200m", 
             "kerberos_domain": "EXAMPLE.COM", 
             "content": "\n# Set Hadoop-specific environment variables 
here.\n\n# The only required environment variable is JAVA_HOME.  All others 
are\n# optional.  When running a distributed configuration it is best to\n# set 
JAVA_HOME in this file, so that it is correctly defined on\n# remote 
nodes.\n\n# The java implementation to use.  Required.\nexport 
JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop 
Configuration Directory\n#TODO: if env var set that can cause problems\nexport 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n# Hadoop home 
directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# this is 
different for HDP1 #\n# Path to jsvc required by secure HDP 2.0 datanode\n# 
export JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. 
Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport 
HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java 
runtime options.  Empty by defaul
 t.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# 
History server logs\nexport 
HADOOP_MAPRED_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# Command specific 
options appended to HADOOP_OPTS when specified\nexport 
HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
-Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_NAMENODE_OPTS}\"\nexport HADOOP_JOBTRACKER_OPTS=\"-server 
-XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir
 _prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc -XX:+PrintGCDetails 
-XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT 
-Dhadoop.mapreduce.jobsummary.logger=INFO,JSA 
-Dmapred.log.dir=$HADOOP_MAPRED_LOG_DIR 
${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server 
-Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console 
-Dmapred.audit.logger=ERROR,console 
${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} 
-Dhadoop.security.logger=ERROR,DRFAS 
${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server 
-Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport 
HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 
-XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -v
 erbose:gc -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple 
commands (fs, dfs, fsck, distcp etc)\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure 
datanodes, user to run the datanode as after dropping privileges\nexport 
HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options.  Empty by 
default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o 
SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored.  $HADOOP_HOME/logs 
by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# Where log 
files are stored in the secure data environment.\nexport 
HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.\n# export 
HADOOP_SLAVES=${HAD
 OOP_HOME}/conf/slaves\n\n# host:path where hadoop code should be rsync'd from. 
 Unset by default.\n# export HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# 
Seconds to sleep between slave commands.  Unset by default.  This\n# can be 
useful in large clusters, where, e.g., slave rsyncs can\n# otherwise arrive 
faster than the master can service them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# 
The directory where pid files are stored. /tmp by default.\nexport 
HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport 
HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
History server pid\nexport 
HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n#
 A string representing this instance of hadoop. $USER by default.\nexport 
HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes.  
See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from stan
 dard classpath\nJAVA_JDBC_LIBS=\"\"\n#Add libraries required by mysql 
connector\nfor jarFile in `ls /usr/share/java/*mysql* 2>/dev/null`\ndo\n  
JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
oracle connector\nfor jarFile in `ls /usr/share/java/*ojdbc* 2>/dev/null`\ndo\n 
 JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport 
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\n# 
Setting path to hdfs command line\nexport 
HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 
2.0\nexport 
JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64\n
    ", 
-            "hdfs_user": "hdfs", 
+            "hdfs_user": "hdfs",
+            "hdfs_principal_name": "hdfs",
             "user_group": "hadoop", 
             "dtnode_heapsize": "1024m", 
             "proxyuser_group": "users", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
----------------------------------------------------------------------
diff --git 
a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py 
b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
index f7631dd..eefb6b9 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py
@@ -86,7 +86,8 @@ class TestServiceCheck(RMFTestCase):
                         conf_dir = '/etc/hadoop/conf',
                         keytab='/etc/security/keytabs/hdfs.headless.keytab',
                         kinit_path_local='/usr/bin/kinit',
-                        security_enabled=True
+                        security_enabled=True,
+                        principal='hdfs'
     )
     self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa;  /tmp/hcatSmoke.sh hcatsmoke cleanup',
                         logoutput = True,

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index 44a2b69..f9bfa98 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -476,8 +476,9 @@
             "namenode_opt_newsize": "200m", 
             "kerberos_domain": "EXAMPLE.COM", 
             "content": "\n# Set Hadoop-specific environment variables 
here.\n\n# The only required environment variable is JAVA_HOME.  All others 
are\n# optional.  When running a distributed configuration it is best to\n# set 
JAVA_HOME in this file, so that it is correctly defined on\n# remote 
nodes.\n\n# The java implementation to use.  Required.\nexport 
JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home 
directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# Hadoop 
Configuration Directory\n#TODO: if env var set that can cause problems\nexport 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n{# this is different 
for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport 
JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. 
Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport 
HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java 
runtime options.  Empty by defaul
 t.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# 
Command specific options appended to HADOOP_OPTS when specified\nexport 
HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
-Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server 
-XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTime
 Stamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT 
-Dhadoop.mapreduce.jobsummary.logger=INFO,JSA 
${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server 
-Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console 
-Dmapred.audit.logger=ERROR,console 
${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} 
-Dhadoop.security.logger=ERROR,DRFAS 
${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server 
-Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport 
HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 
-XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize
 }} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple 
commands (fs, dfs, fsck, distcp etc)\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure 
datanodes, user to run the datanode as after dropping privileges\nexport 
HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options.  Empty by 
default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o 
SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored.  $HADOOP_HOME/logs 
by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History 
server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# 
Where log files are stored in the secure data environment.\nexport 
HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.\n# export 
HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where ha
 doop code should be rsync'd from.  Unset by default.\n# export 
HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave 
commands.  Unset by default.  This\n# can be useful in large clusters, where, 
e.g., slave rsyncs can\n# otherwise arrive faster than the master can service 
them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are 
stored. /tmp by default.\nexport 
HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport 
HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
History server pid\nexport 
HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n#
 A string representing this instance of hadoop. $USER by default.\nexport 
HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes.  
See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard 
classpath\nJAVA_JDBC_LIBS=\"\"\n#Add lib
 raries required by mysql connector\nfor jarFile in `ls /usr/share/java/*mysql* 
2>/dev/null`\ndo\n  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add 
libraries required by oracle connector\nfor jarFile in `ls 
/usr/share/java/*ojdbc* 2>/dev/null`\ndo\n  
JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport 
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\nif [ 
-d \"/usr/lib/tez\" ]; then\n  export 
HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf\nfi\n\n#
 Setting path to hdfs command line\nexport 
HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 
2.0\nexport 
JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64",
 
-            "hdfs_user": "hdfs", 
-            "user_group": "hadoop", 
+            "hdfs_user": "hdfs",
+            "hdfs_principal_name": "hdfs",
+            "user_group": "hadoop",
             "dtnode_heapsize": "1024m", 
             "proxyuser_group": "users", 
             "smokeuser": "ambari-qa", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/66ce3ed8/ambari-server/src/test/python/stacks/2.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured.json 
b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
index b8b840a..3e22de8 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
@@ -521,8 +521,9 @@
             "namenode_opt_newsize": "200m", 
             "kerberos_domain": "EXAMPLE.COM", 
             "content": "\n# Set Hadoop-specific environment variables 
here.\n\n# The only required environment variable is JAVA_HOME.  All others 
are\n# optional.  When running a distributed configuration it is best to\n# set 
JAVA_HOME in this file, so that it is correctly defined on\n# remote 
nodes.\n\n# The java implementation to use.  Required.\nexport 
JAVA_HOME={{java_home}}\nexport HADOOP_HOME_WARN_SUPPRESS=1\n\n# Hadoop home 
directory\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n# Hadoop 
Configuration Directory\n#TODO: if env var set that can cause problems\nexport 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-{{hadoop_conf_dir}}}\n\n{# this is different 
for HDP1 #}\n# Path to jsvc required by secure HDP 2.0 datanode\nexport 
JSVC_HOME={{jsvc_path}}\n\n\n# The maximum amount of heap to use, in MB. 
Default is 1000.\nexport HADOOP_HEAPSIZE=\"{{hadoop_heapsize}}\"\n\nexport 
HADOOP_NAMENODE_INIT_HEAPSIZE=\"-Xms{{namenode_heapsize}}\"\n\n# Extra Java 
runtime options.  Empty by defaul
 t.\nexport HADOOP_OPTS=\"-Djava.net.preferIPv4Stack=true ${HADOOP_OPTS}\"\n\n# 
Command specific options appended to HADOOP_OPTS when specified\nexport 
HADOOP_NAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
-Xms{{namenode_heapsize}} -Xmx{{namenode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_NAMENODE_OPTS}\"\nHADOOP_JOBTRACKER_OPTS=\"-server 
-XX:ParallelGCThreads=8 -XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{jtnode_opt_newsize}} -XX:MaxNewSize={{jtnode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTime
 Stamps -XX:+PrintGCDateStamps -Xmx{{jtnode_heapsize}} 
-Dhadoop.security.logger=INFO,DRFAS -Dmapred.audit.logger=INFO,MRAUDIT 
-Dhadoop.mapreduce.jobsummary.logger=INFO,JSA 
${HADOOP_JOBTRACKER_OPTS}\"\n\nHADOOP_TASKTRACKER_OPTS=\"-server 
-Xmx{{ttnode_heapsize}} -Dhadoop.security.logger=ERROR,console 
-Dmapred.audit.logger=ERROR,console 
${HADOOP_TASKTRACKER_OPTS}\"\nHADOOP_DATANODE_OPTS=\"-Xmx{{dtnode_heapsize}} 
-Dhadoop.security.logger=ERROR,DRFAS 
${HADOOP_DATANODE_OPTS}\"\nHADOOP_BALANCER_OPTS=\"-server 
-Xmx{{hadoop_heapsize}}m ${HADOOP_BALANCER_OPTS}\"\n\nexport 
HADOOP_SECONDARYNAMENODE_OPTS=\"-server -XX:ParallelGCThreads=8 
-XX:+UseConcMarkSweepGC 
-XX:ErrorFile={{hdfs_log_dir_prefix}}/$USER/hs_err_pid%p.log 
-XX:NewSize={{namenode_opt_newsize}} -XX:MaxNewSize={{namenode_opt_maxnewsize}} 
-Xloggc:{{hdfs_log_dir_prefix}}/$USER/gc.log-`date +'%Y%m%d%H%M'` -verbose:gc 
-XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps 
${HADOOP_NAMENODE_INIT_HEAPSIZE} -Xmx{{namenode_heapsize
 }} -Dhadoop.security.logger=INFO,DRFAS -Dhdfs.audit.logger=INFO,DRFAAUDIT 
${HADOOP_SECONDARYNAMENODE_OPTS}\"\n\n# The following applies to multiple 
commands (fs, dfs, fsck, distcp etc)\nexport 
HADOOP_CLIENT_OPTS=\"-Xmx${HADOOP_HEAPSIZE}m $HADOOP_CLIENT_OPTS\"\n# On secure 
datanodes, user to run the datanode as after dropping privileges\nexport 
HADOOP_SECURE_DN_USER={{hdfs_user}}\n\n# Extra ssh options.  Empty by 
default.\nexport HADOOP_SSH_OPTS=\"-o ConnectTimeout=5 -o 
SendEnv=HADOOP_CONF_DIR\"\n\n# Where log files are stored.  $HADOOP_HOME/logs 
by default.\nexport HADOOP_LOG_DIR={{hdfs_log_dir_prefix}}/$USER\n\n# History 
server logs\nexport HADOOP_MAPRED_LOG_DIR={{mapred_log_dir_prefix}}/$USER\n\n# 
Where log files are stored in the secure data environment.\nexport 
HADOOP_SECURE_DN_LOG_DIR={{hdfs_log_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.\n# export 
HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves\n\n# host:path where ha
 doop code should be rsync'd from.  Unset by default.\n# export 
HADOOP_MASTER=master:/home/$USER/src/hadoop\n\n# Seconds to sleep between slave 
commands.  Unset by default.  This\n# can be useful in large clusters, where, 
e.g., slave rsyncs can\n# otherwise arrive faster than the master can service 
them.\n# export HADOOP_SLAVE_SLEEP=0.1\n\n# The directory where pid files are 
stored. /tmp by default.\nexport 
HADOOP_PID_DIR={{hadoop_pid_dir_prefix}}/$USER\nexport 
HADOOP_SECURE_DN_PID_DIR={{hadoop_pid_dir_prefix}}/$HADOOP_SECURE_DN_USER\n\n# 
History server pid\nexport 
HADOOP_MAPRED_PID_DIR={{mapred_pid_dir_prefix}}/$USER\n\nYARN_RESOURCEMANAGER_OPTS=\"-Dyarn.server.resourcemanager.appsummary.logger=INFO,RMSUMMARY\"\n\n#
 A string representing this instance of hadoop. $USER by default.\nexport 
HADOOP_IDENT_STRING=$USER\n\n# The scheduling priority for daemon processes.  
See 'man nice'.\n\n# export HADOOP_NICENESS=10\n\n# Use libraries from standard 
classpath\nJAVA_JDBC_LIBS=\"\"\n#Add lib
 raries required by mysql connector\nfor jarFile in `ls /usr/share/java/*mysql* 
2>/dev/null`\ndo\n  JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add 
libraries required by oracle connector\nfor jarFile in `ls 
/usr/share/java/*ojdbc* 2>/dev/null`\ndo\n  
JAVA_JDBC_LIBS=${JAVA_JDBC_LIBS}:$jarFile\ndone\n#Add libraries required by 
nodemanager\nMAPREDUCE_LIBS={{mapreduce_libs_path}}\nexport 
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}${JAVA_JDBC_LIBS}:${MAPREDUCE_LIBS}\n\nif [ 
-d \"/usr/lib/tez\" ]; then\n  export 
HADOOP_CLASSPATH=$HADOOP_CLASSPATH:/usr/lib/tez/*:/usr/lib/tez/lib/*:/etc/tez/conf\nfi\n\n#
 Setting path to hdfs command line\nexport 
HADOOP_LIBEXEC_DIR={{hadoop_libexec_dir}}\n\n#Mostly required for hadoop 
2.0\nexport 
JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:/usr/lib/hadoop/lib/native/Linux-amd64-64",
 
-            "hdfs_user": "hdfs", 
-            "user_group": "hadoop", 
+            "hdfs_user": "hdfs",
+            "hdfs_principal_name": "hdfs",
+            "user_group": "hadoop",
             "dtnode_heapsize": "1024m", 
             "proxyuser_group": "users", 
             "smokeuser": "ambari-qa", 
