AMBARI-6955. Allow Customers To Define The Sqoop User (dlysnichenko)

Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/65417f71
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/65417f71
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/65417f71

Branch: refs/heads/branch-alerts-dev
Commit: 65417f715c39f36a5d24d7bcf6360af0bcc94fb0
Parents: 0ea954d
Author: Lisnichenko Dmitro <dlysniche...@hortonworks.com>
Authored: Thu Aug 21 20:42:48 2014 +0300
Committer: Lisnichenko Dmitro <dlysniche...@hortonworks.com>
Committed: Thu Aug 21 20:44:12 2014 +0300

----------------------------------------------------------------------
 .../HDP/1.3.2/hooks/before-INSTALL/scripts/params.py  |  4 +++-
 .../before-INSTALL/scripts/shared_initialization.py   |  7 +++++++
 .../1.3.2/services/SQOOP/configuration/sqoop-env.xml  |  5 +++++
 .../1.3.2/services/SQOOP/package/scripts/params.py    |  2 +-
 .../HDP/2.0.6/hooks/before-INSTALL/scripts/params.py  |  2 ++
 .../before-INSTALL/scripts/shared_initialization.py   |  9 ++++++++-
 .../2.0.6/services/SQOOP/configuration/sqoop-env.xml  |  6 +++++-
 .../2.0.6/services/SQOOP/package/scripts/params.py    |  2 +-
 .../HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py |  2 +-
 .../stacks/1.3.2/configs/default.hbasedecom.json      |  3 ++-
 .../src/test/python/stacks/1.3.2/configs/default.json |  3 ++-
 .../stacks/1.3.2/configs/default.non_gmetad_host.json |  3 ++-
 .../src/test/python/stacks/1.3.2/configs/secured.json |  3 ++-
 .../1.3.2/hooks/before-INSTALL/test_before_install.py |  5 +++++
 .../src/test/python/stacks/2.0.6/configs/default.json |  3 ++-
 .../stacks/2.0.6/configs/default.non_gmetad_host.json |  3 ++-
 .../test/python/stacks/2.0.6/configs/ha_default.json  |  5 +++--
 .../test/python/stacks/2.0.6/configs/ha_secured.json  |  5 +++--
 .../src/test/python/stacks/2.0.6/configs/secured.json |  3 ++-
 .../2.0.6/hooks/before-INSTALL/test_before_install.py |  5 +++++
 .../src/test/python/stacks/2.1/configs/default.json   |  3 ++-
 .../src/test/python/stacks/2.1/configs/secured.json   |  3 ++-
 ambari-web/app/data/HDP2/site_properties.js           | 14 ++++++++++++++
 ambari-web/app/data/site_properties.js                | 13 +++++++++++++
 24 files changed, 94 insertions(+), 19 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
index 723cd3d..ce8f34b 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/params.py
@@ -80,6 +80,7 @@ hdfs_user = config['configurations']['hadoop-env']['hdfs_user']
 zk_user = config['configurations']['zookeeper-env']['zk_user']
 gmetad_user = config['configurations']['ganglia-env']["gmetad_user"]
 gmond_user = config['configurations']['ganglia-env']["gmond_user"]
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
@@ -103,6 +104,7 @@ namenode_host = default("/clusterHostInfo/namenode_host", [])
 zk_hosts = default("/clusterHostInfo/zookeeper_hosts", [])
 ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
 
+has_sqoop_client = 'sqoop-env' in config['configurations']
 has_resourcemanager = not len(rm_host) == 0
 has_namenode = not len(namenode_host) == 0
 has_jt = not len(jtnode_host) == 0
@@ -130,4 +132,4 @@ ignore_groupsusers_create = default("/configurations/hadoop-env/ignore_groupsuse
 
 #repo params
 repo_info = config['hostLevelParams']['repo_info']
-service_repo_info = default("/hostLevelParams/service_repo_info",None)
\ No newline at end of file
+service_repo_info = default("/hostLevelParams/service_repo_info",None)

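Note on the hunk above: sqoop_user is read with a direct dict lookup, while
the account's creation in shared_initialization.py (next file) is guarded by
has_sqoop_client. A minimal standalone sketch of that guard pattern, assuming
only the config dict shape visible in the hunk; the literal values are
illustrative, not taken from the patch:

    # 'sqoop-env' appears in the command's configurations only when a Sqoop
    # client is mapped to the host, so its presence serves as the client flag.
    config = {'configurations': {'sqoop-env': {'sqoop_user': 'sqoop'}}}

    has_sqoop_client = 'sqoop-env' in config['configurations']
    sqoop_user = (config['configurations']['sqoop-env']['sqoop_user']
                  if has_sqoop_client else None)
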
http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
index 2e8c8cd..f556335 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -124,6 +124,13 @@ def setup_users():
          ignore_failures = params.ignore_groupsusers_create
     )
 
+  if params.has_sqoop_client:
+    User(params.sqoop_user,
+         gid=params.user_group,
+         groups=[params.user_group],
+         ignore_failures=params.ignore_groupsusers_create
+    )
+
 def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories

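Note on the hunk above: the new block declares the Sqoop account through the
resource_management User resource. A rough standard-library sketch of what
that declaration converges to on a host, under the assumption that useradd
semantics match the resource's create path (the names and the helper are
illustrative, not part of the patch):

    import subprocess

    def ensure_user(name, primary_group, groups, ignore_failures=False):
        # Roughly the User resource's create path: add the account with the
        # given primary (gid) and supplementary groups; optionally swallow
        # failures such as "account already exists".
        cmd = ['useradd', '-g', primary_group, '-G', ','.join(groups), name]
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            if not ignore_failures:
                raise

    ensure_user('sqoop', 'hadoop', ['hadoop'], ignore_failures=False)
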
http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
index 3731f8b..f1753c6 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/configuration/sqoop-env.xml
@@ -45,5 +45,10 @@ export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
 export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
     </value>
   </property>
+  <property>
+    <name>sqoop_user</name>
+    <description>User to run Sqoop as</description>
+    <value>sqoop</value>
+  </property>
   
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
index 4384171..c76a95c 100644
--- a/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/1.3.2/services/SQOOP/package/scripts/params.py
@@ -31,7 +31,7 @@ hbase_home = "/usr"
 hive_home = "/usr"
 zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = "sqoop"
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 keytab_path = config['configurations']['hadoop-env']['keytab_path']
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
index f769b3f..bb4ee71 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/params.py
@@ -41,6 +41,7 @@ gmond_user = config['configurations']['ganglia-env']["gmond_user"]
 storm_user = config['configurations']['storm-env']['storm_user']
 tez_user = config['configurations']['tez-env']['tez_user']
 falcon_user = config['configurations']['falcon-env']['falcon_user']
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 user_group = config['configurations']['hadoop-env']['user_group']
 proxyuser_group =  config['configurations']['hadoop-env']['proxyuser_group']
@@ -66,6 +67,7 @@ ganglia_server_hosts = default("/clusterHostInfo/ganglia_server_host", [])
 storm_server_hosts = default("/clusterHostInfo/nimbus_hosts", [])
 falcon_host =  default('/clusterHostInfo/falcon_server_hosts', [])
 
+has_sqoop_client = 'sqoop-env' in config['configurations']
 has_namenode = not len(namenode_host) == 0
 has_hs = not len(hs_host) == 0
 has_resourcemanager = not len(rm_host) == 0

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
index 8da05ed..0542a3e 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/hooks/before-INSTALL/scripts/shared_initialization.py
@@ -144,13 +144,20 @@ def setup_users():
          ignore_failures = params.ignore_groupsusers_create
     )
     
-  if params.has_tez:  
+  if params.has_tez:
     User(params.tez_user,
       gid=params.user_group,
       groups=[params.proxyuser_group],
       ignore_failures = params.ignore_groupsusers_create
     )
 
+  if params.has_sqoop_client:
+    User(params.sqoop_user,
+         gid=params.user_group,
+         groups=[params.user_group],
+         ignore_failures=params.ignore_groupsusers_create
+    )
+
 def set_uid(user, user_dirs):
   """
   user_dirs - comma separated directories

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
index 3731f8b..d1d56ff 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/configuration/sqoop-env.xml
@@ -45,5 +45,9 @@ export ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}
 export SQOOP_USER_CLASSPATH="`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}"
     </value>
   </property>
-  
+  <property>
+    <name>sqoop_user</name>
+    <description>User to run Sqoop as</description>
+    <value>sqoop</value>
+  </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
index 6c307d3..3f50bef 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/params.py
@@ -32,7 +32,7 @@ hbase_home = "/usr"
 hive_home = "/usr"
 zoo_conf_dir = "/etc/zookeeper"
 sqoop_lib = "/usr/lib/sqoop/lib"
-sqoop_user = "sqoop"
+sqoop_user = config['configurations']['sqoop-env']['sqoop_user']
 
 keytab_path = config['configurations']['hadoop-env']['keytab_path']
 smoke_user_keytab = config['configurations']['hadoop-env']['smokeuser_keytab']

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
index 007c031..f0bd354 100644
--- a/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
+++ b/ambari-server/src/main/resources/stacks/HDP/2.0.6/services/SQOOP/package/scripts/sqoop.py
@@ -24,7 +24,7 @@ def sqoop(type=None):
   import params
   Link(params.sqoop_lib + "/mysql-connector-java.jar",
        to = '/usr/share/java/mysql-connector-java.jar'
-  )
+  ) 
   Directory(params.sqoop_conf_dir,
             owner = params.sqoop_user,
             group = params.user_group

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
index cc1154d..57d1137 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.hbasedecom.json
@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
index 40e79c2..b4037be 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.json
@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
index aab868f..19bafac 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/default.non_gmetad_host.json
@@ -296,7 +296,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
index 03447b3..c364841 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/1.3.2/configs/secured.json
@@ -474,7 +474,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n    "
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    "
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"\n    ",
+            "sqoop_user": "sqoop"
         }, 
         "mapred-env": {
             "mapreduce_userlog_retainhours": "24", 

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
index 34f7b7e..653b47c 100644
--- a/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/1.3.2/hooks/before-INSTALL/test_before_install.py
@@ -133,5 +133,10 @@ class TestHookBeforeInstall(RMFTestCase):
         gid = 'hadoop',
         ignore_failures = False,
     )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = ['hadoop'],
+    )
     self.assertResourceCalled('Package', 'unzip',)
     self.assertNoMoreResources()

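Note on the test above: the new assertion is checked after the case executes
the hook against one of the JSON fixtures amended in this commit. A sketch of
the usual RMFTestCase flow; the executeScript arguments here are assumptions
based on this repo's test layout, not quoted from the file:

    class TestHookBeforeInstall(RMFTestCase):
      def test_hook_default(self):
        # Run the before-INSTALL hook with the default.json fixture; every
        # resource the hook declares is recorded for assertResourceCalled.
        self.executeScript("1.3.2/hooks/before-INSTALL/scripts/hook.py",
                           classname="BeforeInstallHook",
                           command="hook",
                           config_file="default.json")
        self.assertResourceCalled('User', 'sqoop',
            gid = 'hadoop',
            ignore_failures = False,
            groups = ['hadoop'],
        )
        self.assertNoMoreResources()
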
http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
index f32f4bb..c8e8588 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.json
@@ -543,7 +543,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json b/ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
index 319863a..af007cb 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/default.non_gmetad_host.json
@@ -529,7 +529,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
index 7adf696..769e13d 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_default.json
@@ -485,7 +485,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }
     },
     "configuration_attributes": {
@@ -616,4 +617,4 @@
             "c6402.ambari.apache.org"
         ]
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
index 6c61278..02900f6 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/ha_secured.json
@@ -510,7 +510,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }
     },
     "configuration_attributes": {
@@ -641,4 +642,4 @@
             "c6402.ambari.apache.org"
         ]
     }
-}
\ No newline at end of file
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
index ece713b..44a2b69 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.0.6/configs/secured.json
@@ -572,7 +572,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         },
         "hdfs-log4j": {
             "content": "log4jproperties\nline2"

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
index 926a6c6..9dfcff2 100644
--- a/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
+++ b/ambari-server/src/test/python/stacks/2.0.6/hooks/before-INSTALL/test_before_install.py
@@ -154,4 +154,9 @@ class TestHookBeforeInstall(RMFTestCase):
         ignore_failures = False,
         groups = ['users'],
     )
+    self.assertResourceCalled('User', 'sqoop',
+        gid = 'hadoop',
+        ignore_failures = False,
+        groups = ['hadoop'],
+    )
     self.assertNoMoreResources()

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.1/configs/default.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/default.json b/ambari-server/src/test/python/stacks/2.1/configs/default.json
index ddfe340..10f3e4e 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/default.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/default.json
@@ -659,7 +659,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         },
         "hdfs-log4j": {
             "property1": "value1"

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-server/src/test/python/stacks/2.1/configs/secured.json
----------------------------------------------------------------------
diff --git a/ambari-server/src/test/python/stacks/2.1/configs/secured.json b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
index 335d66c..b8b840a 100644
--- a/ambari-server/src/test/python/stacks/2.1/configs/secured.json
+++ b/ambari-server/src/test/python/stacks/2.1/configs/secured.json
@@ -640,7 +640,8 @@
             "content": "\nJAVA_HOME={{java64_home}}\nHADOOP_HOME=${HADOOP_HOME:-{{hadoop_home}}}\n\nif [ -d \"/usr/lib/tez\" ]; then\n  PIG_OPTS=\"$PIG_OPTS -Dmapreduce.framework.name=yarn\"\nfi"
         }, 
         "sqoop-env": {
-            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\""
+            "content": "\n# Set Hadoop-specific environment variables here.\n\n#Set path to where bin/hadoop is available\n#Set path to where bin/hadoop is available\nexport HADOOP_HOME=${HADOOP_HOME:-/usr/lib/hadoop}\n\n#set the path to where bin/hbase is available\nexport HBASE_HOME=${HBASE_HOME:-/usr/lib/hbase}\n\n#Set the path to where bin/hive is available\nexport HIVE_HOME=${HIVE_HOME:-/usr/lib/hive}\n\n#Set the path for where zookeper config dir is\nexport ZOOCFGDIR=${ZOOCFGDIR:-/etc/zookeeper/conf}\n\n# add libthrift in hive to sqoop class path first so hive imports work\nexport SQOOP_USER_CLASSPATH=\"`ls ${HIVE_HOME}/lib/libthrift-*.jar 2> /dev/null`:${SQOOP_USER_CLASSPATH}\"",
+            "sqoop_user": "sqoop"
         }, 
         "hdfs-log4j": {
             "property1": "value1"

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-web/app/data/HDP2/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/HDP2/site_properties.js b/ambari-web/app/data/HDP2/site_properties.js
index 169840f..a4a7d6f 100644
--- a/ambari-web/app/data/HDP2/site_properties.js
+++ b/ambari-web/app/data/HDP2/site_properties.js
@@ -3599,6 +3599,20 @@ module.exports =
     },
     {
       "id": "puppet var",
+      "name": "sqoop_user",
+      "displayName": "Sqoop User",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "MISC",
+      "filename": "sqoop-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["SQOOP"],
+      "index": 17
+    },
+    {
+      "id": "puppet var",
       "name": "rrdcached_base_dir",
       "displayName": "Ganglia rrdcached base directory",
       "description": "Default directory for saving the rrd files on ganglia 
server",

http://git-wip-us.apache.org/repos/asf/ambari/blob/65417f71/ambari-web/app/data/site_properties.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/data/site_properties.js b/ambari-web/app/data/site_properties.js
index 6f82134..af85b91 100644
--- a/ambari-web/app/data/site_properties.js
+++ b/ambari-web/app/data/site_properties.js
@@ -2254,6 +2254,19 @@ module.exports =
     },
     {
       "id": "puppet var",
+      "name": "sqoop_user",
+      "displayName": "Sqoop User",
+      "isReconfigurable": false,
+      "displayType": "user",
+      "isOverridable": false,
+      "isVisible": true,
+      "serviceName": "MISC",
+      "filename": "sqoop-env.xml",
+      "category": "Users and Groups",
+      "belongsToService": ["SQOOP"]
+    },
+    {
+      "id": "puppet var",
       "name": "mapred_user",
       "displayName": "MapReduce User",
       "description": "User to run MapReduce as",
