ambari git commit: AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)

2016-02-02 Thread jaoki
Repository: ambari
Updated Branches:
  refs/heads/branch-2.2 17afb926f -> 2551d9f08


AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/2551d9f0
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/2551d9f0
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/2551d9f0

Branch: refs/heads/branch-2.2
Commit: 2551d9f08702b44f70f9b35a32003bd9bb156ef8
Parents: 17afb92
Author: Jun Aoki 
Authored: Tue Feb 2 17:00:14 2016 -0800
Committer: Jun Aoki 
Committed: Tue Feb 2 17:00:14 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/params.py | 21 ---------------------
 1 file changed, 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/2551d9f0/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 604ddc0..add3c63 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -22,14 +22,7 @@ from hawq_constants import PXF_PORT, pxf_hdfs_test_dir
 from resource_management import Script
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
 from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import conf_select
-try:
-  from resource_management.libraries.functions import hdp_select as hadoop_select
-except ImportError:
-  from resource_management.libraries.functions import phd_select as hadoop_select
-
 
 config = Script.get_config()
 
@@ -70,9 +63,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hadoop_select.get_hadoop_dir("bin")
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
 
 # HDFSResource partial function
@@ -85,17 +75,6 @@ HdfsResource = functools.partial(HdfsResource,
                                  hdfs_site=hdfs_site,
                                  default_fs=default_fs)
 
-# ExecuteHadoop partial function
-ExecuteHadoop = functools.partial(ExecuteHadoop,
-                                  user=hdfs_superuser,
-                                  logoutput=True,
-                                  conf_dir=hadoop_conf_dir,
-                                  security_enabled=security_enabled,
-                                  kinit_path_local=kinit_path_local,
-                                  keytab=hdfs_user_keytab,
-                                  principal=hdfs_principal_name,
-                                  bin_dir=execute_path)
-
 
 # For service Check
 is_pxf_installed = __get_component_host("pxf_hosts") is not None
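
The ExecuteHadoop partial deleted above, and the HdfsResource partial that
survives, both use functools.partial to pre-bind cluster-wide defaults so
call sites only pass what varies. A minimal, self-contained sketch of the
same pattern (the execute_hadoop stand-in and its values are illustrative,
not Ambari's API):

    import functools

    def execute_hadoop(command, user, conf_dir, logoutput=False):
        # Stand-in for a resource that runs a hadoop CLI command.
        print("su %s -c 'hadoop --config %s %s'" % (user, conf_dir, command))

    # Pre-bind site-wide defaults once, as params.py does for HdfsResource,
    # so each call site supplies only the command that varies.
    ExecuteHadoop = functools.partial(execute_hadoop,
                                      user="hdfs",
                                      conf_dir="/etc/hadoop/conf",
                                      logoutput=True)

    ExecuteHadoop("fs -ls /")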



[06/11] ambari git commit: AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)

2016-02-02 Thread ncole
AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6ce15655
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6ce15655
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6ce15655

Branch: refs/heads/branch-dev-patch-upgrade
Commit: 6ce156554600214339c7e8c35219abaf570346e9
Parents: 9a03489
Author: Jun Aoki 
Authored: Mon Feb 1 17:19:59 2016 -0800
Committer: Jun Aoki 
Committed: Mon Feb 1 17:19:59 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/params.py | 21 ---------------------
 1 file changed, 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6ce15655/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 604ddc0..add3c63 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -22,14 +22,7 @@ from hawq_constants import PXF_PORT, pxf_hdfs_test_dir
 from resource_management import Script
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
 from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import conf_select
-try:
-  from resource_management.libraries.functions import hdp_select as hadoop_select
-except ImportError:
-  from resource_management.libraries.functions import phd_select as hadoop_select
-
 
 config = Script.get_config()
 
@@ -70,9 +63,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hadoop_select.get_hadoop_dir("bin")
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
 
 # HDFSResource partial function
@@ -85,17 +75,6 @@ HdfsResource = functools.partial(HdfsResource,
                                  hdfs_site=hdfs_site,
                                  default_fs=default_fs)
 
-# ExecuteHadoop partial function
-ExecuteHadoop = functools.partial(ExecuteHadoop,
-                                  user=hdfs_superuser,
-                                  logoutput=True,
-                                  conf_dir=hadoop_conf_dir,
-                                  security_enabled=security_enabled,
-                                  kinit_path_local=kinit_path_local,
-                                  keytab=hdfs_user_keytab,
-                                  principal=hdfs_principal_name,
-                                  bin_dir=execute_path)
-
 
 # For service Check
 is_pxf_installed = __get_component_host("pxf_hosts") is not None
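
The try/except block removed in this hunk is the standard Python
fallback-import idiom: attempt the preferred module first, and alias an
alternative under the same name when it is unavailable, so the rest of the
file stays stack-agnostic. A small runnable sketch of the idiom, using
stdlib/third-party modules chosen purely for illustration:

    # Prefer the faster third-party module; fall back to the stdlib under
    # the same name so downstream code is unchanged either way.
    try:
        import simplejson as json
    except ImportError:
        import json

    print(json.dumps({"fallback": "ok"}))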



ambari git commit: AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)

2016-02-01 Thread jaoki
Repository: ambari
Updated Branches:
  refs/heads/trunk 9a0348957 -> 6ce156554


AMBARI-14850: Removed unused functions from params.py in HAWQ (bhuvnesh2703 via jaoki)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6ce15655
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6ce15655
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6ce15655

Branch: refs/heads/trunk
Commit: 6ce156554600214339c7e8c35219abaf570346e9
Parents: 9a03489
Author: Jun Aoki 
Authored: Mon Feb 1 17:19:59 2016 -0800
Committer: Jun Aoki 
Committed: Mon Feb 1 17:19:59 2016 -0800

----------------------------------------------------------------------
 .../HAWQ/2.0.0/package/scripts/params.py | 21 ---------------------
 1 file changed, 21 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6ce15655/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
----------------------------------------------------------------------
diff --git a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
index 604ddc0..add3c63 100644
--- a/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
+++ b/ambari-server/src/main/resources/common-services/HAWQ/2.0.0/package/scripts/params.py
@@ -22,14 +22,7 @@ from hawq_constants import PXF_PORT, pxf_hdfs_test_dir
 from resource_management import Script
 from resource_management.libraries.functions.default import default
 from resource_management.libraries.resources.hdfs_resource import HdfsResource
-from resource_management.libraries.resources.execute_hadoop import ExecuteHadoop
 from resource_management.libraries.functions import get_kinit_path
-from resource_management.libraries.functions import conf_select
-try:
-  from resource_management.libraries.functions import hdp_select as hadoop_select
-except ImportError:
-  from resource_management.libraries.functions import phd_select as hadoop_select
-
 
 config = Script.get_config()
 
@@ -70,9 +63,6 @@ security_enabled = config['configurations']['cluster-env']['security_enabled']
 hdfs_user_keytab = config['configurations']['hadoop-env']['hdfs_user_keytab']
 kinit_path_local = get_kinit_path(default('/configurations/kerberos-env/executable_search_paths', None))
 hdfs_principal_name = config['configurations']['hadoop-env']['hdfs_principal_name']
-hadoop_conf_dir = conf_select.get_hadoop_conf_dir()
-hadoop_bin_dir = hadoop_select.get_hadoop_dir("bin")
-execute_path = os.environ['PATH'] + os.pathsep + hadoop_bin_dir
 dfs_nameservice = default('/configurations/hdfs-site/dfs.nameservices', None)
 
 # HDFSResource partial function
@@ -85,17 +75,6 @@ HdfsResource = functools.partial(HdfsResource,
                                  hdfs_site=hdfs_site,
                                  default_fs=default_fs)
 
-# ExecuteHadoop partial function
-ExecuteHadoop = functools.partial(ExecuteHadoop,
-                                  user=hdfs_superuser,
-                                  logoutput=True,
-                                  conf_dir=hadoop_conf_dir,
-                                  security_enabled=security_enabled,
-                                  kinit_path_local=kinit_path_local,
-                                  keytab=hdfs_user_keytab,
-                                  principal=hdfs_principal_name,
-                                  bin_dir=execute_path)
-
 
 # For service Check
 is_pxf_installed = __get_component_host("pxf_hosts") is not None
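
The surviving service-check line derives is_pxf_installed from a host
lookup. Ambari's actual __get_component_host helper is not shown in this
diff; the following is a hypothetical sketch of how such a check typically
works against a cluster topology map (function name and data below are
invented for illustration):

    # Hypothetical helper: return the first host running a component, or
    # None when the component key is absent from the topology map.
    def get_component_host(cluster_host_info, component_key):
        hosts = cluster_host_info.get(component_key, [])
        return hosts[0] if hosts else None

    cluster_host_info = {"pxf_hosts": ["node1.example.com", "node2.example.com"]}
    is_pxf_installed = get_component_host(cluster_host_info, "pxf_hosts") is not None
    print(is_pxf_installed)  # True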