This is an automated email from the ASF dual-hosted git repository.
stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-queryserver.git
The following commit(s) were added to refs/heads/master by this push:
new 87cebd3 PHOENIX-5907 Remove unused part from phoenix_utils.py
87cebd3 is described below
commit 87cebd3a35e648bf562e254f194856e87c0cec8e
Author: Guanghao Zhang <[email protected]>
AuthorDate: Wed May 20 10:16:19 2020 +0800
PHOENIX-5907 Remove unused part from phoenix_utils.py
Closes #35
---
bin/phoenix_utils.py | 109 +++++++++++----------------------------------------
bin/queryserver.py | 14 +++----
2 files changed, 29 insertions(+), 94 deletions(-)
diff --git a/bin/phoenix_utils.py b/bin/phoenix_utils.py
index b03a8b4..9eb94dd 100755
--- a/bin/phoenix_utils.py
+++ b/bin/phoenix_utils.py
@@ -80,9 +80,6 @@ def setPath():
PHOENIX_THIN_CLIENT_JAR_PATTERN = "phoenix-*-thin-client.jar"
PHOENIX_QUERYSERVER_JAR_PATTERN = "phoenix-*-queryserver.jar"
PHOENIX_LOADBALANCER_JAR_PATTERN = "load-balancer-*[!t][!e][!s][!t][!s].jar"
- PHOENIX_TRACESERVER_JAR_PATTERN = "phoenix-tracing-webapp-*-runnable.jar"
- PHOENIX_TESTS_JAR_PATTERN = "phoenix-core-*-tests*.jar"
- PHOENIX_PHERF_JAR_PATTERN = "phoenix-pherf-*-minimal*.jar"
SQLLINE_WITH_DEPS_PATTERN = "sqlline-*-jar-with-dependencies.jar"
# Backward support old env variable PHOENIX_LIB_DIR replaced by PHOENIX_CLASS_PATH
@@ -107,66 +104,27 @@ def setPath():
global hbase_conf_path # keep conf_path around for backward compatibility
hbase_conf_path = hbase_conf_dir
+ global hadoop_conf_dir
+ hadoop_conf_dir = os.getenv('HADOOP_CONF_DIR', None)
+ if not hadoop_conf_dir:
+ if os.name == 'posix':
+ # Try to provide a sane configuration directory for Hadoop if not otherwise provided.
+ # If there's no jaas file specified by the caller, this is necessary when Kerberos is enabled.
+ hadoop_conf_dir = '/etc/hadoop/conf'
+ else:
+ # Try to provide something valid..
+ hadoop_conf_dir = '.'
+
global current_dir
current_dir = os.path.dirname(os.path.abspath(__file__))
global phoenix_queryserver_classpath
phoenix_queryserver_classpath = os.path.join(current_dir, "../lib/*")
- global pherf_conf_path
- pherf_conf_path = os.path.join(current_dir, "config")
- pherf_properties_file = find("pherf.properties", pherf_conf_path)
- if pherf_properties_file == "":
- pherf_conf_path = os.path.join(current_dir, "..", "phoenix-pherf", "config")
-
- global phoenix_jar_path
phoenix_jar_path = os.path.join(current_dir, "..", "phoenix-client", "target","*")
-
global phoenix_client_jar
- phoenix_client_jar = find("phoenix-*[!n]-client.jar", phoenix_jar_path)
+ phoenix_client_jar = find(PHOENIX_CLIENT_JAR_PATTERN, phoenix_class_path)
if phoenix_client_jar == "":
phoenix_client_jar = findFileInPathWithoutRecursion(PHOENIX_CLIENT_JAR_PATTERN, os.path.join(current_dir, ".."))
- if phoenix_client_jar == "":
- phoenix_client_jar = find(PHOENIX_CLIENT_JAR_PATTERN, phoenix_class_path)
-
- global phoenix_test_jar_path
- phoenix_test_jar_path = os.path.join(current_dir, "..", "phoenix-core", "target","*")
-
- global hadoop_conf
- hadoop_conf = os.getenv('HADOOP_CONF_DIR', None)
- if not hadoop_conf:
- if os.name == 'posix':
- # Try to provide a sane configuration directory for Hadoop if not otherwise provided.
- # If there's no jaas file specified by the caller, this is necessary when Kerberos is enabled.
- hadoop_conf = '/etc/hadoop/conf'
- else:
- # Try to provide something valid..
- hadoop_conf = '.'
-
- global hadoop_classpath
- if (os.name != 'nt'):
- hadoop_classpath = findClasspath('hadoop').rstrip()
- else:
- hadoop_classpath = os.getenv('HADOOP_CLASSPATH', '').rstrip()
-
- global hadoop_common_jar_path
- hadoop_common_jar_path = os.path.join(current_dir, "..", "phoenix-client", "target","*").rstrip()
-
- global hadoop_common_jar
- hadoop_common_jar = find("hadoop-common*.jar", hadoop_common_jar_path)
-
- global hadoop_hdfs_jar_path
- hadoop_hdfs_jar_path = os.path.join(current_dir, "..", "phoenix-client", "target","*").rstrip()
-
- global hadoop_hdfs_jar
- hadoop_hdfs_jar = find("hadoop-hdfs*.jar", hadoop_hdfs_jar_path)
-
- global testjar
- testjar = find(PHOENIX_TESTS_JAR_PATTERN, phoenix_test_jar_path)
- if testjar == "":
- testjar = findFileInPathWithoutRecursion(PHOENIX_TESTS_JAR_PATTERN, os.path.join(current_dir, "..", 'lib'))
- if testjar == "":
- testjar = find(PHOENIX_TESTS_JAR_PATTERN, phoenix_class_path)
global phoenix_queryserver_jar
phoenix_queryserver_jar = find(PHOENIX_QUERYSERVER_JAR_PATTERN, os.path.join(current_dir, "..", "queryserver", "target", "*"))
@@ -182,20 +140,6 @@ def setPath():
if phoenix_loadbalancer_jar == "":
phoenix_loadbalancer_jar = findFileInPathWithoutRecursion(PHOENIX_LOADBALANCER_JAR_PATTERN, os.path.join(current_dir, ".."))
- global phoenix_traceserver_jar
- phoenix_traceserver_jar = find(PHOENIX_TRACESERVER_JAR_PATTERN, os.path.join(current_dir, "..", "phoenix-tracing-webapp", "target", "*"))
- if phoenix_traceserver_jar == "":
- phoenix_traceserver_jar = findFileInPathWithoutRecursion(PHOENIX_TRACESERVER_JAR_PATTERN, os.path.join(current_dir, "..", "lib"))
- if phoenix_traceserver_jar == "":
- phoenix_traceserver_jar = findFileInPathWithoutRecursion(PHOENIX_TRACESERVER_JAR_PATTERN, os.path.join(current_dir, ".."))
-
- global phoenix_pherf_jar
- phoenix_pherf_jar = find(PHOENIX_PHERF_JAR_PATTERN, os.path.join(current_dir, "..", "phoenix-pherf", "target", "*"))
- if phoenix_pherf_jar == "":
- phoenix_pherf_jar = findFileInPathWithoutRecursion(PHOENIX_PHERF_JAR_PATTERN, os.path.join(current_dir, "..", "lib"))
- if phoenix_pherf_jar == "":
- phoenix_pherf_jar = findFileInPathWithoutRecursion(PHOENIX_PHERF_JAR_PATTERN, os.path.join(current_dir, ".."))
-
global phoenix_thin_client_jar
phoenix_thin_client_jar = find(PHOENIX_THIN_CLIENT_JAR_PATTERN, os.path.join(current_dir, "..", "queryserver-client", "target", "*"))
if phoenix_thin_client_jar == "":
@@ -228,21 +172,14 @@ def common_sqlline_args(parser):
if __name__ == "__main__":
setPath()
- print("phoenix_class_path:", phoenix_class_path)
- print("hbase_conf_dir:", hbase_conf_dir)
- print("hbase_conf_path:", hbase_conf_path)
- print("current_dir:", current_dir)
- print("phoenix_jar_path:", phoenix_jar_path)
- print("phoenix_client_jar:", phoenix_client_jar)
- print("phoenix_test_jar_path:", phoenix_test_jar_path)
- print("hadoop_common_jar_path:", hadoop_common_jar_path)
- print("hadoop_common_jar:", hadoop_common_jar)
- print("hadoop_hdfs_jar_path:", hadoop_hdfs_jar_path)
- print("hadoop_hdfs_jar:", hadoop_hdfs_jar)
- print("testjar:", testjar)
- print("phoenix_queryserver_jar:", phoenix_queryserver_jar)
- print("phoenix_loadbalancer_jar:", phoenix_loadbalancer_jar)
- print("phoenix_queryserver_classpath", phoenix_queryserver_classpath)
- print("phoenix_thin_client_jar:", phoenix_thin_client_jar)
- print("hadoop_classpath:", hadoop_classpath)
- print("sqlline_with_deps_jar", sqlline_with_deps_jar)
+ print "phoenix_class_path:", phoenix_class_path
+ print "hbase_conf_dir:", hbase_conf_dir
+ print "hbase_conf_path:", hbase_conf_path
+ print "hadoop_conf_dir:", hadoop_conf_dir
+ print "current_dir:", current_dir
+ print "phoenix_client_jar:", phoenix_client_jar
+ print "phoenix_queryserver_jar:", phoenix_queryserver_jar
+ print "phoenix_loadbalancer_jar:", phoenix_loadbalancer_jar
+ print "phoenix_queryserver_classpath", phoenix_queryserver_classpath
+ print "phoenix_thin_client_jar:", phoenix_thin_client_jar
+ print "sqlline_with_deps_jar", sqlline_with_deps_jar
diff --git a/bin/queryserver.py b/bin/queryserver.py
index 64f666f..b7cdd28 100755
--- a/bin/queryserver.py
+++ b/bin/queryserver.py
@@ -73,9 +73,8 @@ else:
# HBase configuration folder path (where hbase-site.xml reside) for
# HBase/Phoenix client side property override
-hbase_config_path = phoenix_utils.hbase_conf_dir
-hadoop_config_path = phoenix_utils.hadoop_conf
-hadoop_classpath = phoenix_utils.hadoop_classpath
+hbase_conf_dir = phoenix_utils.hbase_conf_dir
+hadoop_conf_dir = phoenix_utils.hadoop_conf_dir
# TODO: add windows support
phoenix_file_basename = 'phoenix-%s-queryserver' % getpass.getuser()
@@ -87,10 +86,10 @@ phoenix_pid_file = '%s.pid' % phoenix_file_basename
hbase_env_path = None
hbase_env_cmd = None
if os.name == 'posix':
- hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.sh')
+ hbase_env_path = os.path.join(hbase_conf_dir, 'hbase-env.sh')
hbase_env_cmd = ['bash', '-c', 'source %s && env' % hbase_env_path]
elif os.name == 'nt':
- hbase_env_path = os.path.join(hbase_config_path, 'hbase-env.cmd')
+ hbase_env_path = os.path.join(hbase_conf_dir, 'hbase-env.cmd')
hbase_env_cmd = ['cmd.exe', '/c', 'call %s & set' % hbase_env_path]
if not hbase_env_path or not hbase_env_cmd:
sys.stderr.write("hbase-env file unknown on platform {}{}".format(os.name, os.linesep))
@@ -121,12 +120,11 @@ out_file_path = os.path.join(log_dir, phoenix_out_file)
# " -XX:+UnlockCommercialFeatures -XX:+FlightRecorder -XX:FlightRecorderOptions=defaultrecording=true,dumponexit=true" + \
# The command is run through subprocess so environment variables are automatically inherited
-java_cmd = '%(java)s -cp ' + hbase_config_path + os.pathsep + hadoop_config_path + os.pathsep + \
+java_cmd = '%(java)s -cp ' + hbase_conf_dir + os.pathsep + hadoop_conf_dir + os.pathsep + \
phoenix_utils.phoenix_client_jar + os.pathsep + \
phoenix_utils.phoenix_loadbalancer_jar + os.pathsep + \
phoenix_utils.phoenix_queryserver_jar + os.pathsep + \
- phoenix_utils.phoenix_queryserver_classpath + os.pathsep + \
- hadoop_classpath + \
+ phoenix_utils.phoenix_queryserver_classpath + \
" -Dproc_phoenixserver" + \
" -Dlog4j.configuration=file:" + os.path.join(phoenix_utils.current_dir,
"log4j.properties") + \
" -Dpsql.root.logger=%(root_logger)s" + \