This is an automated email from the ASF dual-hosted git repository.

stoty pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/phoenix-queryserver.git


The following commit(s) were added to refs/heads/master by this push:
     new 299f920  PHOENIX-5824 Add dependency:analyze to queryserver build
299f920 is described below

commit 299f92036d4fe2d5fdd39c90b683aba186a23f60
Author: Istvan Toth <[email protected]>
AuthorDate: Thu Mar 26 07:10:48 2020 +0100

    PHOENIX-5824 Add dependency:analyze to queryserver build
    
    also
    * update guava to 13.0.1 to match phoenix-core
    * update jetty version to latest
    * replace commons-logging with SLF4J
    * copy canary-related classes to the PQS repo
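
    For reference, the commons-logging to SLF4J change applied across the
    load-balancer classes follows the standard migration pattern; a minimal
    sketch (the class body and method are illustrative, the Log/Logger swap
    is the one made in this diff):

        import org.slf4j.Logger;
        import org.slf4j.LoggerFactory;

        public class LoadBalancer {
            // was: private static final Log LOG = LogFactory.getLog(LoadBalancer.class);
            private static final Logger LOG = LoggerFactory.getLogger(LoadBalancer.class);

            void onServerRegistered(String server) {
                // SLF4J parameterized logging defers string construction
                LOG.info("registered query server {}", server);
            }
        }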
---
 load-balancer/pom.xml                              |  88 ++++-
 .../phoenix/end2end/LoadBalancerEnd2EndIT.java     |   6 +-
 .../phoenix/loadbalancer/service/LoadBalancer.java |   6 +-
 .../queryserver/register/ZookeeperRegistry.java    |   7 +-
 pom.xml                                            | 195 ++++++++--
 queryserver-orchestrator/pom.xml                   |  67 +++-
 .../tool/ParameterizedPhoenixCanaryToolIT.java     | 281 ++++++++++++++
 .../org/apache/phoenix/tool/CanaryTestResult.java  |  86 +++++
 .../org/apache/phoenix/tool/PhoenixCanaryTool.java | 426 +++++++++++++++++++++
 .../apache/phoenix/tool/PhoenixCanaryToolTest.java |  89 +++++
 queryserver/pom.xml                                | 121 +++++-
 .../HttpParamImpersonationQueryServerIT.java       |   6 +-
 .../phoenix/end2end/QueryServerBasicsIT.java       |  13 +-
 .../phoenix/end2end/QueryServerEnvironment.java    |   6 +-
 .../phoenix/end2end/SecureQueryServerIT.java       |   6 +-
 .../java/org/apache/phoenix/end2end/TlsUtil.java   |   6 +-
 .../phoenix/queryserver/server/QueryServer.java    |   8 +-
 17 files changed, 1310 insertions(+), 107 deletions(-)
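
For context, the canary classes copied into this repo are driven through PhoenixCanaryTool.main; a minimal usage sketch, mirroring the flags exercised by ParameterizedPhoenixCanaryToolIT below (the thin-driver URL is a placeholder for a real PQS endpoint):

    import org.apache.phoenix.tool.PhoenixCanaryTool;

    public class CanaryExample {
        public static void main(String[] args) {
            // --constring and --logsinkclass are the options the IT passes;
            // the URL follows the tool's documented default format.
            PhoenixCanaryTool.main(new String[] {
                    "--constring", "jdbc:phoenix:thin:serialization=PROTOBUF;url=localhost:8765",
                    "--logsinkclass", "org.apache.phoenix.tool.PhoenixCanaryTool$StdOutSink"
            });
        }
    }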

diff --git a/load-balancer/pom.xml b/load-balancer/pom.xml
index 97939a3..c279d15 100644
--- a/load-balancer/pom.xml
+++ b/load-balancer/pom.xml
@@ -33,27 +33,6 @@
   <name>Phoenix Load Balancer</name>
   <description>A Load balancer which routes calls to Phoenix Query Server</description>
 
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-common</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.curator</groupId>
-      <artifactId>curator-client</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.phoenix</groupId>
-      <artifactId>queryserver</artifactId>
-    </dependency>
-    <!-- for tests -->
-    <dependency>
-      <groupId>org.apache.curator</groupId>
-      <artifactId>curator-test</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
   <build>
     <plugins>
       <plugin>
@@ -84,6 +63,18 @@
         </executions>
       </plugin>
       <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+          <configuration>
+            <!-- false positives -->
+            <ignoredUnusedDeclaredDependencies>
+              <ignoredUnusedDeclaredDependency>
+                org.slf4j:slf4j-api
+              </ignoredUnusedDeclaredDependency>
+            </ignoredUnusedDeclaredDependencies>
+          </configuration>
+      </plugin>
+      <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>
@@ -96,4 +87,59 @@
     </plugins>
   </build>
 
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.phoenix</groupId>
+      <artifactId>queryserver</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-recipes</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-framework</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.calcite.avatica</groupId>
+      <artifactId>avatica</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+
+    <!-- for tests -->
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.curator</groupId>
+      <artifactId>curator-test</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
 </project>
diff --git a/load-balancer/src/it/java/org/apache/phoenix/end2end/LoadBalancerEnd2EndIT.java b/load-balancer/src/it/java/org/apache/phoenix/end2end/LoadBalancerEnd2EndIT.java
index da454ca..448c28f 100644
--- a/load-balancer/src/it/java/org/apache/phoenix/end2end/LoadBalancerEnd2EndIT.java
+++ b/load-balancer/src/it/java/org/apache/phoenix/end2end/LoadBalancerEnd2EndIT.java
@@ -19,8 +19,6 @@
 package org.apache.phoenix.end2end;
 
 import com.google.common.net.HostAndPort;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.curator.CuratorZookeeperClient;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
@@ -34,6 +32,8 @@ import org.apache.phoenix.queryserver.register.Registry;
 import org.apache.phoenix.queryserver.register.ZookeeperRegistry;
 import org.apache.zookeeper.KeeperException;
 import org.junit.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
 import java.util.List;
@@ -41,7 +41,7 @@ import java.util.List;
 public class LoadBalancerEnd2EndIT {
     private static TestingServer testingServer;
     private static CuratorFramework curatorFramework;
-    private static final Log LOG = LogFactory.getLog(LoadBalancerEnd2EndIT.class);
+    private static final Logger LOG = LoggerFactory.getLogger(LoadBalancerEnd2EndIT.class);
     private static final LoadBalanceZookeeperConf LOAD_BALANCER_CONFIGURATION = new LoadBalanceZookeeperConfImpl();
     private static  String path;
     private static LoadBalancer loadBalancer;
diff --git a/load-balancer/src/main/java/org/apache/phoenix/loadbalancer/service/LoadBalancer.java b/load-balancer/src/main/java/org/apache/phoenix/loadbalancer/service/LoadBalancer.java
index 23e9025..9bcf2d4 100644
--- a/load-balancer/src/main/java/org/apache/phoenix/loadbalancer/service/LoadBalancer.java
+++ b/load-balancer/src/main/java/org/apache/phoenix/loadbalancer/service/LoadBalancer.java
@@ -21,8 +21,6 @@ package org.apache.phoenix.loadbalancer.service;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import com.google.common.net.HostAndPort;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.api.UnhandledErrorListener;
@@ -32,6 +30,8 @@ import org.apache.curator.framework.state.ConnectionStateListener;
 import org.apache.curator.retry.ExponentialBackoffRetry;
 import org.apache.curator.utils.CloseableUtils;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.ConnectException;
 import java.util.ArrayList;
@@ -49,7 +49,7 @@ public class LoadBalancer {
 
     private static final LoadBalanceZookeeperConf CONFIG = new LoadBalanceZookeeperConfImpl(HBaseConfiguration.create());
     private static CuratorFramework curaFramework = null;
-    protected static final Log LOG = LogFactory.getLog(LoadBalancer.class);
+    protected static final Logger LOG = LoggerFactory.getLogger(LoadBalancer.class);
     private static PathChildrenCache   cache = null;
     private static final LoadBalancer loadBalancer = new LoadBalancer();
     private ConnectionStateListener connectionStateListener = null;
diff --git a/load-balancer/src/main/java/org/apache/phoenix/queryserver/register/ZookeeperRegistry.java b/load-balancer/src/main/java/org/apache/phoenix/queryserver/register/ZookeeperRegistry.java
index 8aee177..7159e14 100644
--- a/load-balancer/src/main/java/org/apache/phoenix/queryserver/register/ZookeeperRegistry.java
+++ b/load-balancer/src/main/java/org/apache/phoenix/queryserver/register/ZookeeperRegistry.java
@@ -20,23 +20,22 @@ package org.apache.phoenix.queryserver.register;
 
 
 import com.google.common.net.HostAndPort;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.retry.ExponentialBackoffRetry;
 import org.apache.curator.utils.CloseableUtils;
 import org.apache.phoenix.loadbalancer.service.LoadBalanceZookeeperConf;
-import org.apache.phoenix.queryserver.server.QueryServer;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.nio.charset.StandardCharsets;
 
 
 public class ZookeeperRegistry implements Registry {
 
-    private static final Log LOG = LogFactory.getLog(ZookeeperRegistry.class);
+    private static final Logger LOG = LoggerFactory.getLogger(ZookeeperRegistry.class);
     private CuratorFramework client;
 
     public ZookeeperRegistry(){}
diff --git a/pom.xml b/pom.xml
index 08938b3..86ca1b5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -67,19 +67,27 @@
         <!-- General Properties -->
         <top.dir>${project.basedir}</top.dir>
 
-        <!-- Hadoop Versions -->
+        <phoenix.version>4.14.2-HBase-1.4</phoenix.version>
+
+        <!-- Hadoop/HBase Versions -->
         <hbase.version>1.4.10</hbase.version>
         <hadoop-two.version>2.7.5</hadoop-two.version>
-        <curator.version>2.12.0</curator.version>
-        <phoenix.version>4.14.2-HBase-1.4</phoenix.version>
 
         <!-- Dependency versions -->
+        <zookeeper.version>3.4.10</zookeeper.version>
+        <curator.version>2.12.0</curator.version>
+
+        <gson.version>2.2.4</gson.version>
+        <guava.version>13.0.1</guava.version>
+        <jetty.version>9.4.27.v20200227</jetty.version>
+        <commons-io.version>2.5</commons-io.version>
+        <commons-collections.version>3.2.2</commons-collections.version>
         <sqlline.version>1.9.0</sqlline.version>
-        <commons-logging.version>1.2</commons-logging.version>
-        <!-- Do not change jodatime.version until HBASE-15199 is fixed -->
+        <slf4j.version>1.7.30</slf4j.version>
         <avatica.version>1.16.0</avatica.version>
         <servlet.api.version>3.1.0</servlet.api.version>
-        <!-- Test Dependencies -->
+
+        <!-- Test Dependency versions -->
         <mockito-all.version>1.8.5</mockito-all.version>
         <junit.version>4.12</junit.version>
 
@@ -146,6 +154,20 @@
                     </executions>
                 </plugin>
                 <plugin>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <executions>
+                        <execution>
+                            <id>enforce-dependencies</id>
+                            <goals>
+                                <goal>analyze-only</goal>
+                            </goals>
+                            <configuration>
+                                <failOnWarning>true</failOnWarning>
+                            </configuration>
+                        </execution>
+                    </executions>
+                </plugin>
+                <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-source-plugin</artifactId>
                     <executions>
@@ -162,7 +184,6 @@
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-surefire-plugin</artifactId>
                     <configuration>
-                        <forkCount>${numForkedUT}</forkCount>
                         <reuseForks>true</reuseForks>
                         <argLine>-enableassertions -Xmx2250m -XX:MaxPermSize=128m
                             -Djava.security.egd=file:/dev/./urandom "-Djava.library.path=${hadoop.library.path}${path.separator}${java.library.path}" -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=./target/</argLine>
@@ -186,7 +207,7 @@
                                 <redirectTestOutputToFile>true</redirectTestOutputToFile>
                                 <shutdown>kill</shutdown>
                                 <testSourceDirectory>${basedir}/src/it/java</testSourceDirectory>
-                                <trimStackTraces>false</trimStackTraces>
+                                <trimStackTrace>false</trimStackTrace>
                             </configuration>
                             <goals>
                                 <goal>integration-test</goal>
@@ -263,6 +284,10 @@
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
                 <artifactId>maven-surefire-plugin</artifactId>
             </plugin>
             <plugin>
@@ -326,32 +351,20 @@
             <!-- HBase dependencies -->
             <dependency>
                 <groupId>org.apache.hbase</groupId>
-                <artifactId>hbase-common</artifactId>
+                <artifactId>hbase-client</artifactId>
                 <version>${hbase.version}</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.hbase</groupId>
-                <artifactId>hbase-testing-util</artifactId>
+                <artifactId>hbase-common</artifactId>
                 <version>${hbase.version}</version>
-                <scope>test</scope>
-                <optional>true</optional>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.jruby</groupId>
-                        <artifactId>jruby-complete</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.hadoop</groupId>
-                        <artifactId>hadoop-hdfs</artifactId>
-                    </exclusion>
-                </exclusions>
             </dependency>
             <dependency>
                 <groupId>org.apache.hbase</groupId>
-                <artifactId>hbase-it</artifactId>
+                <artifactId>hbase-testing-util</artifactId>
                 <version>${hbase.version}</version>
-                <type>test-jar</type>
                 <scope>test</scope>
+                <optional>true</optional>
                 <exclusions>
                     <exclusion>
                         <groupId>org.jruby</groupId>
@@ -370,11 +383,46 @@
                         <groupId>org.xerial.snappy</groupId>
                         <artifactId>snappy-java</artifactId>
                     </exclusion>
+                    <exclusion>
+                        <groupId>javax.servlet</groupId>
+                        <artifactId>servlet-api</artifactId>
+                    </exclusion>
                 </exclusions>
             </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-auth</artifactId>
+                <version>${hadoop-two.version}</version>
+            </dependency>
 
             <!-- General Dependencies -->
             <dependency>
+              <groupId>com.google.code.gson</groupId>
+              <artifactId>gson</artifactId>
+              <version>${gson.version}</version>
+            </dependency>
+            <dependency>
+              <groupId>com.google.guava</groupId>
+              <artifactId>guava</artifactId>
+              <version>${guava.version}</version>
+            </dependency>
+            <dependency>
+              <groupId>org.eclipse.jetty</groupId>
+              <artifactId>jetty-server</artifactId>
+              <version>${jetty.version}</version>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.zookeeper</groupId>
+                <artifactId>zookeeper</artifactId>
+                <version>${zookeeper.version}</version>
+                <exclusions>
+                  <exclusion>
+                    <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
+                  </exclusion>
+                </exclusions>
+            </dependency>
+            <dependency>
                 <groupId>org.apache.curator</groupId>
                 <artifactId>curator-client</artifactId>
                 <version>${curator.version}</version>
@@ -396,6 +444,11 @@
             </dependency>
 
             <dependency>
+              <groupId>commons-collections</groupId>
+              <artifactId>commons-collections</artifactId>
+              <version>${commons-collections.version}</version>
+            </dependency>
+            <dependency>
                 <groupId>org.apache.calcite.avatica</groupId>
                 <artifactId>avatica</artifactId>
                 <version>${avatica.version}</version>
@@ -412,11 +465,6 @@
                 <version>${sqlline.version}</version>
             </dependency>
             <dependency>
-                <groupId>commons-logging</groupId>
-                <artifactId>commons-logging</artifactId>
-                <version>${commons-logging.version}</version>
-            </dependency>
-            <dependency>
                 <groupId>javax.servlet</groupId>
                 <artifactId>javax.servlet-api</artifactId>
                 <version>${servlet.api.version}</version>
@@ -424,7 +472,7 @@
             <dependency>
                 <groupId>org.slf4j</groupId>
                 <artifactId>slf4j-api</artifactId>
-                <version>1.7.26</version>
+                <version>${slf4j.version}</version>
             </dependency>
             <dependency>
                 <groupId>net.sourceforge.argparse4j</groupId>
@@ -432,7 +480,74 @@
                 <version>0.8.1</version>
             </dependency>
 
-            <!-- Test dependencies -->
+            <!-- HBase test dependencies -->
+            <dependency>
+                <groupId>org.apache.hbase</groupId>
+                <artifactId>hbase-it</artifactId>
+                <version>${hbase.version}</version>
+                <type>test-jar</type>
+                <scope>test</scope>
+                <exclusions>
+                    <exclusion>
+                        <groupId>org.jruby</groupId>
+                        <artifactId>jruby-complete</artifactId>
+                    </exclusion>
+                </exclusions>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hbase</groupId>
+                <artifactId>hbase-server</artifactId>
+                <version>${hbase.version}</version>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hbase</groupId>
+                <artifactId>hbase-server</artifactId>
+                <version>${hbase.version}</version>
+                <type>test-jar</type>
+                <scope>test</scope>
+            </dependency>
+
+            <!-- Hadoop test dependencies -->
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-hdfs</artifactId>
+                <version>${hadoop-two.version}</version>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-hdfs</artifactId>
+                <version>${hadoop-two.version}</version>
+                <type>test-jar</type>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.apache.hadoop</groupId>
+                <artifactId>hadoop-common</artifactId>
+                <version>${hadoop-two.version}</version>
+                <type>test-jar</type>
+                <scope>test</scope>
+            </dependency>
+
+            <!-- Other test dependencies -->
+            <dependency>
+              <groupId>org.eclipse.jetty</groupId>
+              <artifactId>jetty-util</artifactId>
+              <version>${jetty.version}</version>
+              <scope>test</scope>
+            </dependency>
+            <dependency>
+              <groupId>org.eclipse.jetty</groupId>
+              <artifactId>jetty-security</artifactId>
+              <version>${jetty.version}</version>
+              <scope>test</scope>
+            </dependency>
+            <dependency>
+              <groupId>org.eclipse.jetty</groupId>
+              <artifactId>jetty-http</artifactId>
+              <version>${jetty.version}</version>
+            </dependency>
             <dependency>
                 <groupId>org.apache.phoenix</groupId>
                 <artifactId>phoenix-core</artifactId>
@@ -447,25 +562,23 @@
                 <scope>test</scope>
             </dependency>
             <dependency>
+                <groupId>commons-io</groupId>
+                <artifactId>commons-io</artifactId>
+                <version>${commons-io.version}</version>
+                <scope>test</scope>
+            </dependency>
+            <dependency>
                 <groupId>junit</groupId>
                 <artifactId>junit</artifactId>
                 <version>${junit.version}</version>
+                <scope>test</scope>
             </dependency>
             <dependency>
                 <groupId>org.mockito</groupId>
-                <artifactId>mockito-all</artifactId>
+                <artifactId>mockito-core</artifactId>
                 <version>${mockito-all.version}</version>
                 <scope>test</scope>
             </dependency>
-
-            <!-- Required for mini-cluster since hbase built against old version of hadoop -->
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-minicluster</artifactId>
-                <version>${hadoop-two.version}</version>
-                <optional>true</optional>
-                <scope>test</scope>
-            </dependency>
             <dependency>
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-minikdc</artifactId>
diff --git a/queryserver-orchestrator/pom.xml b/queryserver-orchestrator/pom.xml
index ed6cb21..852b010 100644
--- a/queryserver-orchestrator/pom.xml
+++ b/queryserver-orchestrator/pom.xml
@@ -33,10 +33,30 @@
 
     <artifactId>queryserver-orchestrator</artifactId>
 
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <ignoredUnusedDeclaredDependencies>
+                        <!-- Maven has no transitive test dependencies -->
+                        <ignoredUnusedDeclaredDependency>
+                            org.apache.hbase:hbase-testing-util
+                        </ignoredUnusedDeclaredDependency>
+                        <ignoredUnusedDeclaredDependency>
+                            org.apache.hbase:hbase-it
+                        </ignoredUnusedDeclaredDependency>
+                    </ignoredUnusedDeclaredDependencies>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
     <dependencies>
         <dependency>
             <groupId>org.apache.phoenix</groupId>
-            <artifactId>queryserver-client</artifactId>
+            <artifactId>phoenix-core</artifactId>
         </dependency>
         <dependency>
             <groupId>org.slf4j</groupId>
@@ -48,6 +68,10 @@
         </dependency>
         <dependency>
             <groupId>org.apache.curator</groupId>
+            <artifactId>curator-client</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.curator</groupId>
             <artifactId>curator-framework</artifactId>
         </dependency>
         <dependency>
@@ -55,9 +79,48 @@
             <artifactId>curator-recipes</artifactId>
         </dependency>
         <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-common</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+        </dependency>
+        <!-- test -->
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
             <groupId>org.apache.phoenix</groupId>
             <artifactId>phoenix-core</artifactId>
-            <scope>compile</scope>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-it</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hbase</groupId>
+            <artifactId>hbase-testing-util</artifactId>
+            <scope>test</scope>
         </dependency>
     </dependencies>
 
diff --git a/queryserver-orchestrator/src/it/java/org/apache/phoenix/tool/ParameterizedPhoenixCanaryToolIT.java b/queryserver-orchestrator/src/it/java/org/apache/phoenix/tool/ParameterizedPhoenixCanaryToolIT.java
new file mode 100644
index 0000000..815cf67
--- /dev/null
+++ b/queryserver-orchestrator/src/it/java/org/apache/phoenix/tool/ParameterizedPhoenixCanaryToolIT.java
@@ -0,0 +1,281 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+package org.apache.phoenix.tool;
+
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+
+import org.apache.phoenix.end2end.NeedsOwnMiniClusterTest;
+import org.apache.phoenix.query.BaseTest;
+import org.apache.phoenix.query.QueryServices;
+import org.apache.phoenix.util.ReadOnlyProps;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import static org.apache.phoenix.tool.PhoenixCanaryTool.propFileName;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+@RunWith(Parameterized.class)
+@Category(NeedsOwnMiniClusterTest.class)
+public class ParameterizedPhoenixCanaryToolIT extends BaseTest {
+
+       private static final Logger LOGGER =
+                       LoggerFactory.getLogger(ParameterizedPhoenixCanaryToolIT.class);
+       private static final String stdOutSink
+                       = "org.apache.phoenix.tool.PhoenixCanaryTool$StdOutSink";
+       private static final String fileOutSink
+                       = "org.apache.phoenix.tool.PhoenixCanaryTool$FileOutSink";
+
+       private static Map<String, String> serverProps = Maps.newHashMapWithExpectedSize(2);
+       private static Map<String, String> clientProps = Maps.newHashMapWithExpectedSize(2);
+       private static String connString = "";
+       private static Properties canaryProp = new Properties();
+       private static Connection connection = null;
+       private boolean isNamespaceEnabled;
+       private boolean isPositiveTestType;
+       private List<String> cmd = new ArrayList<>();
+       private String resultSinkOption;
+       private ByteArrayOutputStream out = new ByteArrayOutputStream();
+
+       public ParameterizedPhoenixCanaryToolIT(boolean isPositiveTestType,
+                       boolean isNamespaceEnabled, String resultSinkOption) {
+               this.isPositiveTestType = isPositiveTestType;
+               this.isNamespaceEnabled = isNamespaceEnabled;
+               this.resultSinkOption = resultSinkOption;
+       }
+
+       @Parameterized.Parameters(name = "ParameterizedPhoenixCanaryToolIT_isPositiveTestType={0}," +
+                       "isNamespaceEnabled={1},resultSinkOption={2}")
+       public static Collection parametersList() {
+               return Arrays.asList(new Object[][] {
+                       {true, true, stdOutSink},
+                       {true, true, fileOutSink},
+                       {false, true, stdOutSink},
+                       {false, true, fileOutSink},
+                       {true, false, stdOutSink},
+                       {true, false, fileOutSink},
+                       {false, false, stdOutSink},
+                       {false, false, fileOutSink}
+               });
+       }
+
+       @Before
+       public void setup() throws Exception {
+               String createSchema;
+               String createTable;
+
+               if(needsNewCluster()) {
+                       setClientSideNamespaceProperties();
+                       setServerSideNamespaceProperties();
+                       tearDownMiniClusterAsync(1);
+                       setUpTestDriver(new ReadOnlyProps(serverProps.entrySet().iterator()),
+                                       new ReadOnlyProps(clientProps.entrySet().iterator()));
+                       LOGGER.info("New cluster is spun up with test parameters " +
+                                       "isPositiveTestType" + this.isPositiveTestType +
+                                       "isNamespaceEnabled" + this.isNamespaceEnabled +
+                                       "resultSinkOption" + this.resultSinkOption);
+                       connString = BaseTest.getUrl();
+                       connection = getConnection();
+               }
+
+               if (this.isNamespaceEnabled) {
+                       createSchema = "CREATE SCHEMA IF NOT EXISTS TEST";
+                       connection.createStatement().execute(createSchema);
+               }
+               createTable = "CREATE TABLE IF NOT EXISTS TEST.PQSTEST " +
+                                               "(mykey INTEGER NOT NULL 
PRIMARY KEY, mycolumn VARCHAR," +
+                                               " insert_date TIMESTAMP)";
+               connection.createStatement().execute(createTable);
+               cmd.add("--constring");
+               cmd.add(connString);
+               cmd.add("--logsinkclass");
+               cmd.add(this.resultSinkOption);
+               if (this.resultSinkOption.contains(stdOutSink)) {
+                       System.setOut(new java.io.PrintStream(out));
+               } else {
+                       loadCanaryPropertiesFile(canaryProp);
+               }
+       }
+
+       private boolean needsNewCluster() {
+               if (connection == null) {
+                       return true;
+               }
+               if (!clientProps.get(QueryServices.IS_SYSTEM_TABLE_MAPPED_TO_NAMESPACE)
+                               .equalsIgnoreCase(String.valueOf(this.isNamespaceEnabled))) {
+                       return true;
+               }
+               return false;
+       }
+
+       private void setClientSideNamespaceProperties() {
+
+               clientProps.put(QueryServices.IS_SYSTEM_TABLE_MAPPED_TO_NAMESPACE,
+                               String.valueOf(this.isNamespaceEnabled));
+
+               clientProps.put(QueryServices.IS_NAMESPACE_MAPPING_ENABLED,
+                               String.valueOf(this.isNamespaceEnabled));
+       }
+
+       private Connection getConnection() throws SQLException {
+               Properties props = new Properties();
+               props.setProperty(QueryServices.IS_SYSTEM_TABLE_MAPPED_TO_NAMESPACE,
+                               String.valueOf(this.isNamespaceEnabled));
+
+               props.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED,
+                               String.valueOf(this.isNamespaceEnabled));
+               return DriverManager.getConnection(connString, props);
+       }
+
+       void setServerSideNamespaceProperties() {
+               serverProps.put(QueryServices.IS_NAMESPACE_MAPPING_ENABLED,
+                               String.valueOf(this.isNamespaceEnabled));
+               serverProps.put(QueryServices.IS_SYSTEM_TABLE_MAPPED_TO_NAMESPACE,
+                               String.valueOf(this.isNamespaceEnabled));
+       }
+
+       /*
+       *       This test runs in the test suite with the
+       *       combination of parameters provided.
+       *       It exercises the tool in a positive mode, where the test is
+       *       expected to pass, and a negative mode, where it is expected to fail.
+       */
+       @Test
+       public void phoenixCanaryToolTest() throws SQLException, IOException {
+               if (!isPositiveTestType) {
+                       dropTestTable();
+               }
+               PhoenixCanaryTool.main(cmd.toArray(new String[cmd.size()]));
+               Boolean result = getAggregatedResult();
+               if (isPositiveTestType) {
+                       assertTrue(result);
+               } else {
+                       assertFalse(result);
+               }
+       }
+
+       private Boolean getAggregatedResult() throws IOException {
+               HashMap<String, Boolean> resultsMap;
+               Boolean result = true;
+               resultsMap = parsePublishedResults();
+               for (Boolean b : resultsMap.values()) {
+                       result = result && b;
+               }
+               return result;
+       }
+
+       private HashMap<String, Boolean> parsePublishedResults() throws IOException {
+               Gson parser = new Gson();
+               CanaryTestResult[] results;
+               HashMap<String, Boolean> resultsMap = new HashMap<>();
+
+               if (this.resultSinkOption.contains(fileOutSink)) {
+                       File resultFile = getTestResultsFile();
+                       results = parser.fromJson(new FileReader(resultFile),
+                                       CanaryTestResult[].class);
+               } else {
+                       String result = out.toString();
+                       results = parser.fromJson(result, CanaryTestResult[].class);
+               }
+               for (CanaryTestResult r : results) {
+                       resultsMap.put(r.getTestName(), r.isSuccessful());
+               }
+               return resultsMap;
+       }
+
+       private File getTestResultsFile() {
+               File[] files = getLogFileList();
+               return files[0];
+       }
+
+       @After
+       public void teardown() throws SQLException {
+               if (this.isNamespaceEnabled) {
+                       dropTestTableAndSchema();
+               } else {
+                       dropTestTable();
+               }
+               if (this.resultSinkOption.contains(fileOutSink)) {
+                       deleteResultSinkFile();
+               }
+       }
+
+       private void deleteResultSinkFile() {
+               File[] files = getLogFileList();
+               for (final File file : files) {
+                       if (!file.delete()) {
+                               System.err.println("Can't remove " + 
file.getAbsolutePath());
+                       }
+               }
+       }
+
+       private File[] getLogFileList() {
+               File dir = new File(canaryProp.getProperty("file.location"));
+               return dir.listFiles(new FilenameFilter() {
+                       @Override
+                       public boolean accept(File dir, String name) {
+                               return name.endsWith(".log");
+                       }
+               });
+       }
+
+       private void loadCanaryPropertiesFile(Properties prop) {
+               InputStream input = ClassLoader.getSystemResourceAsStream(propFileName);
+               try {
+                       prop.load(input);
+               } catch (IOException e) {
+                       e.printStackTrace();
+               }
+       }
+
+       private void dropTestTable() throws SQLException {
+               String dropTable = "DROP TABLE IF EXISTS TEST.PQSTEST";
+               connection.createStatement().execute(dropTable);
+       }
+
+       private void dropTestTableAndSchema() throws SQLException {
+               dropTestTable();
+               String dropSchema = "DROP SCHEMA IF EXISTS TEST";
+               connection.createStatement().execute(dropSchema);
+       }
+
+}
diff --git a/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/CanaryTestResult.java b/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/CanaryTestResult.java
new file mode 100644
index 0000000..b72439c
--- /dev/null
+++ b/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/CanaryTestResult.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.tool;
+
+public class CanaryTestResult {
+
+    private boolean isSuccessful;
+    private long startTime;
+    private long executionTime;
+    private String message;
+    private String testName;
+    private String timestamp;
+    private Object miscellaneous;
+
+    public Object getMiscellaneous() {
+        return miscellaneous;
+    }
+
+    public void setMiscellaneous(Object miscellaneous) {
+        this.miscellaneous = miscellaneous;
+    }
+
+    public long getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(long startTime) {
+        this.startTime = startTime;
+    }
+
+    public String getTimestamp() {
+        return timestamp;
+    }
+
+    public void setTimestamp(String timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public boolean isSuccessful() {
+        return isSuccessful;
+    }
+
+    public void setSuccessful(boolean successful) {
+        isSuccessful = successful;
+    }
+
+    public long getExecutionTime() {
+        return executionTime;
+    }
+
+    public void setExecutionTime(long executionTime) {
+        this.executionTime = executionTime;
+    }
+
+    public String getMessage() {
+        return message;
+    }
+
+    public void setMessage(String message) {
+        this.message = message;
+    }
+
+    public String getTestName() {
+        return testName;
+    }
+
+    public void setTestName(String testName) {
+        this.testName = testName;
+    }
+
+}
diff --git a/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java b/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java
new file mode 100644
index 0000000..a008799
--- /dev/null
+++ b/queryserver-orchestrator/src/main/java/org/apache/phoenix/tool/PhoenixCanaryTool.java
@@ -0,0 +1,426 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.tool;
+
+import com.google.common.base.Throwables;
+import com.google.common.io.Files;
+import com.google.common.util.concurrent.SimpleTimeLimiter;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import net.sourceforge.argparse4j.ArgumentParsers;
+import net.sourceforge.argparse4j.inf.ArgumentParser;
+import net.sourceforge.argparse4j.inf.ArgumentParserException;
+import net.sourceforge.argparse4j.inf.Namespace;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.RetryCounter;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.InputStream;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * A Canary Tool to perform synthetic tests for Phoenix.
+ * It assumes that TEST.PQSTEST, or the schema.table passed as an argument,
+ * has already been created with a command such as:
+ * CREATE TABLE IF NOT EXISTS TEST.PQSTEST (mykey INTEGER NOT NULL
+ * PRIMARY KEY, mycolumn VARCHAR, insert_date TIMESTAMP);
+ *
+ */
+public class PhoenixCanaryTool extends Configured implements Tool {
+
+    private static String TEST_SCHEMA_NAME = "TEST";
+    private static String TEST_TABLE_NAME = "PQSTEST";
+    private static String FQ_TABLE_NAME = "TEST.PQSTEST";
+    private static Timestamp timestamp;
+    private static final int MAX_CONNECTION_ATTEMPTS = 5;
+    private final int FIRST_TIME_RETRY_TIMEOUT = 5000;
+    private Sink sink = new StdOutSink();
+    public static final String propFileName = "phoenix-canary-file-sink.properties";
+
+    /**
+     * Base class for a Canary Test
+     */
+    private abstract static class CanaryTest {
+
+        CanaryTestResult result = new CanaryTestResult();
+
+        Connection connection = null;
+
+        private void onCreate(Connection connection) {
+            result.setTimestamp(getCurrentTimestamp());
+            result.setStartTime(System.currentTimeMillis());
+            this.connection = connection;
+        }
+
+        abstract void onExecute() throws Exception;
+
+        private void onExit() {
+            result.setExecutionTime(System.currentTimeMillis() -
+                result.getStartTime());
+        }
+
+        CanaryTestResult runTest(Connection connection) {
+            try {
+                onCreate(connection);
+                onExecute();
+                result.setSuccessful(true);
+                result.setMessage("Test " + result.getTestName() + " 
successful");
+            } catch (Exception e) {
+                result.setSuccessful(false);
+                result.setMessage(Throwables.getStackTraceAsString(e));
+            } finally {
+                onExit();
+            }
+            return result;
+        }
+    }
+
+    static class UpsertTableTest extends CanaryTest {
+        void onExecute() throws Exception {
+            result.setTestName("upsertTable");
+            // Insert data
+            timestamp = new Timestamp(System.currentTimeMillis());
+            String stmt = "UPSERT INTO " + FQ_TABLE_NAME
+                    + "(mykey, mycolumn, insert_date) VALUES (?, ?, ?)";
+            PreparedStatement ps = connection.prepareStatement(stmt);
+            ps.setInt(1, 1);
+            ps.setString(2, "Hello World");
+            ps.setTimestamp(3, timestamp);
+            ps.executeUpdate();
+            connection.commit();
+        }
+    }
+
+    static class ReadTableTest extends CanaryTest {
+        void onExecute() throws Exception {
+            result.setTestName("readTable");
+            PreparedStatement ps = connection.prepareStatement("SELECT * FROM "
+                    + FQ_TABLE_NAME+" WHERE INSERT_DATE = ?");
+            ps.setTimestamp(1,timestamp);
+            ResultSet rs = ps.executeQuery();
+
+            int totalRows = 0;
+            while (rs.next()) {
+                totalRows += 1;
+                Integer myKey = rs.getInt(1);
+                String myColumn = rs.getString(2);
+                if (myKey != 1 || !myColumn.equals("Hello World")) {
+                    throw new Exception("Retrieved values do not " +
+                            "match the inserted values");
+                }
+            }
+            if (totalRows != 1) {
+                throw new Exception(totalRows + " rows fetched instead of just one.");
+            }
+            ps.close();
+            rs.close();
+        }
+    }
+
+    /**
+     * Sink interface used by the canary to output information
+     */
+    public interface Sink {
+        List<CanaryTestResult> getResults();
+
+        void updateResults(CanaryTestResult result);
+
+        void publishResults() throws Exception;
+
+        void clearResults();
+    }
+
+    public static class StdOutSink implements Sink {
+        private List<CanaryTestResult> results = new ArrayList<>();
+
+        @Override
+        public void updateResults(CanaryTestResult result) {
+            results.add(result);
+        }
+
+        @Override
+        public List<CanaryTestResult> getResults() {
+            return results;
+        }
+
+        @Override
+        public void publishResults() {
+
+            Gson gson = new GsonBuilder().setPrettyPrinting().create();
+            String resultJson = gson.toJson(results);
+            System.out.println(resultJson);
+        }
+
+        @Override
+        public void clearResults() {
+            results.clear();
+        }
+    }
+
+    /**
+     * Implementation of File Out Sink
+     */
+    public static class FileOutSink implements Sink {
+        private List<CanaryTestResult> results = new ArrayList<>();
+        File dir;
+        String logfileName;
+
+        public FileOutSink() throws Exception {
+            Properties prop = new Properties();
+            InputStream input = ClassLoader.getSystemResourceAsStream(propFileName);
+            if (input == null) {
+                throw new Exception("Cannot load " + propFileName + " file for 
" + "FileOutSink.");
+            }
+            prop.load(input);
+            logfileName = prop.getProperty("file.name");
+            dir = new File(prop.getProperty("file.location"));
+            dir.mkdirs();
+        }
+
+        @Override
+        public void updateResults(CanaryTestResult result) {
+            results.add(result);
+        }
+
+        @Override
+        public List<CanaryTestResult> getResults() {
+            return results;
+        }
+
+        @Override
+        public void publishResults() throws Exception {
+            Gson gson = new GsonBuilder().setPrettyPrinting().create();
+            String resultJson = gson.toJson(results);
+            String fileName = logfileName + "-" + new SimpleDateFormat("yyyy.MM.dd.HH" + ".mm" +
+                    ".ss").format(new Date()) + ".log";
+            File file = new File(dir, fileName);
+            Files.write(Bytes.toBytes(resultJson), file);
+        }
+
+        @Override
+        public void clearResults() {
+            results.clear();
+        }
+    }
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(PhoenixCanaryTool.class);
+
+    private static String getCurrentTimestamp() {
+        return new SimpleDateFormat("yyyy.MM.dd.HH.mm.ss.ms").format(new Date());
+    }
+
+    private static Namespace parseArgs(String[] args) {
+
+        ArgumentParser parser = ArgumentParsers.newFor("Phoenix Canary Test Tool").build()
+                .description("Phoenix Canary Test Tool");
+
+        parser.addArgument("--hostname", "-hn").type(String.class).nargs("?").help("Hostname on "
+                + "which Phoenix is running.");
+
+        parser.addArgument("--port", "-p").type(String.class).nargs("?").help("Port on " +
+                "which Phoenix is running.");
+
+        parser.addArgument("--constring", "-cs").type(String.class).nargs("?").help("Pass an " +
+                "explicit connection String to connect to Phoenix. " +
+                "default: jdbc:phoenix:thin:serialization=PROTOBUF;url=[hostName:port]");
+
+        parser.addArgument("--timeout", "-t").type(String.class).nargs("?").setDefault("60").help
+                ("Maximum time for which the app should run before returning error. default:" + "" +
+                        " 60 sec");
+
+        parser.addArgument("--testschema", "-ts").type(String.class).nargs("?").setDefault
+                (TEST_SCHEMA_NAME).help("Custom name for the test schema. " + "default: " +
+                TEST_SCHEMA_NAME);
+
+        parser.addArgument("--testtable", "-tt").type(String.class).nargs("?").setDefault
+                (TEST_TABLE_NAME).help("Custom name for the test table." + " default: " +
+                TEST_TABLE_NAME);
+
+        parser.addArgument("--logsinkclass", "-lsc").type(String.class).nargs("?").setDefault
+                ("org.apache.phoenix.tool.PhoenixCanaryTool$StdOutSink").help
+                ("Path to a Custom implementation for log sink class. default: stdout");
+
+        Namespace res = null;
+        try {
+            res = parser.parseKnownArgs(args, null);
+        } catch (ArgumentParserException e) {
+            parser.handleError(e);
+        }
+        return res;
+    }
+
+    private CanaryTestResult appInfo = new CanaryTestResult();
+    private Connection connection = null;
+
+    @Override
+    public int run(String[] args) throws Exception {
+
+        try {
+            Namespace cArgs = parseArgs(args);
+            if (cArgs == null) {
+                LOGGER.error("Argument parsing failed.");
+                throw new RuntimeException("Argument parsing failed");
+            }
+
+            final String hostName = cArgs.getString("hostname");
+            final String port = cArgs.getString("port");
+            final String timeout = cArgs.getString("timeout");
+            final String conString = cArgs.getString("constring");
+            final String testSchemaName = cArgs.getString("testschema");
+            final String testTableName = cArgs.getString("testtable");
+            final String logSinkClass = cArgs.getString("logsinkclass");
+
+            TEST_TABLE_NAME = testTableName;
+            TEST_SCHEMA_NAME = testSchemaName;
+            FQ_TABLE_NAME = testSchemaName + "." + testTableName;
+
+            // Check if at least one from host+port or con string is provided.
+            if ((hostName == null || port == null) && conString == null) {
+                throw new RuntimeException("Provide at least one from 
host+port or constring");
+            }
+
+            int timeoutVal = Integer.parseInt(timeout);
+
+            // Dynamically load a class for sink
+            sink = (Sink) ClassLoader.getSystemClassLoader().loadClass(logSinkClass).newInstance();
+
+            long startTime = System.currentTimeMillis();
+
+            String connectionURL = (conString != null) ? conString :
+                    "jdbc:phoenix:thin:serialization=PROTOBUF;url=" + hostName 
+ ":" + port;
+
+            appInfo.setTestName("appInfo");
+            appInfo.setMiscellaneous(connectionURL);
+
+            connection = getConnectionWithRetry(connectionURL);
+
+            if (connection == null) {
+                LOGGER.error("Failed to get connection after multiple retries; 
the connection is null");
+            }
+
+            SimpleTimeLimiter limiter = new SimpleTimeLimiter();
+
+            limiter.callWithTimeout(new Callable<Void>() {
+
+                public Void call() {
+
+                    sink.clearResults();
+
+                    // Execute tests
+                    LOGGER.info("Starting UpsertTableTest");
+                    sink.updateResults(new UpsertTableTest().runTest(connection));
+
+                    LOGGER.info("Starting ReadTableTest");
+                    sink.updateResults(new ReadTableTest().runTest(connection));
+                    return null;
+
+                }
+            }, timeoutVal, TimeUnit.SECONDS, true);
+
+            long estimatedTime = System.currentTimeMillis() - startTime;
+
+            appInfo.setExecutionTime(estimatedTime);
+            appInfo.setSuccessful(true);
+
+        } catch (Exception e) {
+            LOGGER.error(Throwables.getStackTraceAsString(e));
+            appInfo.setMessage(Throwables.getStackTraceAsString(e));
+            appInfo.setSuccessful(false);
+
+        } finally {
+            sink.updateResults(appInfo);
+            sink.publishResults();
+            connection.close();
+        }
+
+        return 0;
+    }
+
+    private Connection getConnectionWithRetry(String connectionURL) {
+        Connection connection=null;
+        try{
+            connection = getConnectionWithRetry(connectionURL, true);
+        } catch (Exception e) {
+            LOGGER.info("Failed to get connection with namespace enabled", e);
+            try {
+                connection = getConnectionWithRetry(connectionURL, false);
+            } catch (Exception ex) {
+                LOGGER.info("Failed to get connection without namespace 
enabled", ex);
+            }
+        }
+        return connection;
+    }
+
+    private Connection getConnectionWithRetry(String connectionURL, boolean namespaceFlag)
+        throws Exception {
+        Properties connProps = new Properties();
+        Connection connection = null;
+
+        connProps.setProperty("phoenix.schema.mapSystemTablesToNamespace", 
String.valueOf(namespaceFlag));
+        connProps.setProperty("phoenix.schema.isNamespaceMappingEnabled", 
String.valueOf(namespaceFlag));
+
+        RetryCounter retrier = new RetryCounter(MAX_CONNECTION_ATTEMPTS,
+                FIRST_TIME_RETRY_TIMEOUT, TimeUnit.MILLISECONDS);
+        LOGGER.info("Trying to get the connection with "
+                + retrier.getMaxAttempts() + " attempts with "
+                + "connectionURL :" + connectionURL
+                + "connProps :" + connProps);
+        while (retrier.shouldRetry()) {
+            try {
+                connection = DriverManager.getConnection(connectionURL, connProps);
+            } catch (SQLException e) {
+                LOGGER.info("Trying to establish connection with "
+                        + retrier.getAttemptTimes() + " attempts", e);
+            }
+            if (connection != null) {
+                LOGGER.info("Successfully established connection within "
+                        + retrier.getAttemptTimes() + " attempts");
+                break;
+            }
+            retrier.sleepUntilNextRetry();
+        }
+        return connection;
+    }
+
+    public static void main(final String[] args) {
+        try {
+            LOGGER.info("Starting Phoenix Canary Test tool...");
+            ToolRunner.run(new PhoenixCanaryTool(), args);
+        } catch (Exception e) {
+            LOGGER.error("Error in running Phoenix Canary Test tool. " + e);
+        }
+        LOGGER.info("Exiting Phoenix Canary Test tool...");
+    }
+}
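
The connection URL assembled above is the standard Avatica thin-client form used against the query server. As a point of reference only (not part of this patch), a minimal sketch of opening the same kind of connection outside the canary tool could look like the following; the host/port and the query are placeholders, and the Phoenix thin-client driver is assumed to be on the classpath:

    // Sketch only, not part of this commit. Connects to a Phoenix Query Server
    // the same way PhoenixCanaryTool does: thin-client PROTOBUF URL plus the two
    // namespace-mapping properties the tool toggles between connection attempts.
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    import java.util.Properties;

    public class ThinClientConnectSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder endpoint; 8765 is the default PQS port.
            String url = "jdbc:phoenix:thin:serialization=PROTOBUF;url=localhost:8765";
            Properties props = new Properties();
            props.setProperty("phoenix.schema.mapSystemTablesToNamespace", "true");
            props.setProperty("phoenix.schema.isNamespaceMappingEnabled", "true");
            try (Connection conn = DriverManager.getConnection(url, props);
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("SELECT COUNT(*) FROM SYSTEM.CATALOG")) {
                while (rs.next()) {
                    System.out.println("SYSTEM.CATALOG rows: " + rs.getLong(1));
                }
            }
        }
    }
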
diff --git a/queryserver-orchestrator/src/test/java/org/apache/phoenix/tool/PhoenixCanaryToolTest.java b/queryserver-orchestrator/src/test/java/org/apache/phoenix/tool/PhoenixCanaryToolTest.java
new file mode 100644
index 0000000..94229c2
--- /dev/null
+++ b/queryserver-orchestrator/src/test/java/org/apache/phoenix/tool/PhoenixCanaryToolTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.phoenix.tool;
+
+import static org.junit.Assert.assertEquals;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import static org.mockito.Mockito.when;
+import org.mockito.MockitoAnnotations;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+public class PhoenixCanaryToolTest {
+
+    @Mock
+    private Connection connection;
+
+    @Mock
+    private Statement statement;
+
+    @Mock
+    private PreparedStatement ps;
+
+    @Mock
+    private ResultSet rs;
+
+    @Mock
+    private DatabaseMetaData dbm;
+
+    @Before
+    public void setUp() {
+        MockitoAnnotations.initMocks(this);
+    }
+
+    @Test
+    public void upsertTableTest() throws Exception {
+        when(connection.createStatement()).thenReturn(statement);
+        when(connection.prepareStatement(Mockito.anyString())).thenReturn(ps);
+        when(statement.executeUpdate(Mockito.anyString())).thenReturn(1);
+        CanaryTestResult result = new PhoenixCanaryTool.UpsertTableTest().runTest(connection);
+        assertEquals(true, result.isSuccessful());
+        assertEquals("Test upsertTable successful", result.getMessage());
+    }
+
+    @Test
+    public void readTableTest() throws Exception {
+        when(connection.prepareStatement(Mockito.anyString())).thenReturn(ps);
+        when(ps.executeQuery()).thenReturn(rs);
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        when(rs.getInt(1)).thenReturn(1);
+        when(rs.getString(2)).thenReturn("Hello World");
+        CanaryTestResult result = new PhoenixCanaryTool.ReadTableTest().runTest(connection);
+        assertEquals(true, result.isSuccessful());
+        assertEquals("Test readTable successful", result.getMessage());
+    }
+
+    @Test
+    public void failTest() throws Exception {
+        when(connection.prepareStatement(Mockito.anyString())).thenReturn(ps);
+        when(ps.executeQuery()).thenReturn(rs);
+        when(rs.getInt(1)).thenReturn(3);
+        when(rs.getString(2)).thenReturn("Incorrect data");
+        when(rs.next()).thenReturn(true).thenReturn(false);
+        CanaryTestResult result = new PhoenixCanaryTool.ReadTableTest().runTest(connection);
+        assertEquals(false, result.isSuccessful());
+        assert (result.getMessage().contains("Retrieved values do not match 
the inserted values"));
+    }
+}
\ No newline at end of file
diff --git a/queryserver/pom.xml b/queryserver/pom.xml
index 07b0261..15ddb0e 100644
--- a/queryserver/pom.xml
+++ b/queryserver/pom.xml
@@ -75,6 +75,31 @@
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <configuration>
+          <ignoredUnusedDeclaredDependencies>
+            <!-- false positive - Why ? -->
+            <ignoredUnusedDeclaredDependency>
+              org.slf4j:slf4j-api
+            </ignoredUnusedDeclaredDependency>
+            <!-- Maven has no transitive test dependencies -->
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-testing-util
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hbase:hbase-it
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hadoop:hadoop-hdfs:test-jar
+            </ignoredUnusedDeclaredDependency>
+            <ignoredUnusedDeclaredDependency>
+              org.apache.hadoop:hadoop-common:test-jar
+            </ignoredUnusedDeclaredDependency>
+          </ignoredUnusedDeclaredDependencies>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
         <executions>
           <execution>
@@ -143,9 +168,25 @@
   </build>
 
   <dependencies>
-     <dependency>
-       <groupId>org.apache.hbase</groupId>
-       <artifactId>hbase-common</artifactId>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-auth</artifactId>
     </dependency>
     <dependency>
       <groupId>org.apache.calcite.avatica</groupId>
@@ -158,6 +199,10 @@
           <groupId>org.apache.calcite.avatica</groupId>
           <artifactId>avatica-core</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>org.apache.calcite.avatica</groupId>
+          <artifactId>avatica-metrics</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
     <dependency>
@@ -165,13 +210,26 @@
       <artifactId>avatica</artifactId>
     </dependency>
     <dependency>
-      <groupId>commons-logging</groupId>
-      <artifactId>commons-logging</artifactId>
-    </dependency>
-    <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>javax.servlet-api</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-server</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.slf4j</groupId>
+      <artifactId>slf4j-api</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>commons-collections</groupId>
+      <artifactId>commons-collections</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+
     <!-- for tests -->
     <dependency>
       <groupId>org.apache.phoenix</groupId>
@@ -190,8 +248,28 @@
       <classifier>tests</classifier>
     </dependency>
     <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-util</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.eclipse.jetty</groupId>
+      <artifactId>jetty-security</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
       <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
+      <artifactId>mockito-core</artifactId>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -201,8 +279,14 @@
       <scope>test</scope>
     </dependency>
     <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -215,5 +299,22 @@
       <artifactId>hadoop-minikdc</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+     </dependency>
   </dependencies>
 </project>
diff --git a/queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java b/queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
index 3990e7c..ccde0e4 100644
--- a/queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
+++ b/queryserver/src/it/java/org/apache/phoenix/end2end/HttpParamImpersonationQueryServerIT.java
@@ -32,8 +32,6 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
@@ -52,12 +50,14 @@ import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @RunWith(Parameterized.class)
 @Category(NeedsOwnMiniClusterTest.class)
 public class HttpParamImpersonationQueryServerIT {
 
-    private static final Log LOG = LogFactory.getLog(HttpParamImpersonationQueryServerIT.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HttpParamImpersonationQueryServerIT.class);
     private static QueryServerEnvironment environment;
 
     private static final List<TableName> SYSTEM_TABLE_NAMES = Arrays.asList(PhoenixDatabaseMetaData.SYSTEM_CATALOG_HBASE_TABLE_NAME,
diff --git a/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerBasicsIT.java b/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerBasicsIT.java
index 767badc..1e2fb93 100644
--- a/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerBasicsIT.java
+++ b/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerBasicsIT.java
@@ -22,7 +22,6 @@ import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_CAT;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_CATALOG;
 import static org.apache.phoenix.jdbc.PhoenixDatabaseMetaData.TABLE_SCHEM;
 import static org.apache.phoenix.query.QueryConstants.SYSTEM_SCHEMA_NAME;
-import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
@@ -39,8 +38,6 @@ import java.sql.Statement;
 import java.util.Properties;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.phoenix.query.QueryServices;
 import org.apache.phoenix.queryserver.QueryServerProperties;
@@ -50,13 +47,15 @@ import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * Smoke test for query server.
  */
 public class QueryServerBasicsIT extends BaseHBaseManagedTimeIT {
 
-  private static final Log LOG = LogFactory.getLog(QueryServerBasicsIT.class);
+  private static final Logger LOG = LoggerFactory.getLogger(QueryServerBasicsIT.class);
 
   private static QueryServerThread AVATICA_SERVER;
   private static Configuration CONF;
@@ -96,7 +95,7 @@ public class QueryServerBasicsIT extends BaseHBaseManagedTimeIT {
   @Test
   public void testCatalogs() throws Exception {
     try (final Connection connection = DriverManager.getConnection(CONN_STRING)) {
-      assertThat(connection.isClosed(), is(false));
+      assertFalse(connection.isClosed());
       try (final ResultSet resultSet = connection.getMetaData().getCatalogs()) {
         final ResultSetMetaData metaData = resultSet.getMetaData();
         assertFalse("unexpected populated resultSet", resultSet.next());
@@ -112,7 +111,7 @@ public class QueryServerBasicsIT extends BaseHBaseManagedTimeIT {
       props.setProperty(QueryServices.IS_NAMESPACE_MAPPING_ENABLED, Boolean.toString(true));
       try (final Connection connection = DriverManager.getConnection(CONN_STRING, props)) {
       connection.createStatement().executeUpdate("CREATE SCHEMA IF NOT EXISTS " + SYSTEM_SCHEMA_NAME);
-      assertThat(connection.isClosed(), is(false));
+      assertFalse(connection.isClosed());
       try (final ResultSet resultSet = connection.getMetaData().getSchemas()) {
         final ResultSetMetaData metaData = resultSet.getMetaData();
         assertTrue("unexpected empty resultset", resultSet.next());
@@ -132,7 +131,7 @@ public class QueryServerBasicsIT extends BaseHBaseManagedTimeIT {
   public void smokeTest() throws Exception {
     final String tableName = name.getMethodName();
     try (final Connection connection = DriverManager.getConnection(CONN_STRING)) {
-      assertThat(connection.isClosed(), is(false));
+      assertFalse(connection.isClosed());
       connection.setAutoCommit(true);
       try (final Statement stmt = connection.createStatement()) {
         assertFalse(stmt.execute("DROP TABLE IF EXISTS " + tableName));
diff --git a/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerEnvironment.java b/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerEnvironment.java
index 1d80d63..17c8856 100644
--- a/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerEnvironment.java
+++ b/queryserver/src/it/java/org/apache/phoenix/end2end/QueryServerEnvironment.java
@@ -27,8 +27,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.io.FileUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -47,6 +45,8 @@ import org.apache.phoenix.queryserver.QueryServerProperties;
 import org.apache.phoenix.queryserver.server.QueryServer;
 import org.apache.phoenix.util.InstanceResolver;
 import org.apache.phoenix.util.ThinClientUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
@@ -62,7 +62,7 @@ import com.google.common.collect.Maps;
  * https://access.redhat.com/solutions/57330
  */
 public class QueryServerEnvironment {
-    private static final Log LOG = LogFactory.getLog(QueryServerEnvironment.class);
+    private static final Logger LOG = LoggerFactory.getLogger(QueryServerEnvironment.class);
 
     private final File TEMP_DIR = new File(getTempDir());
     private final File KEYTAB_DIR = new File(TEMP_DIR, "keytabs");
diff --git a/queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java b/queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
index 515a339..12e2a0c 100644
--- a/queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
+++ b/queryserver/src/it/java/org/apache/phoenix/end2end/SecureQueryServerIT.java
@@ -28,8 +28,6 @@ import java.sql.Statement;
 import java.util.Arrays;
 import java.util.Map.Entry;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.security.token.TokenProvider;
@@ -40,11 +38,13 @@ import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 @RunWith(Parameterized.class)
 @Category(NeedsOwnMiniClusterTest.class)
 public class SecureQueryServerIT {
-    private static final Log LOG = LogFactory.getLog(SecureQueryServerIT.class);
+    private static final Logger LOG = LoggerFactory.getLogger(SecureQueryServerIT.class);
     private static QueryServerEnvironment environment;
 
     @Parameters(name = "tls = {0}")
diff --git a/queryserver/src/it/java/org/apache/phoenix/end2end/TlsUtil.java b/queryserver/src/it/java/org/apache/phoenix/end2end/TlsUtil.java
index f210a46..de757b8 100644
--- a/queryserver/src/it/java/org/apache/phoenix/end2end/TlsUtil.java
+++ b/queryserver/src/it/java/org/apache/phoenix/end2end/TlsUtil.java
@@ -16,9 +16,9 @@ import java.security.cert.X509Certificate;
 import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.http.ssl.KeyStoreTestUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TlsUtil {
 
@@ -31,7 +31,7 @@ public class TlsUtil {
     protected static final File KEYSTORE = new File(TARGET_DIR, "avatica-test-ks.jks");
     protected static final File TRUSTSTORE = new File(TARGET_DIR, "avatica-test-ts.jks");
 
-    private static final Log LOG = LogFactory.getLog(QueryServerBasicsIT.class);
+    private static final Logger LOG = LoggerFactory.getLogger(QueryServerBasicsIT.class);
 
     public static File getTrustStoreFile() {
         return TRUSTSTORE;
diff --git a/queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java b/queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
index 0e755eb..6893dd0 100644
--- a/queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
+++ b/queryserver/src/main/java/org/apache/phoenix/queryserver/server/QueryServer.java
@@ -36,8 +36,6 @@ import org.apache.calcite.avatica.server.RemoteUserExtractionException;
 import org.apache.calcite.avatica.server.HttpRequestRemoteUserExtractor;
 import org.apache.calcite.avatica.server.HttpQueryStringParameterRemoteUserExtractor;
 import org.apache.calcite.avatica.server.ServerCustomizer;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -56,6 +54,8 @@ import org.apache.phoenix.queryserver.QueryServerProperties;
 import org.apache.phoenix.queryserver.register.Registry;
 import org.apache.phoenix.util.InstanceResolver;
 import org.eclipse.jetty.server.Server;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -83,7 +83,7 @@ import javax.servlet.http.HttpServletRequest;
  */
 public final class QueryServer extends Configured implements Tool, Runnable {
 
-  protected static final Log LOG = LogFactory.getLog(QueryServer.class);
+  protected static final Logger LOG = LoggerFactory.getLogger(QueryServer.class);
 
   private final String[] argv;
   private final CountDownLatch runningLatch = new CountDownLatch(1);
@@ -266,7 +266,7 @@ public final class QueryServer extends Configured implements Tool, Runnable {
       server.join();
       return 0;
     } catch (Throwable t) {
-      LOG.fatal("Unrecoverable service error. Shutting down.", t);
+      LOG.error("Unrecoverable service error. Shutting down.", t);
       this.t = t;
       return -1;
     } finally {
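
The logging hunks above all apply the same substitution. As a quick reference (illustrative only, not taken from the patch; the class and method names are made up), the commons-logging to SLF4J mapping and the parameterized-logging idiom it enables look like this:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class LoggingMigrationSketch {
        // Before: private static final Log LOG = LogFactory.getLog(LoggingMigrationSketch.class);
        private static final Logger LOG = LoggerFactory.getLogger(LoggingMigrationSketch.class);

        void start(String host, int port) {
            // {} placeholders defer string building until the level is enabled.
            LOG.info("Query server listening on {}:{}", host, port);
            try {
                // ... start the server ...
            } catch (Exception e) {
                // SLF4J has no fatal(); fatal-level calls become error(), as in QueryServer.java above.
                LOG.error("Unrecoverable service error. Shutting down.", e);
            }
        }
    }
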
