HBASE-16267 Revert due to failure of TestLogLevel

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/4e08a8be
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/4e08a8be
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/4e08a8be

Branch: refs/heads/hbase-12439
Commit: 4e08a8bec9ef807134392ed05d7f0fd6eb0b1f38
Parents: 977858c
Author: tedyu <yuzhih...@gmail.com>
Authored: Sat Aug 6 09:27:54 2016 -0700
Committer: tedyu <yuzhih...@gmail.com>
Committed: Sat Aug 6 09:27:54 2016 -0700

----------------------------------------------------------------------
 hbase-rest/pom.xml                              |   4 -
 .../hadoop/hbase/rest/MultiRowResource.java     |   2 +-
 .../apache/hadoop/hbase/rest/client/Client.java | 210 +++++++------------
 .../hadoop/hbase/rest/client/Response.java      |  21 +-
 .../hbase/rest/TestGetAndPutResource.java       |   4 +-
 .../hadoop/hbase/rest/TestGzipFilter.java       |  21 +-
 .../hadoop/hbase/rest/TestMultiRowResource.java |   5 +-
 .../hadoop/hbase/rest/TestScannerResource.java  |   2 +-
 .../hadoop/hbase/rest/TestSchemaResource.java   |   5 +-
 .../hbase/rest/client/TestRemoteTable.java      |  11 +-
 pom.xml                                         |  18 +-
 11 files changed, 107 insertions(+), 196 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-rest/pom.xml b/hbase-rest/pom.xml
index 4d42a7a..2e1b245 100644
--- a/hbase-rest/pom.xml
+++ b/hbase-rest/pom.xml
@@ -279,10 +279,6 @@
       <artifactId>httpclient</artifactId>
     </dependency>
     <dependency>
-      <groupId>org.apache.httpcomponents</groupId>
-      <artifactId>httpcore</artifactId>
-    </dependency>
-    <dependency>
       <groupId>commons-lang</groupId>
       <artifactId>commons-lang</artifactId>
     </dependency>

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
index b952c00..9cafe27 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/MultiRowResource.java
@@ -115,7 +115,7 @@ public class MultiRowResource extends ResourceBase implements Constants {
         servlet.getMetrics().incrementSucessfulGetRequests(1);
         return Response.ok(model).build();
       }
-    } catch (IOException e) {
+    } catch (Exception e) {
       servlet.getMetrics().incrementFailedGetRequests(1);
       return processException(e);
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
index 204f688..e26de63 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Client.java
@@ -19,34 +19,30 @@
 
 package org.apache.hadoop.hbase.rest.client;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
-import java.net.URI;
-import java.net.URISyntaxException;
 import java.util.Collections;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
+import org.apache.commons.httpclient.Header;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.HttpMethod;
+import org.apache.commons.httpclient.HttpVersion;
+import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
+import org.apache.commons.httpclient.URI;
+import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
+import org.apache.commons.httpclient.methods.DeleteMethod;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.commons.httpclient.methods.HeadMethod;
+import org.apache.commons.httpclient.methods.PostMethod;
+import org.apache.commons.httpclient.methods.PutMethod;
+import org.apache.commons.httpclient.params.HttpClientParams;
+import org.apache.commons.httpclient.params.HttpConnectionManagerParams;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.entity.InputStreamEntity;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.params.CoreConnectionPNames;
-import org.apache.http.util.EntityUtils;
 
 /**
  * A wrapper around HttpClient which provides some useful function and
@@ -62,8 +58,6 @@ public class Client {
   private HttpClient httpClient;
   private Cluster cluster;
   private boolean sslEnabled;
-  private HttpResponse resp;
-  private HttpGet httpGet = null;
 
   private Map<String, String> extraHeaders;
 
@@ -77,13 +71,18 @@ public class Client {
   private void initialize(Cluster cluster, boolean sslEnabled) {
     this.cluster = cluster;
     this.sslEnabled = sslEnabled;
+    MultiThreadedHttpConnectionManager manager =
+      new MultiThreadedHttpConnectionManager();
+    HttpConnectionManagerParams managerParams = manager.getParams();
+    managerParams.setConnectionTimeout(2000); // 2 s
+    managerParams.setDefaultMaxConnectionsPerHost(10);
+    managerParams.setMaxTotalConnections(100);
     extraHeaders = new ConcurrentHashMap<String, String>();
-    String clspath = System.getProperty("java.class.path");
-    LOG.debug("classpath " + clspath);
-    this.httpClient = new DefaultHttpClient();
-    this.httpClient.getParams().setIntParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 2000);
-  }
+    this.httpClient = new HttpClient(manager);
+    HttpClientParams clientParams = httpClient.getParams();
+    clientParams.setVersion(HttpVersion.HTTP_1_1);
 
+  }
   /**
    * Constructor
    * @param cluster the cluster definition
@@ -105,6 +104,9 @@ public class Client {
    * Shut down the client. Close any open persistent connections.
    */
   public void shutdown() {
+    MultiThreadedHttpConnectionManager manager =
+      (MultiThreadedHttpConnectionManager) httpClient.getHttpConnectionManager();
+    manager.shutdown();
   }
 
   /**
@@ -157,7 +159,7 @@ public class Client {
    * @return the HTTP response code
    * @throws IOException
    */
-  public HttpResponse executePathOnly(Cluster cluster, HttpUriRequest method,
+  public int executePathOnly(Cluster cluster, HttpMethod method,
       Header[] headers, String path) throws IOException {
     IOException lastException;
     if (cluster.nodes.size() < 1) {
@@ -176,29 +178,10 @@ public class Client {
         }
         sb.append(cluster.lastHost);
         sb.append(path);
-        URI uri = new URI(sb.toString());
-        if (method instanceof HttpPut) {
-          HttpPut put = new HttpPut(uri);
-          put.setEntity(((HttpPut) method).getEntity());
-          put.setHeaders(method.getAllHeaders());
-          method = put;
-        } else if (method instanceof HttpGet) {
-          method = new HttpGet(uri);
-        } else if (method instanceof HttpHead) {
-          method = new HttpHead(uri);
-        } else if (method instanceof HttpDelete) {
-          method = new HttpDelete(uri);
-        } else if (method instanceof HttpPost) {
-          HttpPost post = new HttpPost(uri);
-          post.setEntity(((HttpPost) method).getEntity());
-          post.setHeaders(method.getAllHeaders());
-          method = post;
-        }
+        URI uri = new URI(sb.toString(), true);
         return executeURI(method, headers, uri.toString());
       } catch (IOException e) {
         lastException = e;
-      } catch (URISyntaxException use) {
-        lastException = new IOException(use);
       }
     } while (++i != start && i < cluster.nodes.size());
     throw lastException;
@@ -212,27 +195,25 @@ public class Client {
    * @return the HTTP response code
    * @throws IOException
    */
-  public HttpResponse executeURI(HttpUriRequest method, Header[] headers, String uri)
+  public int executeURI(HttpMethod method, Header[] headers, String uri)
       throws IOException {
-    // method.setURI(new URI(uri, true));
+    method.setURI(new URI(uri, true));
     for (Map.Entry<String, String> e: extraHeaders.entrySet()) {
-      method.addHeader(e.getKey(), e.getValue());
+      method.addRequestHeader(e.getKey(), e.getValue());
     }
     if (headers != null) {
       for (Header header: headers) {
-        method.addHeader(header);
+        method.addRequestHeader(header);
       }
     }
     long startTime = System.currentTimeMillis();
-    if (resp != null) EntityUtils.consumeQuietly(resp.getEntity());
-    resp = httpClient.execute(method);
-
+    int code = httpClient.executeMethod(method);
     long endTime = System.currentTimeMillis();
     if (LOG.isTraceEnabled()) {
-      LOG.trace(method.getMethod() + " " + uri + " " + resp.getStatusLine().getStatusCode() + " " +
-          resp.getStatusLine().getReasonPhrase() + " in " + (endTime - startTime) + " ms");
+      LOG.trace(method.getName() + " " + uri + " " + code + " " +
+        method.getStatusText() + " in " + (endTime - startTime) + " ms");
     }
-    return resp;
+    return code;
   }
 
   /**
@@ -246,7 +227,7 @@ public class Client {
    * @return the HTTP response code
    * @throws IOException
    */
-  public HttpResponse execute(Cluster cluster, HttpUriRequest method, Header[] headers,
+  public int execute(Cluster cluster, HttpMethod method, Header[] headers,
       String path) throws IOException {
     if (path.startsWith("/")) {
       return executePathOnly(cluster, method, headers, path);
@@ -288,10 +269,11 @@ public class Client {
    */
   public Response head(Cluster cluster, String path, Header[] headers)
       throws IOException {
-    HttpHead method = new HttpHead(path);
+    HeadMethod method = new HeadMethod();
     try {
-      HttpResponse resp = execute(cluster, method, null, path);
-      return new Response(resp.getStatusLine().getStatusCode(), resp.getAllHeaders(), null);
+      int code = execute(cluster, method, null, path);
+      headers = method.getResponseHeaders();
+      return new Response(code, headers, null);
     } finally {
       method.releaseConnection();
     }
@@ -340,7 +322,7 @@ public class Client {
   public Response get(Cluster cluster, String path, String accept)
       throws IOException {
     Header[] headers = new Header[1];
-    headers[0] = new BasicHeader("Accept", accept);
+    headers[0] = new Header("Accept", accept);
     return get(cluster, path, headers);
   }
 
@@ -357,45 +339,6 @@ public class Client {
   }
 
   /**
-   * Returns the response body of the HTTPResponse, if any, as an array of bytes.
-   * If response body is not available or cannot be read, returns <tt>null</tt>
-   *
-   * Note: This will cause the entire response body to be buffered in memory. A
-   * malicious server may easily exhaust all the VM memory. It is strongly
-   * recommended, to use getResponseAsStream if the content length of the response
-   * is unknown or reasonably large.
-   *
-   * @param resp HttpResponse
-   * @return The response body, null if body is empty
-   * @throws IOException If an I/O (transport) problem occurs while obtaining the
-   * response body.
-   */
-  @edu.umd.cs.findbugs.annotations.SuppressWarnings(value =
-      "NP_LOAD_OF_KNOWN_NULL_VALUE", justification = "null is possible return value")
-  public static byte[] getResponseBody(HttpResponse resp) throws IOException {
-    if (resp.getEntity() == null) return null;
-    try (InputStream instream = resp.getEntity().getContent()) {
-      if (instream != null) {
-        long contentLength = resp.getEntity().getContentLength();
-        if (contentLength > Integer.MAX_VALUE) {
-          //guard integer cast from overflow
-          throw new IOException("Content too large to be buffered: " + contentLength +" bytes");
-        }
-        ByteArrayOutputStream outstream = new ByteArrayOutputStream(
-            contentLength > 0 ? (int) contentLength : 4*1024);
-        byte[] buffer = new byte[4096];
-        int len;
-        while ((len = instream.read(buffer)) > 0) {
-          outstream.write(buffer, 0, len);
-        }
-        outstream.close();
-        return outstream.toByteArray();
-      }
-      return null;
-    }
-  }
-
-  /**
    * Send a GET request
    * @param c the cluster definition
    * @param path the path or URI
@@ -405,13 +348,16 @@ public class Client {
    */
   public Response get(Cluster c, String path, Header[] headers)
       throws IOException {
-    if (httpGet != null) {
-      httpGet.releaseConnection();
+    GetMethod method = new GetMethod();
+    try {
+      int code = execute(c, method, headers, path);
+      headers = method.getResponseHeaders();
+      byte[] body = method.getResponseBody();
+      InputStream in = method.getResponseBodyAsStream();
+      return new Response(code, headers, body, in);
+    } finally {
+      method.releaseConnection();
     }
-    httpGet = new HttpGet(path);
-    HttpResponse resp = execute(c, httpGet, headers, path);
-    return new Response(resp.getStatusLine().getStatusCode(), resp.getAllHeaders(),
-        resp, resp.getEntity() == null ? null : resp.getEntity().getContent());
   }
 
   /**
@@ -453,7 +399,7 @@ public class Client {
   public Response put(Cluster cluster, String path, String contentType,
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
-    headers[0] = new BasicHeader("Content-Type", contentType);
+    headers[0] = new Header("Content-Type", contentType);
     return put(cluster, path, headers, content);
   }
 
@@ -471,7 +417,7 @@ public class Client {
       byte[] content, Header extraHdr) throws IOException {
     int cnt = extraHdr == null ? 1 : 2;
     Header[] headers = new Header[cnt];
-    headers[0] = new BasicHeader("Content-Type", contentType);
+    headers[0] = new Header("Content-Type", contentType);
     if (extraHdr != null) {
       headers[1] = extraHdr;
     }
@@ -504,13 +450,13 @@ public class Client {
    */
   public Response put(Cluster cluster, String path, Header[] headers,
       byte[] content) throws IOException {
-    HttpPut method = new HttpPut(path);
+    PutMethod method = new PutMethod();
     try {
-      method.setEntity(new InputStreamEntity(new ByteArrayInputStream(content), content.length));
-      HttpResponse resp = execute(cluster, method, headers, path);
-      headers = resp.getAllHeaders();
-      content = getResponseBody(resp);
-      return new Response(resp.getStatusLine().getStatusCode(), headers, content);
+      method.setRequestEntity(new ByteArrayRequestEntity(content));
+      int code = execute(cluster, method, headers, path);
+      headers = method.getResponseHeaders();
+      content = method.getResponseBody();
+      return new Response(code, headers, content);
     } finally {
       method.releaseConnection();
     }
@@ -555,7 +501,7 @@ public class Client {
   public Response post(Cluster cluster, String path, String contentType,
       byte[] content) throws IOException {
     Header[] headers = new Header[1];
-    headers[0] = new BasicHeader("Content-Type", contentType);
+    headers[0] = new Header("Content-Type", contentType);
     return post(cluster, path, headers, content);
   }
 
@@ -573,7 +519,7 @@ public class Client {
       byte[] content, Header extraHdr) throws IOException {
     int cnt = extraHdr == null ? 1 : 2;
     Header[] headers = new Header[cnt];
-    headers[0] = new BasicHeader("Content-Type", contentType);
+    headers[0] = new Header("Content-Type", contentType);
     if (extraHdr != null) {
       headers[1] = extraHdr;
     }
@@ -606,13 +552,13 @@ public class Client {
    */
   public Response post(Cluster cluster, String path, Header[] headers,
       byte[] content) throws IOException {
-    HttpPost method = new HttpPost(path);
+    PostMethod method = new PostMethod();
     try {
-      method.setEntity(new InputStreamEntity(new ByteArrayInputStream(content), content.length));
-      HttpResponse resp = execute(cluster, method, headers, path);
-      headers = resp.getAllHeaders();
-      content = getResponseBody(resp);
-      return new Response(resp.getStatusLine().getStatusCode(), headers, content);
+      method.setRequestEntity(new ByteArrayRequestEntity(content));
+      int code = execute(cluster, method, headers, path);
+      headers = method.getResponseHeaders();
+      content = method.getResponseBody();
+      return new Response(code, headers, content);
     } finally {
       method.releaseConnection();
     }
@@ -647,12 +593,12 @@ public class Client {
    * @throws IOException for error
    */
   public Response delete(Cluster cluster, String path) throws IOException {
-    HttpDelete method = new HttpDelete(path);
+    DeleteMethod method = new DeleteMethod();
     try {
-      HttpResponse resp = execute(cluster, method, null, path);
-      Header[] headers = resp.getAllHeaders();
-      byte[] content = getResponseBody(resp);
-      return new Response(resp.getStatusLine().getStatusCode(), headers, content);
+      int code = execute(cluster, method, null, path);
+      Header[] headers = method.getResponseHeaders();
+      byte[] content = method.getResponseBody();
+      return new Response(code, headers, content);
     } finally {
       method.releaseConnection();
     }
@@ -666,13 +612,13 @@ public class Client {
    * @throws IOException for error
    */
  public Response delete(Cluster cluster, String path, Header extraHdr) throws IOException {
-    HttpDelete method = new HttpDelete(path);
+    DeleteMethod method = new DeleteMethod();
     try {
       Header[] headers = { extraHdr };
-      HttpResponse resp = execute(cluster, method, headers, path);
-      headers = resp.getAllHeaders();
-      byte[] content = getResponseBody(resp);
-      return new Response(resp.getStatusLine().getStatusCode(), headers, content);
+      int code = execute(cluster, method, headers, path);
+      headers = method.getResponseHeaders();
+      byte[] content = method.getResponseBody();
+      return new Response(code, headers, content);
     } finally {
       method.releaseConnection();
     }

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
index 27db365..871b646 100644
--- a/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
+++ b/hbase-rest/src/main/java/org/apache/hadoop/hbase/rest/client/Response.java
@@ -19,14 +19,11 @@
 
 package org.apache.hadoop.hbase.rest.client;
 
-import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.http.Header;
-import org.apache.http.HttpResponse;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.mortbay.log.Log;
 
 /**
  * The HTTP result code, response headers, and body of a HTTP response.
@@ -37,7 +34,6 @@ public class Response {
   private int code;
   private Header[] headers;
   private byte[] body;
-  private HttpResponse resp;
   private InputStream stream;
 
   /**
@@ -73,15 +69,13 @@ public class Response {
    * Constructor
    * @param code the HTTP response code
    * @param headers headers the HTTP response headers
-   * @param resp the response
+   * @param body the response body, can be null
    * @param in Inputstream if the response had one.
-   * Note: this is not thread-safe
    */
-  public Response(int code, Header[] headers, HttpResponse resp, InputStream in) {
+  public Response(int code, Header[] headers, byte[] body, InputStream in) {
     this.code = code;
     this.headers = headers;
-    this.body = null;
-    this.resp = resp;
+    this.body = body;
     this.stream = in;
   }
 
@@ -135,13 +129,6 @@ public class Response {
    * @return the HTTP response body
    */
   public byte[] getBody() {
-    if (body == null) {
-      try {
-        body = Client.getResponseBody(resp);
-      } catch (IOException ioe) {
-        Log.debug("encountered ioe when obtaining body", ioe);
-      }
-    }
     return body;
   }
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java
index d6eb1b3..c6fb2ff 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGetAndPutResource.java
@@ -24,12 +24,14 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.net.URLEncoder;
+import java.util.Dictionary;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import javax.xml.bind.JAXBException;
 
-import org.apache.http.Header;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.hbase.CompatibilityFactory;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.rest.client.Response;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
index 5097454..42d355d 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestGzipFilter.java
@@ -27,8 +27,7 @@ import java.io.ByteArrayOutputStream;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -96,8 +95,8 @@ public class TestGzipFilter {
     // input side filter
 
     Header[] headers = new Header[2];
-    headers[0] = new BasicHeader("Content-Type", Constants.MIMETYPE_BINARY);
-    headers[1] = new BasicHeader("Content-Encoding", "gzip");
+    headers[0] = new Header("Content-Type", Constants.MIMETYPE_BINARY);
+    headers[1] = new Header("Content-Encoding", "gzip");
     Response response = client.put(path, headers, value_1_gzip);
     assertEquals(response.getCode(), 200);
 
@@ -111,8 +110,8 @@ public class TestGzipFilter {
 
     // output side filter
 
-    headers[0] = new BasicHeader("Accept", Constants.MIMETYPE_BINARY);
-    headers[1] = new BasicHeader("Accept-Encoding", "gzip");
+    headers[0] = new Header("Accept", Constants.MIMETYPE_BINARY);
+    headers[1] = new Header("Accept-Encoding", "gzip");
     response = client.get(path, headers);
     assertEquals(response.getCode(), 200);
     ByteArrayInputStream bis = new ByteArrayInputStream(response.getBody());
@@ -129,8 +128,8 @@ public class TestGzipFilter {
   @Test
   public void testErrorNotGzipped() throws Exception {
     Header[] headers = new Header[2];
-    headers[0] = new BasicHeader("Accept", Constants.MIMETYPE_BINARY);
-    headers[1] = new BasicHeader("Accept-Encoding", "gzip");
+    headers[0] = new Header("Accept", Constants.MIMETYPE_BINARY);
+    headers[1] = new Header("Accept-Encoding", "gzip");
    Response response = client.get("/" + TABLE + "/" + ROW_1 + "/" + COLUMN_2, headers);
     assertEquals(response.getCode(), 404);
     String contentEncoding = response.getHeader("Content-Encoding");
@@ -143,9 +142,9 @@ public class TestGzipFilter {
 
   void testScannerResultCodes() throws Exception {
     Header[] headers = new Header[3];
-    headers[0] = new BasicHeader("Content-Type", Constants.MIMETYPE_XML);
-    headers[1] = new BasicHeader("Accept", Constants.MIMETYPE_JSON);
-    headers[2] = new BasicHeader("Accept-Encoding", "gzip");
+    headers[0] = new Header("Content-Type", Constants.MIMETYPE_XML);
+    headers[1] = new Header("Accept", Constants.MIMETYPE_JSON);
+    headers[2] = new Header("Accept-Encoding", "gzip");
     Response response = client.post("/" + TABLE + "/scanner", headers,
         "<Scanner/>".getBytes());
     assertEquals(response.getCode(), 201);

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
index 84d1855..958cb15 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestMultiRowResource.java
@@ -18,8 +18,7 @@
  */
 package org.apache.hadoop.hbase.rest;
 
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Admin;
@@ -97,7 +96,7 @@ public class TestMultiRowResource {
   public static void setUpBeforeClass() throws Exception {
     conf = TEST_UTIL.getConfiguration();
     conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
-    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
+    extraHdr = new Header(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
     TEST_UTIL.startMiniCluster();
     REST_TEST_UTIL.startServletContainer(conf);
     context = JAXBContext.newInstance(

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index be7ee9a..5114b11 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -37,7 +37,7 @@ import javax.xml.bind.JAXBException;
 import javax.xml.bind.Marshaller;
 import javax.xml.bind.Unmarshaller;
 
-import org.apache.http.Header;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
index 19fdaf0..d005445 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/TestSchemaResource.java
@@ -28,8 +28,7 @@ import java.util.List;
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
+import org.apache.commons.httpclient.Header;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -87,7 +86,7 @@ public class TestSchemaResource {
   public static void setUpBeforeClass() throws Exception {
     conf = TEST_UTIL.getConfiguration();
     conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
-    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
+    extraHdr = new Header(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
     TEST_UTIL.startMiniCluster();
     REST_TEST_UTIL.startServletContainer(conf);
     client = new Client(new Cluster().add("localhost",

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
----------------------------------------------------------------------
diff --git a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
index 1ac37fa..19d0587 100644
--- a/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
+++ b/hbase-rest/src/test/java/org/apache/hadoop/hbase/rest/client/TestRemoteTable.java
@@ -30,8 +30,7 @@ import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.http.Header;
-import org.apache.http.message.BasicHeader;
+import org.apache.commons.httpclient.Header;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
@@ -514,16 +513,16 @@ public class TestRemoteTable {
     Response response = new Response(200);
     assertEquals(200, response.getCode());
     Header[] headers = new Header[2];
-    headers[0] = new BasicHeader("header1", "value1");
-    headers[1] = new BasicHeader("header2", "value2");
+    headers[0] = new Header("header1", "value1");
+    headers[1] = new Header("header2", "value2");
     response = new Response(200, headers);
     assertEquals("value1", response.getHeader("header1"));
     assertFalse(response.hasBody());
     response.setCode(404);
     assertEquals(404, response.getCode());
     headers = new Header[2];
-    headers[0] = new BasicHeader("header1", "value1.1");
-    headers[1] = new BasicHeader("header2", "value2");
+    headers[0] = new Header("header1", "value1.1");
+    headers[1] = new Header("header2", "value2");
     response.setHeaders(headers);
     assertEquals("value1.1", response.getHeader("header1"));
     response.setBody(Bytes.toBytes("body"));

http://git-wip-us.apache.org/repos/asf/hbase/blob/4e08a8be/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 71b77bb..6051ba5 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1275,7 +1275,7 @@
     <disruptor.version>3.3.0</disruptor.version>
    <!-- Do not use versions earlier than 3.2.2 due to a security vulnerability -->
     <collections.version>3.2.2</collections.version>
-    <httpclient.version>4.5.2</httpclient.version>
+    <httpclient.version>4.3.6</httpclient.version>
     <httpcore.version>4.4.4</httpcore.version>
     <metrics-core.version>3.1.2</metrics-core.version>
     <guava.version>12.0.1</guava.version>
@@ -2228,10 +2228,6 @@
             <artifactId>hadoop-common</artifactId>
             <version>${hadoop-two.version}</version>
             <exclusions>
-            <exclusion>
-              <groupId>commons-httpclient</groupId>
-              <artifactId>commons-httpclient</artifactId>
-            </exclusion>
               <exclusion>
                 <groupId>javax.servlet.jsp</groupId>
                 <artifactId>jsp-api</artifactId>
@@ -2266,10 +2262,6 @@
             <artifactId>hadoop-minicluster</artifactId>
             <version>${hadoop-two.version}</version>
             <exclusions>
-            <exclusion>
-              <groupId>commons-httpclient</groupId>
-              <artifactId>commons-httpclient</artifactId>
-            </exclusion>
               <exclusion>
                 <groupId>javax.servlet.jsp</groupId>
                 <artifactId>jsp-api</artifactId>
@@ -2418,10 +2410,6 @@
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
-            <exclusion>
-              <groupId>commons-httpclient</groupId>
-              <artifactId>commons-httpclient</artifactId>
-            </exclusion>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>
@@ -2461,10 +2449,6 @@
            <artifactId>hadoop-minicluster</artifactId>
            <version>${hadoop-three.version}</version>
            <exclusions>
-            <exclusion>
-              <groupId>commons-httpclient</groupId>
-              <artifactId>commons-httpclient</artifactId>
-            </exclusion>
              <exclusion>
                <groupId>javax.servlet.jsp</groupId>
                <artifactId>jsp-api</artifactId>

Reply via email to