http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c0e0313/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2DataReceiver.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2DataReceiver.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2DataReceiver.java
new file mode 100644
index 0000000..601f7c9
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2DataReceiver.java
@@ -0,0 +1,111 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web.http2;
+
+import static org.junit.Assert.assertEquals;
+import io.netty.handler.codec.http.HttpResponseStatus;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ThreadLocalRandom;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+
+import org.eclipse.jetty.server.Request;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.handler.AbstractHandler;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.io.ByteStreams;
+import com.google.common.io.Files;
+
+public class TestHttp2DataReceiver extends AbstractTestHttp2Client {
+
+  private File largeFile = new File(".largeFile");
+
+  @Before
+  public void setUp() throws Exception {
+    byte[] b = new byte[64 * 1024];
+    try (FileOutputStream out = new FileOutputStream(largeFile)) {
+      for (int i = 0; i < 1024; i++) {
+        ThreadLocalRandom.current().nextBytes(b);
+        out.write(b);
+      }
+    }
+    largeFile.deleteOnExit();
+    start();
+  }
+
+  @After
+  public void tearDown() throws Exception {
+    stop();
+    largeFile.delete();
+  }
+
+  @Override
+  protected void setHandler(Server server) {
+    server.setHandler(new AbstractHandler() {
+
+      @Override
+      public void handle(String target, Request baseRequest,
+          HttpServletRequest request, HttpServletResponse response)
+          throws IOException, ServletException {
+        Files.copy(largeFile, response.getOutputStream());
+        response.getOutputStream().flush();
+      }
+    });
+  }
+
+  private void assertContentEquals(byte[] expected, byte[] actual, int length) {
+    for (int i = 0; i < length; i++) {
+      assertEquals("differ at index " + i + ", expected " + expected[i]
+          + ", actual " + actual[i], expected[i], actual[i]);
+    }
+  }
+
+  @Test
+  public void test() throws InterruptedException, ExecutionException,
+      IOException {
+    Http2StreamChannel stream = connect(true);
+    Http2DataReceiver receiver = stream.pipeline().get(Http2DataReceiver.class);
+    assertEquals(HttpResponseStatus.OK.codeAsText(), receiver.waitForResponse()
+        .status());
+    byte[] buf = new byte[4 * 1024];
+    byte[] fileBuf = new byte[buf.length];
+    try (InputStream in = receiver.content();
+        FileInputStream fileIn = new FileInputStream(largeFile)) {
+      for (;;) {
+        int read = in.read(buf);
+        if (read == -1) {
+          assertEquals(-1, fileIn.read());
+          break;
+        }
+        ByteStreams.readFully(fileIn, fileBuf, 0, read);
+        assertContentEquals(fileBuf, buf, read);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c0e0313/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2Server.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2Server.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2Server.java
index 6a8495b..b6c197d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2Server.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2Server.java
@@ -35,6 +35,7 @@ import java.nio.charset.StandardCharsets;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.hadoop.conf.Configuration;
 import org.eclipse.jetty.http.HttpFields;
 import org.eclipse.jetty.http.HttpHeader;
 import org.eclipse.jetty.http.HttpMethod;
@@ -92,7 +93,7 @@ public class TestHttp2Server extends AbstractTestHttp2Server {
                       throws Exception {
                     ch.pipeline().addLast(new HelloWorldHandler());
                   }
-                }));
+                }, new Configuration()));
           }
 
         }).bind(0).syncUninterruptibly().channel();
@@ -137,4 +138,4 @@ public class TestHttp2Server extends AbstractTestHttp2Server {
     Thread.sleep(1000);
     assertEquals(2, handlerClosedCount.get());
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c0e0313/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2ServerMultiThread.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2ServerMultiThread.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2ServerMultiThread.java
index e583ca3..d2fdf0f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2ServerMultiThread.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/http2/TestHttp2ServerMultiThread.java
@@ -44,6 +44,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import org.apache.hadoop.conf.Configuration;
 import org.eclipse.jetty.http.HttpFields;
 import org.eclipse.jetty.http.HttpHeader;
 import org.eclipse.jetty.http.HttpMethod;
@@ -83,8 +84,7 @@ public class TestHttp2ServerMultiThread extends AbstractTestHttp2Server {
     @Override
     protected void channelRead0(ChannelHandlerContext ctx, ByteBuf msg)
         throws Exception {
-      ByteBuf out = msg.readBytes(msg.readableBytes());
-      ctx.writeAndFlush(out);
+      ctx.writeAndFlush(msg.readBytes(msg.readableBytes()));
     }
 
     @Override
@@ -133,7 +133,7 @@ public class TestHttp2ServerMultiThread extends AbstractTestHttp2Server {
                       throws Exception {
                     ch.pipeline().addLast(new DispatchHandler());
                   }
-                }));
+                }, new Configuration()));
           }
 
         }).bind(0).syncUninterruptibly().channel();
@@ -203,5 +203,4 @@ public class TestHttp2ServerMultiThread extends AbstractTestHttp2Server {
     Thread.sleep(1000);
     assertEquals(requestCount, handlerClosedCount.get());
   }
-
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/8c0e0313/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index e70acca..1c2cfad 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -465,6 +465,11 @@
         <version>2.5</version>
       </dependency>
       <dependency>
+        <groupId>javax.servlet</groupId>
+        <artifactId>javax.servlet-api</artifactId>
+        <version>3.1.0</version>
+      </dependency>
+      <dependency>
         <groupId>org.mortbay.jetty</groupId>
         <artifactId>jetty</artifactId>
         <version>${jetty.version}</version>
@@ -585,14 +590,19 @@
       <dependency>
         <groupId>io.netty</groupId>
         <artifactId>netty-all</artifactId>
-        <version>4.1.0.Beta5</version>
+        <version>4.1.0.Beta6</version>
       </dependency>
 
       <dependency>
         <groupId>org.eclipse.jetty.http2</groupId>
         <artifactId>http2-client</artifactId>
         <version>9.3.0.M2</version>
-        <scope>test</scope>
+      </dependency>
+
+      <dependency>
+        <groupId>org.eclipse.jetty.http2</groupId>
+        <artifactId>http2-server</artifactId>
+        <version>9.3.0.M2</version>
       </dependency>
 
       <dependency>
