Author: cutting
Date: Wed Sep 30 21:23:05 2009
New Revision: 820456
URL: http://svn.apache.org/viewvc?rev=820456&view=rev
Log:
AVRO-24. Add a simple bulk-data benchmark.
Added:
hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java
hadoop/avro/trunk/src/test/schemata/BulkData.avpr
Removed:
hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java
hadoop/avro/trunk/src/test/schemata/FSData.avpr
Modified:
hadoop/avro/trunk/CHANGES.txt
hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java
Modified: hadoop/avro/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=820456&r1=820455&r2=820456&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Wed Sep 30 21:23:05 2009
@@ -35,6 +35,8 @@
AVRO-129. Add HTTP-based RPC client and server. (cutting)
+ AVRO-24. Add a simple bulk-data benchmark. (cutting)
+
IMPROVEMENTS
AVRO-99. Use Boost framework for C++ unit tests.
Modified: hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java?rev=820456&r1=820455&r2=820456&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java (original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/ipc/HttpTransceiver.java Wed Sep 30 21:23:05 2009
@@ -51,7 +51,7 @@
connection.setRequestProperty("Content-Length",
Integer.toString(getLength(request)));
connection.setDoOutput(true);
- LOG.info("Connecting to: "+url);
+ //LOG.info("Connecting to: "+url);
return super.transceive(request);
}
Added: hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java?rev=820456&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java (added)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestBulkData.java Wed Sep 30 21:23:05 2009
@@ -0,0 +1,110 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.avro;
+
+import org.apache.avro.specific.SpecificRequestor;
+import org.apache.avro.specific.SpecificResponder;
+import org.apache.avro.ipc.*;
+import org.apache.avro.util.Utf8;
+import org.junit.*;
+
+import java.io.IOException;
+import java.net.URL;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.apache.avro.test.BulkData;
+
+public class TestBulkData {
+  private static final long COUNT =                          // requests per benchmark run (-Dtest.count)
+    Long.parseLong(System.getProperty("test.count", "10"));  // was Integer.parseInt: narrowed a long-typed setting
+  private static final int SIZE =                            // payload bytes per request (-Dtest.size)
+    Integer.parseInt(System.getProperty("test.size", "65536"));
+
+  private static final ByteBuffer DATA = ByteBuffer.allocate(SIZE);
+  static { // fix: was an instance initializer, re-randomizing the static DATA on every instance
+    Random rand = new Random();
+    DATA.limit(DATA.capacity());
+    DATA.position(0);
+    rand.nextBytes(DATA.array());
+  }
+
+  public static class BulkDataImpl implements BulkData { // server-side implementation of the BulkData protocol
+    public ByteBuffer read() { return DATA.duplicate(); } // duplicate: each caller gets an independent position
+    public Void write(ByteBuffer data) {
+      Assert.assertEquals(SIZE, data.remaining());
+      return null;
+    }
+  }
+
+  private static Server server;
+  private static Transceiver client;
+  // removed unused field "requestor" (was never assigned or read)
+  private static BulkData proxy;
+
+  @Before // NOTE(review): fields are static; @BeforeClass/@AfterClass would set up once per class — confirm intent
+  public void startServer() throws Exception {
+    server =
+      new HttpServer(new SpecificResponder(BulkData.class, new BulkDataImpl()),
+                     0); // port 0: bind any free port
+    client =
+      new HttpTransceiver(new URL("http://127.0.0.1:"+server.getPort()+"/"));
+    proxy = (BulkData)SpecificRequestor.getClient(BulkData.class, client);
+  }
+
+  @Test
+  public void testRead() throws IOException {
+    for (int i = 0; i < COUNT; i++)
+      Assert.assertEquals(SIZE, proxy.read().remaining());
+  }
+
+  @Test
+  public void testWrite() throws IOException {
+    for (int i = 0; i < COUNT; i++)
+      proxy.write(DATA.duplicate());
+  }
+
+  @After
+  public void stopServer() throws Exception {
+    server.close();
+  }
+
+  public static void main(String[] args) throws Exception { // standalone benchmark entry point
+    TestBulkData test = new TestBulkData();
+    test.startServer();
+    System.out.println("READ");
+    long start = System.currentTimeMillis();
+    test.testRead();
+    printStats(start);
+    System.out.println("WRITE");
+    start = System.currentTimeMillis();
+    test.testWrite();
+    printStats(start);
+    test.stopServer();
+  }
+
+  private static void printStats(long start) { // report throughput since start (millis timestamp)
+    double seconds = (System.currentTimeMillis()-start)/1000.0;
+    System.out.println("seconds = "+(int)seconds);
+    System.out.println("requests/second = "+(int)(COUNT/seconds));
+    double megabytes = (COUNT*SIZE)/(1024*1024.0);
+    System.out.println("MB = "+(int)megabytes);
+    System.out.println("MB/second = "+ (int)(megabytes/seconds));
+  }
+
+}
Added: hadoop/avro/trunk/src/test/schemata/BulkData.avpr
URL: http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/schemata/BulkData.avpr?rev=820456&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/schemata/BulkData.avpr (added)
+++ hadoop/avro/trunk/src/test/schemata/BulkData.avpr Wed Sep 30 21:23:05 2009
@@ -0,0 +1,21 @@
+
+{"namespace": "org.apache.avro.test",
+ "protocol": "BulkData",
+
+ "types": [],
+
+ "messages": {
+
+ "read": {
+ "request": [],
+ "response": "bytes"
+ },
+
+ "write": {
+ "request": [ {"name": "data", "type": "bytes"} ],
+ "response": "null"
+ }
+
+ }
+
+}