Author: cutting
Date: Tue Jul 28 21:54:34 2009
New Revision: 798716
URL: http://svn.apache.org/viewvc?rev=798716&view=rev
Log:
AVRO-81. Switch back from TestNG to JUnit. Contributed by Konstantin Boudnik.
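
For context, here is a minimal sketch (hypothetical class name, not part of the
patch) of how the TestNG idioms removed below map onto the JUnit 4 equivalents
the tests now use; the annotations and assertions are the standard org.junit
APIs this change switches to:

  import static org.junit.Assert.assertEquals;  // was org.testng.AssertJUnit.assertEquals

  import java.io.EOFException;
  import java.io.IOException;

  import org.junit.After;                       // was org.testng.annotations.AfterClass
  import org.junit.Before;                      // was org.testng.annotations.BeforeClass
  import org.junit.Test;                        // was org.testng.annotations.Test

  public class ExampleJUnit4Style {             // hypothetical class, for illustration only
    @Before                                     // TestNG @BeforeClass becomes a per-test @Before
    public void setUp() { /* start servers, create test data, ... */ }

    @Test(expected = EOFException.class)        // TestNG: @Test(expectedExceptions=EOFException.class)
    public void eofIsReported() throws IOException {
      throw new EOFException();
    }

    @Test                                       // TestNG ordering attributes such as
    public void simpleAssertion() {             // dependsOnMethods are simply dropped
      assertEquals(4, 2 + 2);
    }

    @After                                      // TestNG @AfterClass becomes a per-test @After
    public void tearDown() { /* stop servers, close files, ... */ }
  }

Data-provider driven tests are rewritten with JUnit's Parameterized runner
(@RunWith(Parameterized.class) plus a @Parameterized.Parameters factory method
and a matching constructor), as the io test changes below show.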
Added:
hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java
Removed:
hadoop/avro/trunk/src/test/java/org/apache/avro/test/
Modified:
hadoop/avro/trunk/CHANGES.txt
hadoop/avro/trunk/build.xml
hadoop/avro/trunk/ivy.xml
hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java
hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java
hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java
Modified: hadoop/avro/trunk/CHANGES.txt
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Tue Jul 28 21:54:34 2009
@@ -22,6 +22,9 @@
AVRO-82. Add checkstyle to java compilation. (Thiruvalluvan
M. G. via cutting)
+ AVRO-81. Switch back from TestNG to JUnit. (Konstantin Boudnik via
+ cutting)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/avro/trunk/build.xml
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/build.xml?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/build.xml (original)
+++ hadoop/avro/trunk/build.xml Tue Jul 28 21:54:34 2009
@@ -52,7 +52,6 @@
value="${nonspace.os}-${os.arch}-${sun.arch.data.model}"/>
<property name="test.count" value="100"/>
- <property name="testcase" value="Test*"/>
<property name="test.java.src.dir" value="${basedir}/src/test/java"/>
<property name="test.schemata.dir" value="${basedir}/src/test/schemata"/>
<property name="test.java.build.dir" value="${build.dir}/test"/>
@@ -215,7 +214,6 @@
<java-compiler src="${test.java.src.dir}"
dest="${test.java.classes}"
classpath="test.java.classpath"/>
- <taskdef resource="testngtasks" classpathref="test.java.classpath"/>
</target>
<macrodef name="java-avro-compiler">
@@ -260,24 +258,35 @@
<target name="test" depends="test-java,test-py,test-c,test-interop"/>
- <target name="test-java" depends="compile-test-java,checkstyle-java"
- description="Run java unit tests with TestNG">
-
- <testng classpathref="test.java.classpath"
- sourcedir="${test.java.src.dir}"
- outputdir="${build.dir}/test-output"
- suitename="AvroTestNG"
- listeners="org.apache.avro.test.TestOutputInterceptor,
org.apache.avro.test.TestSuiteInterceptor"
- haltOnfailure="true">
- <sysproperty key="test.count" value="${test.count}"/>
- <sysproperty key="test.dir" value="${test.java.build.dir}"/>
- <sysproperty key="test.validate" value="${test.validate}"/>
- <sysproperty key="testcase" value="${testcase}"/>
-
- <classpath refid="test.java.classpath"/>
+ <macrodef name="test-runner">
+ <attribute name="files.location" />
+ <attribute name="tests.pattern" />
+ <sequential>
+ <junit showoutput="yes"
+ printsummary="withOutAndErr"
+ haltonfailure="no"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.count" value="${test.count}"/>
+ <sysproperty key="test.dir" value="${test.java.build.dir}"/>
+ <sysproperty key="test.validate" value="${test.validate}"/>
+ <classpath refid="test.java.classpath"/>
+ <formatter type="plain" />
+ <batchtest todir="${test.java.build.dir}" unless="testcase">
+ <fileset dir="@{files.location}"
+ includes="@{tests.pattern}"
+ excludes="**/${test.java.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.java.build.dir}" if="testcase">
+ <fileset dir="@{files.location}" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <antcall target="checkfailure" />
+ </sequential>
+ </macrodef>
- <classfileset dir="${test.java.classes}" includes="**/${testcase}.class"
excludes="**/*$*.class"/>
- </testng>
+ <target name="test-java" depends="compile-test-java"
+ description="Run java unit tests">
+ <test-runner files.location="${test.java.src.dir}"
tests.pattern="**/${test.java.include}.java"/>
</target>
<path id="test.py.path">
@@ -341,21 +350,12 @@
<target name="test-interop-data-java" depends="generate-test-data"
description="Run java data file interoperability tests">
+ <test-runner files.location="${test.java.classes}"
tests.pattern="**/TestDataFile$InteropTest.class" />
+ </target>
- <testng classpathref="test.java.classpath"
- sourcedir="${test.java.src.dir}"
- outputdir="${build.dir}/test-output"
- suitename="AvroTest"
- testname="InteropDataTest"
- haltOnfailure="true">
- <sysproperty key="test.count" value="${test.count}"/>
- <sysproperty key="test.dir" value="${test.java.build.dir}"/>
- <sysproperty key="test.validate" value="${test.validate}"/>
-
- <classpath refid="test.java.classpath"/>
-
- <classfileset dir="${test.java.classes}" includes="**/TestDataFile$InteropTest.class"/>
- </testng>
+ <target name="checkfailure" if="tests.failed">
+ <touch file="${test.build.dir}/testsfailed"/>
+ <fail unless="continueOnFailure">Tests failed!</fail>
</target>
<target name="test-interop-data-py" depends="generate-test-data"
@@ -400,32 +400,7 @@
<target name="test-interop-rpc-java" depends="start-rpc-daemons"
description="Run java rpc interoperability tests">
-
- <testng classpathref="test.java.classpath"
- sourcedir="${test.java.src.dir}"
- outputdir="${build.dir}/test-output"
- suitename="AvroTestNG"
- testname="InteropRPCTest"
- haltOnfailure="true">
- <sysproperty key="test.count" value="${test.count}"/>
- <sysproperty key="test.dir" value="${test.java.build.dir}"/>
- <sysproperty key="test.validate" value="${test.validate}"/>
-
- <classpath refid="test.java.classpath"/>
-
- <classfileset dir="${test.java.classes}" includes="**/TestProtocolSpecific$InteropTest.class"/>
- </testng>
- </target>
-
- <target name="test-reports" depends="test-java" description="This target
produce reports in JUnit format">
- <mkdir dir="${build.dir}/test-report"/>
- <junitreport todir="${build.dir}/test-report">
- <fileset dir="${build.dir}/test-output">
- <include name="**/*.xml"/>
- </fileset>
-
- <report format="noframes" todir="${build.dir}/test-report"/>
- </junitreport>
+ <test-runner files.location="${test.java.classes}"
tests.pattern="**/TestProtocolSpecific$InteropTest.class" />
</target>
<target name="test-interop-rpc-py" depends="start-rpc-daemons"
Modified: hadoop/avro/trunk/ivy.xml
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/ivy.xml?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/ivy.xml (original)
+++ hadoop/avro/trunk/ivy.xml Tue Jul 28 21:54:34 2009
@@ -39,10 +39,7 @@
rev="1.5"/>
<dependency org="com.thoughtworks.paranamer" name="paranamer-ant"
rev="1.5"/>
- <dependency org="org.testng" name="testng" rev="5.9"
- transitive="false" conf="test->default">
- <artifact name="testng" type="jar" ext="jar" e:classifier="jdk15"/>
- </dependency>
+ <dependency org="junit" name="junit" rev="4.5" conf="test->default"/>
<dependency org="checkstyle" name="checkstyle" rev="5.0"
conf="test->default"/>
</dependencies>
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestDataFile.java Tue Jul 28 21:54:34 2009
@@ -25,9 +25,9 @@
import org.apache.avro.io.DatumReader;
import org.apache.avro.reflect.ReflectDatumReader;
import org.apache.avro.specific.SpecificDatumReader;
-import static org.testng.AssertJUnit.assertEquals;
-import static org.testng.AssertJUnit.assertNotNull;
-import org.testng.annotations.Test;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.FileOutputStream;
@@ -66,7 +66,7 @@
}
}
- @Test(dependsOnMethods="testGenericWrite")
+ @Test
public void testGenericRead() throws IOException {
DataFileReader<Object> reader =
new DataFileReader<Object>(new SeekableFileInput(FILE),
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestFsData.java Tue Jul 28 21:54:34 2009
@@ -26,9 +26,7 @@
import org.apache.avro.util.Utf8;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
+import org.junit.*;
import java.io.File;
import java.io.FileInputStream;
@@ -94,7 +92,7 @@
private static Requestor requestor;
private static FileChannel fileChannel;
- @BeforeClass
+ @Before
public void testStartServer() throws Exception {
// create a file that has COUNT * BUFFER_SIZE bytes of random data
Random rand = new Random();
@@ -131,7 +129,7 @@
}
}
- @AfterClass
+ @After
public void testStopServer() throws Exception {
server.close();
fileChannel.close();
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolGeneric.java Tue Jul 28 21:54:34 2009
@@ -25,22 +25,22 @@
import org.apache.avro.generic.GenericResponder;
import org.apache.avro.ipc.*;
import org.apache.avro.util.Utf8;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.testng.AssertJUnit.assertEquals;
-import static org.testng.AssertJUnit.assertNotNull;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Random;
-import java.util.LinkedHashMap;
-import java.util.ArrayList;
public class TestProtocolGeneric {
private static final Logger LOG
@@ -95,7 +95,7 @@
private static Transceiver client;
private static Requestor requestor;
- @BeforeClass
+ @Before
public void testStartServer() throws Exception {
server = new SocketServer(new TestResponder(), new InetSocketAddress(0));
client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
@@ -184,7 +184,7 @@
}
}
- @AfterClass
+ @After
public void testStopServer() throws IOException {
client.close();
server.close();
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java Tue Jul 28 21:54:34 2009
@@ -22,13 +22,13 @@
import org.apache.avro.reflect.ReflectRequestor;
import org.apache.avro.reflect.ReflectResponder;
import org.apache.avro.test.Simple;
-import org.testng.annotations.BeforeClass;
+import org.junit.Before;
import java.net.InetSocketAddress;
public class TestProtocolReflect extends TestProtocolSpecific {
- @BeforeClass
+ @Before
public void testStartServer() throws Exception {
server = new SocketServer(new ReflectResponder(Simple.class, new
TestImpl()),
new InetSocketAddress(0));
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolSpecific.java Tue Jul 28 21:54:34 2009
@@ -29,13 +29,13 @@
import org.apache.avro.test.Simple.TestError;
import org.apache.avro.test.Simple.TestRecord;
import org.apache.avro.util.Utf8;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.testng.AssertJUnit.assertEquals;
-import static org.testng.AssertJUnit.assertNotNull;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
import java.io.*;
import java.net.InetSocketAddress;
@@ -65,7 +65,7 @@
protected static Transceiver client;
protected static Simple proxy;
- @BeforeClass
+ @Before
public void testStartServer() throws Exception {
server = new SocketServer(new SpecificResponder(Simple.class, new
TestImpl()),
new InetSocketAddress(0));
@@ -123,7 +123,7 @@
assertEquals("an error", error.message.toString());
}
- @AfterClass
+ @After
public void testStopServer() throws IOException {
client.close();
server.close();
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java Tue Jul 28 21:54:34 2009
@@ -23,8 +23,8 @@
import org.apache.avro.test.Simple.TestRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import static org.testng.AssertJUnit.assertEquals;
-import org.testng.annotations.Test;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
import java.io.*;
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestSchema.java Tue Jul 28 21:54:34 2009
@@ -17,8 +17,9 @@
*/
package org.apache.avro;
-import static org.testng.AssertJUnit.assertEquals;
-import static org.testng.AssertJUnit.assertTrue;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -39,7 +40,6 @@
import org.apache.avro.io.JsonDecoder;
import org.apache.avro.io.JsonEncoder;
import org.apache.avro.util.Utf8;
-import org.testng.annotations.Test;
public class TestSchema {
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBinaryDecoder.java Tue Jul 28 21:54:34 2009
@@ -22,55 +22,55 @@
import java.io.IOException;
import org.apache.avro.util.Utf8;
-import org.testng.annotations.Test;
+import org.junit.Test;
public class TestBinaryDecoder {
/** Verify EOFException throw at EOF */
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_boolean() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readBoolean();
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_int() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readInt();
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_long() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readLong();
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_float() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readFloat();
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_double() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readDouble();
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_bytes() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readBytes(null);
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_string() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).
readString(new Utf8("a"));
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_fixed() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).
readFixed(new byte[1]);
}
- @Test(expectedExceptions=EOFException.class)
+ @Test(expected=EOFException.class)
public void testEOF_enum() throws IOException {
new BinaryDecoder(new ByteArrayInputStream(new byte[0])).readEnum();
}
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestBlockingIO.java Tue Jul 28 21:54:34 2009
@@ -23,16 +23,32 @@
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Stack;
+import java.util.Collection;
+import java.util.Arrays;
import org.codehaus.jackson.JsonFactory;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.JsonParser;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+@RunWith(Parameterized.class)
public class TestBlockingIO {
private static final String UTF_8 = "UTF-8";
+
+ private final int iSize;
+ private final int iDepth;
+ private final String sInput;
+
+ public TestBlockingIO (int sz, int dp, String inp) {
+ this.iSize = sz;
+ this.iDepth = dp;
+ this.sInput = inp;
+ }
private static class Tests {
private final JsonParser parser;
@@ -65,13 +81,13 @@
while (parser.nextToken() != null) {
switch (parser.getCurrentToken()) {
case END_ARRAY:
- Assert.assertEquals(0, count);
- Assert.assertTrue(countStack.peek().isArray);
+ assertEquals(0, count);
+ assertTrue(countStack.peek().isArray);
count = countStack.pop().count;
break;
case END_OBJECT:
- Assert.assertEquals(0, count);
- Assert.assertFalse(countStack.peek().isArray);
+ assertEquals(0, count);
+ assertFalse(countStack.peek().isArray);
count = countStack.pop().count;
break;
case START_ARRAY:
@@ -119,12 +135,12 @@
switch (parser.getCurrentToken()) {
case END_ARRAY:
// assertEquals(0, count);
- Assert.assertTrue(countStack.peek().isArray);
+ assertTrue(countStack.peek().isArray);
count = countStack.pop().count;
break;
case END_OBJECT:
// assertEquals(0, count);
- Assert.assertFalse(countStack.peek().isArray);
+ assertFalse(countStack.peek().isArray);
count = countStack.pop().count;
break;
case START_ARRAY:
@@ -201,29 +217,29 @@
}
}
- @Test(dataProvider="data")
- public void testScan(int bufferSize, int depth, String input)
+ @Test
+ public void testScan()
throws JsonParseException, IOException {
- Tests t = new Tests(bufferSize, depth, input);
+ Tests t = new Tests(iSize, iDepth, sInput);
t.scan();
}
- @Test(dataProvider="data")
- public void testSkip_1(int bufferSize, int depth, String input)
+ @Test
+ public void testSkip_1()
throws JsonParseException, IOException {
- testSkip(bufferSize, depth, input, 0);
+ testSkip(iSize, iDepth, sInput, 0);
}
- @Test(dataProvider="data")
- public void testSkip_2(int bufferSize, int depth, String input)
+ @Test
+ public void testSkip_2()
throws JsonParseException, IOException {
- testSkip(bufferSize, depth, input, 1);
+ testSkip(iSize, iDepth, sInput, 1);
}
- @Test(dataProvider="data")
- public void testSkip_3(int bufferSize, int depth, String input)
+ @Test
+ public void testSkip_3()
throws JsonParseException, IOException {
- testSkip(bufferSize, depth, input, 2);
+ testSkip(iSize, iDepth, sInput, 2);
}
private void testSkip(int bufferSize, int depth, String input,
@@ -264,10 +280,10 @@
private static void checkString(String s, Decoder input, int n)
throws IOException, UnsupportedEncodingException {
ByteBuffer buf = input.readBytes(null);
- Assert.assertEquals(n, buf.remaining());
+ assertEquals(n, buf.remaining());
String s2 = new String(buf.array(), buf.position(),
buf.remaining(), UTF_8);
- Assert.assertEquals(s, s2);
+ assertEquals(s, s2);
}
private static void serialize(Encoder cos, JsonParser p,
@@ -280,12 +296,12 @@
while (p.nextToken() != null) {
switch (p.getCurrentToken()) {
case END_ARRAY:
- Assert.assertTrue(isArray[stackTop]);
+ assertTrue(isArray[stackTop]);
cos.writeArrayEnd();
stackTop--;
break;
case END_OBJECT:
- Assert.assertFalse(isArray[stackTop]);
+ assertFalse(isArray[stackTop]);
cos.writeMapEnd();
stackTop--;
break;
@@ -330,9 +346,9 @@
}
}
- @DataProvider
- public static Object[][] data() {
- return new Object[][] {
+ @Parameterized.Parameters
+ public static Collection<Object[]> data() {
+ return Arrays.asList (new Object[][] {
{ 64, 0, "" },
{ 64, 0, jss(0, 'a') },
{ 64, 0, jss(3, 'a') },
@@ -440,7 +456,7 @@
{ 64, 1, "{\"n1\": \"v\", \"n2\": []}" },
{ 100, 1, "{\"n1\": \"v\", \"n2\": []}" },
{ 100, 1, "{\"n1\": \"v\", \"n2\": [\"abc\"]}" },
- };
+ });
}
/**
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO.java Tue Jul 28 21:54:34 2009
@@ -20,49 +20,50 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
-import java.util.Iterator;
+import java.util.Arrays;
+import java.util.Collection;
import org.apache.avro.Schema;
import org.apache.avro.io.TestValidatingIO.Encoding;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
+import org.junit.Test;
+import org.junit.runners.Parameterized;
+import org.junit.runner.RunWith;
+@RunWith(Parameterized.class)
public class TestResolvingIO {
- @Test(dataProvider="data1")
- public void test_identical(Encoding encoding,
+
+ protected final Encoding eEnc;
+ protected final int iSkipL;
+ protected final String sJsWrtSchm;
+ protected final String sWrtCls;
+ protected final String sJsRdrSchm;
+ protected final String sRdrCls;
+
+ public TestResolvingIO (Encoding encoding,
int skipLevel, String jsonWriterSchema,
- String writerCalls,
- String jsonReaderSchema, String readerCalls)
- throws IOException {
- performTest(encoding, skipLevel, jsonWriterSchema, writerCalls,
- jsonReaderSchema, readerCalls);
+ String writerCalls,
+ String jsonReaderSchema, String readerCalls
+ ) {
+ this.eEnc = encoding;
+ this.iSkipL = skipLevel;
+ this.sJsWrtSchm = jsonWriterSchema;
+ this.sWrtCls = writerCalls;
+ this.sJsRdrSchm = jsonReaderSchema;
+ this.sRdrCls = readerCalls;
}
- private static final int COUNT = 10;
-
- @Test(dataProvider="data2")
- public void test_compatible(Encoding encoding,
- int skipLevel, String jsonWriterSchema,
- String writerCalls,
- String jsonReaderSchema, String readerCalls)
+ @Test
+ public void test_identical()
throws IOException {
- performTest(encoding, skipLevel, jsonWriterSchema, writerCalls,
- jsonReaderSchema, readerCalls);
+ performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsRdrSchm, sRdrCls);
}
- @Test(dataProvider="data3")
- public void test_resolving(Encoding encoding, int skipLevel,
- String jsonWriterSchema, String writerCalls,
- Object[] writerValues,
- String jsonReaderSchema, String readerCalls, Object[] readerValues)
- throws IOException {
- Schema writerSchema = Schema.parse(jsonWriterSchema);
- byte[] bytes = TestValidatingIO.make(writerSchema, writerCalls,
- writerValues, Encoding.BINARY);
- Schema readerSchema = Schema.parse(jsonReaderSchema);
- check(writerSchema, readerSchema, bytes, readerCalls,
- readerValues,
- Encoding.BINARY, skipLevel);
+ private static final int COUNT = 10;
+
+ @Test
+ public void test_compatible()
+ throws IOException {
+ performTest(eEnc, iSkipL, sJsWrtSchm, sWrtCls, sJsRdrSchm, sRdrCls);
}
private void performTest(Encoding encoding,
@@ -92,7 +93,7 @@
encoding, skipLevel);
}
- private static void check(Schema wsc, Schema rsc, byte[] bytes,
+ static void check(Schema wsc, Schema rsc, byte[] bytes,
String calls, Object[] values, Encoding encoding,
int skipLevel)
throws IOException {
@@ -113,26 +114,14 @@
TestValidatingIO.check(vi, calls, values, skipLevel);
}
- @DataProvider
- public static Iterator<Object[]> data1() {
- return TestValidatingIO.cartesian(encodings, skipLevels,
- TestValidatingIO.paste(TestValidatingIO.testSchemas(),
- TestValidatingIO.testSchemas()));
- }
-
- @DataProvider
- public static Iterator<Object[]> data2() {
- return TestValidatingIO.cartesian(encodings, skipLevels, testSchemas());
- }
-
- @DataProvider
- public static Iterator<Object[]> data3() {
- return TestValidatingIO.cartesian(encodings, skipLevels,
- dataForResolvingTests());
+ @Parameterized.Parameters
+ public static Collection<Object[]> data2() {
+ return Arrays.asList(TestValidatingIO.convertTo2dArray(encodings, skipLevels, testSchemas()));
}
- private static Object[][] encodings = new Object[][] { { Encoding.BINARY } };
- private static Object[][] skipLevels =
+ static Object[][] encodings = new Object[][] { { Encoding.BINARY },
+ { Encoding.BLOCKING_BINARY }, { Encoding.JSON } };
+ static Object[][] skipLevels =
new Object[][] { { -1 }, { 0 }, { 1 }, { 2 } };
private static Object[][] testSchemas() {
// The mnemonics are the same as {@link TestValidatingIO#testSchemas}
@@ -221,46 +210,4 @@
"[\"long\", \"boolean\"]", "U0L" },
};
}
-
- private static Object[][] dataForResolvingTests() {
- // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
- return new Object[][] {
- // Reordered fields
- { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
- + "{\"name\":\"f1\", \"type\":\"int\"},"
- + "{\"name\":\"f2\", \"type\":\"string\"}]}", "IS10",
- new Object[] { 10, "hello" },
- "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
- + "{\"name\":\"f2\", \"type\":\"string\" },"
- + "{\"name\":\"f1\", \"type\":\"long\"}]}", "LS10",
- new Object[] { 10L, "hello" } },
-
- // Default values
- { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
- new Object[] { },
- "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
- + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]}", "I",
- new Object[] { 100 } },
- { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
- + "{\"name\":\"f2\", \"type\":\"int\"}]}", "I",
- new Object[] { 10 },
- "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
- + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
- + "{\"name\":\"f2\", \"type\":\"int\"}]}", "II",
- new Object[] { 10, 101 } },
- { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
- + "{\"name\": \"g1\", " +
- "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
- + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
- + "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
- new Object[] { 10, 11L },
- "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
- + "{\"name\": \"g1\", " +
- "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
- + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
- + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
- + "{\"name\": \"g2\", \"type\": \"long\"}]}}", "IIL",
- new Object[] { 10, 101, 11L } },
- };
- }
}
Added: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java?rev=798716&view=auto
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java (added)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestResolvingIO_resolving.java Tue Jul 28 21:54:34 2009
@@ -0,0 +1,103 @@
+package org.apache.avro.io;
+
+import org.junit.Test;
+import org.junit.runners.Parameterized;
+import org.junit.runner.RunWith;
+import org.apache.avro.Schema;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Arrays;
+
+@RunWith(Parameterized.class)
+public class TestResolvingIO_resolving {
+ protected TestValidatingIO.Encoding eEnc;
+ protected final int iSkipL;
+ protected final String sJsWrtSchm;
+ protected final String sWrtCls;
+ protected final String sJsRdrSchm;
+ protected final String sRdrCls;
+
+ protected final Object[] oaWrtVals;
+ protected final Object[] oaRdrVals;
+
+ public TestResolvingIO_resolving(TestValidatingIO.Encoding encoding,
+ int skipLevel, String jsonWriterSchema,
+ String writerCalls,
+ Object[] writerValues,
+ String jsonReaderSchema, String readerCalls,
+ Object[] readerValues
+ ) {
+ this.eEnc = encoding;
+ this.iSkipL = skipLevel;
+ this.sJsWrtSchm = jsonWriterSchema;
+ this.sWrtCls = writerCalls;
+ this.oaWrtVals = writerValues;
+ this.sJsRdrSchm = jsonReaderSchema;
+ this.sRdrCls = readerCalls;
+ this.oaRdrVals = readerValues;
+ }
+
+ @Test
+ public void test_resolving()
+ throws IOException {
+ Schema writerSchema = Schema.parse(sJsWrtSchm);
+ byte[] bytes = TestValidatingIO.make(writerSchema, sWrtCls,
+ oaWrtVals, TestValidatingIO.Encoding.BINARY);
+ Schema readerSchema = Schema.parse(sJsRdrSchm);
+ TestResolvingIO.check(writerSchema, readerSchema, bytes, sRdrCls,
+ oaRdrVals,
+ TestValidatingIO.Encoding.BINARY, iSkipL);
+ }
+
+ @Parameterized.Parameters
+ public static Collection<Object[]> data3() {
+ Collection<Object[]> ret = Arrays.asList(
+ TestValidatingIO.convertTo2dArray(TestResolvingIO.encodings,
+ TestResolvingIO.skipLevels,
+ dataForResolvingTests()));
+ return ret;
+ }
+
+ private static Object[][] dataForResolvingTests() {
+ // The mnemonics are the same as {@link TestValidatingIO#testSchemas}
+ return new Object[][] {
+ // Reordered fields
+ { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+ + "{\"name\":\"f1\", \"type\":\"int\"},"
+ + "{\"name\":\"f2\", \"type\":\"string\"}]}", "IS10",
+ new Object[] { 10, "hello" },
+ "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+ + "{\"name\":\"f2\", \"type\":\"string\" },"
+ + "{\"name\":\"f1\", \"type\":\"long\"}]}", "LS10",
+ new Object[] { 10L, "hello" } },
+
+ // Default values
+ { "{\"type\":\"record\",\"name\":\"r\",\"fields\":[]}", "",
+ new Object[] { },
+ "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+ + "{\"name\":\"f\", \"type\":\"int\", \"default\": 100}]}", "I",
+ new Object[] { 100 } },
+ { "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+ + "{\"name\":\"f2\", \"type\":\"int\"}]}", "I",
+ new Object[] { 10 },
+ "{\"type\":\"record\",\"name\":\"r\",\"fields\":["
+ + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+ + "{\"name\":\"f2\", \"type\":\"int\"}]}", "II",
+ new Object[] { 10, 101 } },
+ { "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+ + "{\"name\": \"g1\", " +
+ "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+ + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+ + "{\"name\": \"g2\", \"type\": \"long\"}]}", "IL",
+ new Object[] { 10, 11L },
+ "{\"type\":\"record\",\"name\":\"outer\",\"fields\":["
+ + "{\"name\": \"g1\", " +
+ "\"type\":{\"type\":\"record\",\"name\":\"inner\",\"fields\":["
+ + "{\"name\":\"f1\", \"type\":\"int\", \"default\": 101},"
+ + "{\"name\":\"f2\", \"type\":\"int\"}]}}, "
+ + "{\"name\": \"g2\", \"type\": \"long\"}]}}", "IIL",
+ new Object[] { 10, 101, 11L } },
+ };
+ }
+}
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java
URL:
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java?rev=798716&r1=798715&r2=798716&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/io/TestValidatingIO.java Tue Jul 28 21:54:34 2009
@@ -17,36 +17,47 @@
*/
package org.apache.avro.io;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.fail;
+import org.apache.avro.Schema;
+import org.apache.avro.util.Utf8;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
-import java.util.Iterator;
-import java.util.Random;
-import java.util.Vector;
-
-import org.apache.avro.Schema;
-import org.apache.avro.util.Utf8;
-import org.testng.Assert;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
+import java.util.*;
+@RunWith(Parameterized.class)
public class TestValidatingIO {
enum Encoding {
BINARY,
BLOCKING_BINARY,
JSON,
- };
-
+ }
+
+ private Encoding eEnc;
+ private int iSkipL;
+ private String sJsSch;
+ private String sCl;
+
+ public TestValidatingIO (Encoding _enc, int _skip, String _js, String _cls) {
+ this.eEnc = _enc;
+ this.iSkipL = _skip;
+ this.sJsSch = _js;
+ this.sCl = _cls;
+ }
private static int COUNT = 1;
- @Test(dataProvider="data")
- public void test(Encoding encoding, int skipLevel,
- String jsonSchema, String calls) throws IOException {
+ @Test
+ public void testMain() throws IOException {
for (int i = 0; i < COUNT; i++) {
- testOnce(Schema.parse(jsonSchema), calls, skipLevel, encoding);
+ testOnce(Schema.parse(sJsSch), sCl, iSkipL, eEnc);
}
}
@@ -193,7 +204,7 @@
break;
}
default:
- Assert.fail();
+ fail();
break;
}
}
@@ -243,7 +254,7 @@
case 's':
break;
default:
- Assert.fail();
+ fail();
break;
}
}
@@ -270,7 +281,7 @@
}
private static String nextString(Random r, int length) {
- char[] cc = new char[length];
+ char[] cc = new char[length];
for (int i = 0; i < length; i++) {
cc[i] = (char) ('A' + r.nextInt(26));
}
@@ -312,24 +323,24 @@
vi.readNull();
break;
case 'B':
- boolean b = ((Boolean) values[p++]).booleanValue();
- Assert.assertEquals(vi.readBoolean(), b);
+ boolean b = ((Boolean) values[p++]).booleanValue();
+ assertEquals(b, vi.readBoolean());
break;
case 'I':
- int ii = ((Integer) values[p++]).intValue();
- Assert.assertEquals(vi.readInt(), ii);
+ int ii = ((Integer) values[p++]).intValue();
+ assertEquals(ii, vi.readInt());
break;
case 'L':
- long l = longValue(values[p++]);
- Assert.assertEquals(vi.readLong(), l);
+ long l = longValue(values[p++]);
+ assertEquals(l, vi.readLong());
break;
case 'F':
- float f = floatValue(values[p++]);
- Assert.assertEquals(vi.readFloat(), f, Math.abs(f / 1000));
+ float f = floatValue(values[p++]);
+ assertEquals(f, vi.readFloat(), Math.abs(f / 1000));
break;
case 'D':
- double d = doubleValue(values[p++]);
- Assert.assertEquals(vi.readDouble(), d, Math.abs(d / 1000));
+ double d = doubleValue(values[p++]);
+ assertEquals(d, vi.readDouble(), Math.abs(d / 1000));
break;
case 'S':
extractInt(cs);
@@ -337,8 +348,8 @@
vi.skipString();
p++;
} else {
- String s = (String) values[p++];
- Assert.assertEquals(vi.readString(null), new Utf8(s));
+ String s = (String) values[p++];
+ assertEquals(new Utf8(s), vi.readString(null));
}
break;
case 'K':
@@ -347,8 +358,8 @@
vi.skipString();
p++;
} else {
- String s = (String) values[p++];
- Assert.assertEquals(vi.readString(null), new Utf8(s));
+ String s = (String) values[p++];
+ assertEquals(new Utf8(s), vi.readString(null));
}
break;
case 'b':
@@ -362,7 +373,7 @@
byte[] actBytes = new byte[bb2.remaining()];
System.arraycopy(bb2.array(), bb2.position(), actBytes,
0, bb2.remaining());
- Assert.assertEquals(actBytes, bb);
+ assertArrayEquals(bb, actBytes);
}
break;
case 'f':
@@ -375,7 +386,7 @@
byte[] bb = (byte[]) values[p++];
byte[] actBytes = new byte[len];
vi.readFixed(actBytes);
- Assert.assertEquals(actBytes, bb);
+ assertArrayEquals(bb, actBytes);
}
}
break;
@@ -385,7 +396,7 @@
if (level == skipLevel) {
vi.readEnum();
} else {
- Assert.assertEquals(vi.readEnum(), e);
+ assertEquals(e, vi.readEnum());
}
}
break;
@@ -412,16 +423,16 @@
continue;
}
case ']':
- Assert.assertEquals(counts[level], 0);
+ assertEquals(0, counts[level]);
if (! isEmpty[level]) {
- Assert.assertEquals(vi.arrayNext(), 0);
+ assertEquals(0, vi.arrayNext());
}
level--;
break;
case '}':
- Assert.assertEquals(counts[level], 0);
+ assertEquals(0, counts[level]);
if (! isEmpty[level]) {
- Assert.assertEquals(vi.mapNext(), 0);
+ assertEquals(0, vi.mapNext());
}
level--;
break;
@@ -441,14 +452,14 @@
case 'U':
{
int idx = extractInt(cs);
- Assert.assertEquals(idx, vi.readIndex());
+ assertEquals(idx, vi.readIndex());
continue;
}
default:
- Assert.fail();
+ fail();
}
}
- Assert.assertEquals(p, values.length);
+ assertEquals(values.length, p);
}
private static float floatValue(Object object) {
@@ -474,9 +485,9 @@
throws IOException {
final char end = isArray ? ']' : '}';
if (isArray) {
- Assert.assertEquals(vi.skipArray(), 0);
+ assertEquals(0, vi.skipArray());
} else if (end == '}'){
- Assert.assertEquals(vi.skipMap(), 0);
+ assertEquals(0, vi.skipMap());
}
int level = 0;
int p = 0;
@@ -512,9 +523,9 @@
throw new RuntimeException("Don't know how to skip");
}
- @DataProvider
- public static Iterator<Object[]> data() {
- return cartesian(encodings, skipLevels, testSchemas());
+ @Parameterized.Parameters
+ public static Collection<Object[]> data() {
+ return Arrays.asList(convertTo2dArray(encodings, skipLevels, testSchemas()));
}
private static Object[][] encodings = new Object[][] {
@@ -526,6 +537,20 @@
{ -1 }, { 0 }, { 1 }, { 2 },
};
+ public static Object[][] convertTo2dArray(final Object[][]... values) {
+ ArrayList<Object[]> ret = new ArrayList<Object[]>();
+
+ Iterator<Object[]> iter = cartesian(values);
+ while (iter.hasNext()) {
+ Object[] objects = iter.next();
+ ret.add(objects);
+ }
+ Object[][] retArrays = new Object[ret.size()][];
+ for (int i = 0; i < ret.size(); i++) {
+ retArrays[i] = ret.get(i);
+ }
+ return retArrays;
+ }
/**
* Returns the Cartesian product of input sequences.
*/
@@ -585,7 +610,7 @@
*/
static Object[][] paste(Object[][]... in) {
Object[][] result = new Object[in[0].length][];
- Object[][] cc = new Object[in.length][];
+ Object[][] cc = new Object[in.length][];
for (int i = 0; i < result.length; i++) {
for (int j = 0; j < cc.length; j++) {
cc[j] = in[j][i];