Author: cutting
Date: Fri Nov 20 22:58:18 2009
New Revision: 882753

URL: http://svn.apache.org/viewvc?rev=882753&view=rev
Log:
AVRO-80.  Java reflect API no longer uses Avro-specific classes for string and 
array.
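
For illustration, a record like the following (hypothetical class; TestReflect and
TestProtocolReflect below contain the committed examples) can now round-trip through
the reflect reader/writer without any Avro-specific types:

    public static class Pair {
      String name;              // maps to Avro string (previously required Utf8)
      List<String> tags;        // maps to Avro array (previously GenericArray)
      byte[] payload;           // maps to Avro bytes
    }

    Schema s = ReflectData.get().getSchema(Pair.class);
    ReflectDatumWriter writer = new ReflectDatumWriter(s);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    writer.write(pair, new BinaryEncoder(out));        // pair: a populated Pair
    ReflectDatumReader reader = new ReflectDatumReader(s);
    Object copy = reader.read(null,
        new BinaryDecoder(new ByteArrayInputStream(out.toByteArray())));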

Added:
    hadoop/avro/trunk/src/java/org/apache/avro/util/WeakIdentityHashMap.java
Modified:
    hadoop/avro/trunk/CHANGES.txt
    hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericData.java
    hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumWriter.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java
    hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java
    hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java
    hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java

Modified: hadoop/avro/trunk/CHANGES.txt
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/CHANGES.txt?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/CHANGES.txt (original)
+++ hadoop/avro/trunk/CHANGES.txt Fri Nov 20 22:58:18 2009
@@ -11,6 +11,10 @@
     AVRO-201.  Move Python data file code into its own module.
     (Jeff Hammerbacher via cutting)
 
+    AVRO-80.  Java reflect API no longer uses Avro-specific classes
+    for string and array.  Instead, Java Strings are now used for
+    strings, and Java arrays or Lists for arrays. (cutting)
+
   NEW FEATURES
 
     AVRO-151. Validating Avro schema parser for C (massie)

Modified: hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericData.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericData.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericData.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericData.java Fri Nov 
20 22:58:18 2009
@@ -194,8 +194,8 @@
     case FIXED:
       return datum instanceof GenericFixed
         && ((GenericFixed)datum).bytes().length==schema.getFixedSize();
-    case STRING:  return datum instanceof Utf8;
-    case BYTES:   return datum instanceof ByteBuffer;
+    case STRING:  return isString(datum);
+    case BYTES:   return isBytes(datum);
     case INT:     return datum instanceof Integer;
     case LONG:    return datum instanceof Long;
     case FLOAT:   return datum instanceof Float;

Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumReader.java 
Fri Nov 20 22:58:18 2009
@@ -219,8 +219,10 @@
     case ARRAY:
       Object array = newArray(old, json.size(), schema);
       Schema element = schema.getElementType();
+      int pos = 0;
       for (JsonNode node : json)
-        addToArray(array, defaultFieldValue(peekArray(array), element, node));
+        addToArray(array, pos++,
+                   defaultFieldValue(peekArray(array), element, node));
       return array;
     case MAP:
       Object map = newMap(old, json.size());
@@ -266,12 +268,15 @@
     Schema actualType = actual.getElementType();
     Schema expectedType = expected.getElementType();
     long l = in.readArrayStart();
+    long base = 0;
     if (l > 0) {
       Object array = newArray(old, (int) l, expected);
       do {
         for (long i = 0; i < l; i++) {
-          addToArray(array, read(peekArray(array), actualType, expectedType, in));
+          addToArray(array, base+i,
+                     read(peekArray(array), actualType, expectedType, in));  
         }
+        base += l;
       } while ((l = in.arrayNext()) > 0);
       
       return array;
@@ -291,7 +296,7 @@
   /** Called by the default implementation of {@link #readArray} to add a value.
    * The default implementation is for {@link GenericArray}.*/
   @SuppressWarnings("unchecked")
-  protected void addToArray(Object array, Object e) {
+  protected void addToArray(Object array, long pos, Object e) {
     ((GenericArray) array).add(e);
   }
   

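The new position argument lets array-reading subclasses place elements by index:
GenericArray and List targets simply append, while fixed-size Java arrays need the
running offset that readArray now maintains across array blocks.  Roughly (a sketch
of the hook only; ReflectDatumReader below contains the committed version):

    protected void addToArray(Object array, long pos, Object e) {
      if (array instanceof List)
        ((List) array).add(e);                            // growable: index implicit
      else
        java.lang.reflect.Array.set(array, (int) pos, e); // fixed-size: use the index
    }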
Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumWriter.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumWriter.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumWriter.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/generic/GenericDatumWriter.java 
Fri Nov 20 22:58:18 2009
@@ -145,7 +145,7 @@
     out.setItemCount(size);
     for (Map.Entry<Object,Object> entry : getMapEntries(datum)) {
       out.startItem();
-      out.writeString((Utf8) entry.getKey());
+      writeString(entry.getKey(), out);
       write(value, entry.getValue(), out);
     }
     out.writeMapEnd();

Modified: hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectData.java Fri Nov 
20 22:58:18 2009
@@ -20,6 +20,9 @@
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
+import java.lang.reflect.Type;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.GenericArrayType;
 import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -28,19 +31,32 @@
 import java.util.Map;
 
 import org.apache.avro.AvroRuntimeException;
+import org.apache.avro.AvroTypeException;
 import org.apache.avro.Protocol;
 import org.apache.avro.Schema;
-import org.apache.avro.Schema.Type;
 import org.apache.avro.Protocol.Message;
 import org.apache.avro.generic.GenericFixed;
 import org.apache.avro.specific.SpecificData;
 import org.apache.avro.specific.FixedSize;
 import org.apache.avro.ipc.AvroRemoteException;
+import org.apache.avro.util.WeakIdentityHashMap;
 
 import com.thoughtworks.paranamer.CachingParanamer;
 import com.thoughtworks.paranamer.Paranamer;
 
-/** Utilities to use existing Java classes and interfaces via reflection. */
+/** Utilities to use existing Java classes and interfaces via reflection.
+ *
+ * <p><b>Records</b> When creating a record schema, only fields of the direct
+ * class, not its superclasses, are used.  Fields are not permitted to be
+ * null.  {@link Class#getDeclaredFields() Declared fields} (not inherited)
+ * that are not static or transient are used.
+ *
+ * <p><b>Arrays</b> Both Java arrays and implementations of {@link List} are
+ * mapped to Avro arrays.
+ *
+ * <p><b>{@link String}</b> is mapped to Avro string.
+ * <p><b>byte[]</b> is mapped to Avro bytes.
+ */
 public class ReflectData extends SpecificData {
   
   /** {@link ReflectData} implementation that permits null field values.  The
@@ -71,7 +87,24 @@
   @Override
   protected boolean isRecord(Object datum) {
     if (datum == null) return false;
-    return getSchema(datum.getClass()).getType() == Type.RECORD;
+    return getSchema(datum.getClass()).getType() == Schema.Type.RECORD;
+  }
+
+  @Override
+  protected boolean isArray(Object datum) {
+    return datum instanceof List || datum.getClass().isArray();
+  }
+
+  @Override
+  protected boolean isString(Object datum) {
+    return datum instanceof String;
+  }
+
+  @Override
+  protected boolean isBytes(Object datum) {
+    if (datum == null) return false;
+    Class c = datum.getClass();
+    return c.isArray() && c.getComponentType() == Byte.TYPE;
   }
 
   @Override
@@ -95,6 +128,21 @@
         }
       }
       return true;
+    case ARRAY:
+      if (datum instanceof List) {                // list
+        for (Object element : (List)datum)
+          if (!validate(schema.getElementType(), element))
+            return false;
+        return true;
+      } else if (datum.getClass().isArray()) {    // array
+        int length = java.lang.reflect.Array.getLength(datum);
+        for (int i = 0; i < length; i++)
+          if (!validate(schema.getElementType(),
+                        java.lang.reflect.Array.get(datum, i)))
+            return false;
+        return true;
+      }
+      return false;
     default:
       return super.validate(schema, datum);
     }
@@ -110,42 +158,110 @@
     }
   }
 
-  /** Create a schema for a Java class.  Note that by design only fields of the
-   * direct class, not it's super classes, are used for creating a record
-   * schema.  Also, fields are not permitted to be null.  {@link
-   * Class#getDeclaredFields() Declared fields} (not inherited) which are not
-   * static or transient are used.*/
+  // Indicates the Java representation for an array schema.  If an entry is
+  // present, it contains the Java List class of this array.  If no entry is
+  // present, then a Java array should be used to implement this array.
+  private static final Map<Schema,Class> LIST_CLASSES =
+    new WeakIdentityHashMap<Schema,Class>();
+  private static synchronized void setListClass(Schema schema, Class c) {
+    LIST_CLASSES.put(schema, c);
+  }
+
+  /** Return the {@link List} subclass that implements this schema.*/
+  public static synchronized Class getListClass(Schema schema) {
+    return LIST_CLASSES.get(schema);
+  }
+
+  private static final Class BYTES_CLASS = new byte[0].getClass();
+
+  @Override
+  public Class getClass(Schema schema) {
+    switch (schema.getType()) {
+    case ARRAY:
+      Class listClass = getListClass(schema);
+      if (listClass != null)
+        return listClass;
+      return java.lang.reflect.Array.newInstance(getClass(schema.getElementType()),0).getClass();
+    case STRING:  return String.class;
+    case BYTES:   return BYTES_CLASS;
+    default:
+      return super.getClass(schema);
+    }
+  }
+
   @Override
   @SuppressWarnings(value="unchecked")
-  protected Schema createClassSchema(Class c, Map<String,Schema> names) {
-    String name = c.getSimpleName();
-    String space = c.getPackage().getName();
-    if (c.getEnclosingClass() != null)                   // nested class
-      space = c.getEnclosingClass().getName() + "$";
-    Schema schema;
-    if (c.isEnum()) {                                    // enum
-      List<String> symbols = new ArrayList<String>();
-      Enum[] constants = (Enum[])c.getEnumConstants();
-      for (int i = 0; i < constants.length; i++)
-        symbols.add(constants[i].name());
-      schema = Schema.createEnum(name, space, symbols);
-    } else if (GenericFixed.class.isAssignableFrom(c)) { // fixed
-      int size = ((FixedSize)c.getAnnotation(FixedSize.class)).value();
-      schema = Schema.createFixed(name, space, size);
-    } else {                                             // record
-      LinkedHashMap<String,Schema.Field> fields =
-        new LinkedHashMap<String,Schema.Field>();
-      schema = Schema.createRecord(name, space,
-                                   Throwable.class.isAssignableFrom(c));
-      names.put(c.getName(), schema);
-      for (Field field : c.getDeclaredFields())
-        if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0) {
-          Schema fieldSchema = createFieldSchema(field, names);
-          fields.put(field.getName(), new Schema.Field(fieldSchema, null));
+  protected Schema createSchema(Type type, Map<String,Schema> names) {
+    if (type instanceof GenericArrayType) {                  // generic array
+      Type component = ((GenericArrayType)type).getGenericComponentType();
+      if (component == Byte.TYPE)                            // byte array
+        return Schema.create(Schema.Type.BYTES);           
+      return Schema.createArray(createSchema(component, names));
+    } else if (type instanceof ParameterizedType) {
+      ParameterizedType ptype = (ParameterizedType)type;
+      Class raw = (Class)ptype.getRawType();
+      Type[] params = ptype.getActualTypeArguments();
+      for (int i = 0; i < params.length; i++)
+        if (List.class.isAssignableFrom(raw)) {              // List
+          if (params.length != 1)
+            throw new AvroTypeException("No array type specified.");
+          Schema schema = Schema.createArray(createSchema(params[0], names));
+          setListClass(schema, raw);
+          return schema;
+        } else if (Map.class.isAssignableFrom(raw)) {        // Map
+          Type key = params[0];
+          Type value = params[1];
+          if (!(key == String.class))
+            throw new AvroTypeException("Map key class not String: "+key);
+          return Schema.createMap(createSchema(value, names));
+        }
+    } else if (type instanceof Class) {                      // Class
+      Class c = (Class)type;
+      if (c.isPrimitive() || Number.class.isAssignableFrom(c)
+          || c == Void.class || c == Boolean.class)          // primitive
+        return super.createSchema(type, names);
+      if (c.isArray()) {                                     // array
+        Class component = c.getComponentType();
+        if (component == Byte.TYPE)                          // byte array
+          return Schema.create(Schema.Type.BYTES);
+        return Schema.createArray(createSchema(component, names));
+      }
+      if (c == String.class)                                 // String
+        return Schema.create(Schema.Type.STRING);
+      String fullName = c.getName();
+      Schema schema = names.get(fullName);
+      if (schema == null) {
+        String name = c.getSimpleName();
+        String space = c.getPackage().getName();
+        if (c.getEnclosingClass() != null)                   // nested class
+          space = c.getEnclosingClass().getName() + "$";
+        if (c.isEnum()) {                                    // Enum
+          List<String> symbols = new ArrayList<String>();
+          Enum[] constants = (Enum[])c.getEnumConstants();
+          for (int i = 0; i < constants.length; i++)
+            symbols.add(constants[i].name());
+          schema = Schema.createEnum(name, space, symbols);
+        } else if (GenericFixed.class.isAssignableFrom(c)) { // fixed
+          int size = ((FixedSize)c.getAnnotation(FixedSize.class)).value();
+          schema = Schema.createFixed(name, space, size);
+        } else {                                             // record
+          LinkedHashMap<String,Schema.Field> fields =
+            new LinkedHashMap<String,Schema.Field>();
+          schema = Schema.createRecord(name, space,
+                                       Throwable.class.isAssignableFrom(c));
+          names.put(c.getName(), schema);
+          for (Field field : c.getDeclaredFields())
+            if ((field.getModifiers()&(Modifier.TRANSIENT|Modifier.STATIC))==0){
+              Schema fieldSchema = createFieldSchema(field, names);
+              fields.put(field.getName(), new Schema.Field(fieldSchema, null));
+            }
+          schema.setFields(fields);
         }
-      schema.setFields(fields);
+        names.put(fullName, schema);
+      }
+      return schema;
     }
-    return schema;
+    return super.createSchema(type, names);
   }
 
   /** Create a schema for a field. */
@@ -184,17 +300,21 @@
     LinkedHashMap<String,Schema.Field> fields =
       new LinkedHashMap<String,Schema.Field>();
     String[] paramNames = paranamer.lookupParameterNames(method);
-    java.lang.reflect.Type[] paramTypes = method.getGenericParameterTypes();
-    for (int i = 0; i < paramTypes.length; i++)
-      fields.put(paramNames[i],
-                 new Schema.Field(createSchema(paramTypes[i], names), null));
+    Type[] paramTypes = method.getGenericParameterTypes();
+    for (int i = 0; i < paramTypes.length; i++) {
+      Schema paramSchema = createSchema(paramTypes[i], names);
+      String paramName =  paramNames.length == paramTypes.length
+        ? paramNames[i]
+        : paramSchema.getName()+i;
+      fields.put(paramName, new Schema.Field(paramSchema, null));
+    }
     Schema request = Schema.createRecord(fields);
 
     Schema response = createSchema(method.getGenericReturnType(), names);
 
     List<Schema> errs = new ArrayList<Schema>();
     errs.add(Protocol.SYSTEM_ERROR);              // every method can throw
-    for (java.lang.reflect.Type err : method.getGenericExceptionTypes())
+    for (Type err : method.getGenericExceptionTypes())
       if (err != AvroRemoteException.class) 
         errs.add(createSchema(err, names));
     Schema errors = Schema.createUnion(errs);

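With the createSchema changes above, reflect schemas are derived from generic field
types directly.  A sketch of the resulting mappings (R1 and its listField are the
test record added in TestReflect below, which asserts the same JSON):

    // String              -> "string"
    // byte[]              -> "bytes"
    // String[]            -> {"type":"array","items":"string"}
    // List<String>        -> {"type":"array","items":"string"}  (List class remembered)
    // Map<String,Integer> -> {"type":"map","values":"int"}

    Schema s = ReflectData.get().getSchema(
        R1.class.getDeclaredField("listField").getGenericType());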
Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumReader.java 
Fri Nov 20 22:58:18 2009
@@ -17,9 +17,16 @@
  */
 package org.apache.avro.reflect;
 
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import java.lang.reflect.Array;
+import java.nio.ByteBuffer;
+
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.Schema;
 import org.apache.avro.specific.SpecificDatumReader;
+import org.apache.avro.io.Decoder;
 
 /**
  * {@link org.apache.avro.io.DatumReader DatumReader} for existing classes via
@@ -59,5 +66,53 @@
     addField(record, name, position, null);
   }
 
+  @Override
+  @SuppressWarnings(value="unchecked")
+  protected Object newArray(Object old, int size, Schema schema) {
+    Class listClass = ReflectData.get().getListClass(schema);
+    if (listClass != null) {
+      if (old instanceof List) {
+        ((List)old).clear();
+        return old;
+      }
+      if (listClass.isAssignableFrom(ArrayList.class))
+        return new ArrayList();
+      return newInstance(listClass);
+    }
+    Class elementClass = ReflectData.get().getClass(schema.getElementType());
+    return Array.newInstance(elementClass, size);
+  }
+
+  @Override
+  protected Object peekArray(Object array) {
+    return null;
+  }
+
+  @Override
+  @SuppressWarnings(value="unchecked")
+  protected void addToArray(Object array, long pos, Object e) {
+    if (array instanceof List) {
+      ((List)array).add(e);
+    } else {
+      Array.set(array, (int)pos, e);
+    }
+  }
+
+  @Override
+  protected Object readString(Object old, Decoder in) throws IOException {
+    return super.readString(null, in).toString();
+  }
+
+  @Override
+  protected Object createString(String value) { return value; }
+
+  @Override
+  protected Object readBytes(Object old, Decoder in) throws IOException {
+    ByteBuffer bytes = in.readBytes(null);
+    byte[] result = new byte[bytes.remaining()];
+    bytes.get(result);
+    return result;
+  }
+
 }
 

Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectDatumWriter.java 
Fri Nov 20 22:58:18 2009
@@ -17,9 +17,16 @@
  */
 package org.apache.avro.reflect;
 
+import java.lang.reflect.Array;
+import java.io.IOException;
+import java.util.Iterator;
+import java.util.List;
+
 import org.apache.avro.AvroRuntimeException;
 import org.apache.avro.Schema;
 import org.apache.avro.specific.SpecificDatumWriter;
+import org.apache.avro.io.Encoder;
+import org.apache.avro.util.Utf8;
 
 /**
  * {@link org.apache.avro.io.DatumWriter DatumWriter} for existing classes
@@ -59,5 +66,40 @@
     }
   }
   
+  @Override
+  @SuppressWarnings("unchecked")
+  protected long getArraySize(Object array) {
+    if (array instanceof List)
+      return ((List)array).size();
+    return Array.getLength(array);
+        
+  }
+
+  @Override
+  @SuppressWarnings("unchecked")
+  protected Iterator<Object> getArrayElements(final Object array) {
+    if (array instanceof List)
+      return ((List<Object>)array).iterator();
+    return new Iterator<Object>() {
+      private int i = 0;
+      private final int length = Array.getLength(array);
+      public boolean hasNext() { return i < length; }
+      public Object next() { return Array.get(array, i++); }
+      public void remove() { throw new UnsupportedOperationException(); }
+    };
+  }
+
+  @Override
+  protected void writeString(Object datum, Encoder out) throws IOException {
+    out.writeString(new Utf8((String)datum));
+  }
+
+  @Override
+  protected void writeBytes(Object datum, Encoder out) throws IOException {
+    out.writeBytes((byte[])datum);
+  }
+
+
 }
 
+

Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/reflect/ReflectResponder.java 
Fri Nov 20 22:58:18 2009
@@ -27,11 +27,11 @@
 /** {@link org.apache.avro.ipc.Responder} for existing interfaces.*/
 public class ReflectResponder extends SpecificResponder {
   public ReflectResponder(Class iface, Object impl) {
-    this(ReflectData.get().getProtocol(iface), impl);
+    super(ReflectData.get().getProtocol(iface), impl, ReflectData.get());
   }
   
   public ReflectResponder(Protocol protocol, Object impl) {
-    super(protocol, impl);
+    super(protocol, impl, ReflectData.get());
   }
 
   @Override

Modified: hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificData.java Fri 
Nov 20 22:58:18 2009
@@ -178,24 +178,19 @@
       String fullName = c.getName();
       Schema schema = names.get(fullName);
       if (schema == null)
-        schema = createClassSchema(c, names);
+        try {
+          schema = (Schema)(c.getDeclaredField("SCHEMA$").get(null));
+        } catch (NoSuchFieldException e) {
+          throw new AvroRuntimeException(e);
+        } catch (IllegalAccessException e) {
+          throw new AvroRuntimeException(e);
+        }
       names.put(fullName, schema);
       return schema;
     }
     throw new AvroTypeException("Unknown type: "+type);
   }
 
-  /** Create a schema for a Java class. */
-  protected Schema createClassSchema(Class c, Map<String,Schema> names) {
-    try {
-      return (Schema)(c.getDeclaredField("SCHEMA$").get(null));
-    } catch (NoSuchFieldException e) {
-      throw new AvroRuntimeException(e);
-    } catch (IllegalAccessException e) {
-      throw new AvroRuntimeException(e);
-    }
-  }
-
   /** Return the protocol for a Java interface. */
   public Protocol getProtocol(Class iface) {
     try {

Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- 
hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java 
(original)
+++ 
hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificDatumReader.java 
Fri Nov 20 22:58:18 2009
@@ -71,8 +71,9 @@
   private static final Map<Class,Constructor> CTOR_CACHE =
     new ConcurrentHashMap<Class,Constructor>();
 
+  /** Create an instance of a class. */
   @SuppressWarnings("unchecked")
-  private static Object newInstance(Class c) {
+  protected static Object newInstance(Class c) {
     Object result;
     try {
       Constructor meth = (Constructor)CTOR_CACHE.get(c);

Modified: 
hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java 
(original)
+++ hadoop/avro/trunk/src/java/org/apache/avro/specific/SpecificResponder.java 
Fri Nov 20 22:58:18 2009
@@ -37,14 +37,21 @@
 /** {@link org.apache.avro.ipc.Responder Responder} for generated interfaces.*/
 public class SpecificResponder extends Responder {
   private Object impl;
+  private SpecificData data;
 
   public SpecificResponder(Class iface, Object impl) {
     this(SpecificData.get().getProtocol(iface), impl);
   }
     
   public SpecificResponder(Protocol protocol, Object impl) {
+    this(protocol, impl, SpecificData.get());
+  }
+
+  protected SpecificResponder(Protocol protocol, Object impl,
+                              SpecificData data) {
     super(protocol);
     this.impl = impl;
+    this.data = data;
   }
 
   protected DatumWriter<Object> getDatumWriter(Schema schema) {
@@ -83,7 +90,7 @@
     int i = 0;
     try {
       for (Map.Entry<String,Schema> param: message.getRequest().getFieldSchemas())
-        paramTypes[i++] = SpecificData.get().getClass(param.getValue());
+        paramTypes[i++] = data.getClass(param.getValue());
       Method method = impl.getClass().getMethod(message.getName(), paramTypes);
       return method.invoke(impl, (Object[])request);
     } catch (InvocationTargetException e) {

Added: hadoop/avro/trunk/src/java/org/apache/avro/util/WeakIdentityHashMap.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/java/org/apache/avro/util/WeakIdentityHashMap.java?rev=882753&view=auto
==============================================================================
--- hadoop/avro/trunk/src/java/org/apache/avro/util/WeakIdentityHashMap.java 
(added)
+++ hadoop/avro/trunk/src/java/org/apache/avro/util/WeakIdentityHashMap.java 
Fri Nov 20 22:58:18 2009
@@ -0,0 +1,169 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.avro.util;
+
+import java.lang.ref.ReferenceQueue;
+import java.lang.ref.WeakReference;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Implements a combination of WeakHashMap and IdentityHashMap.
+ * Useful for caches that need to key off of a == comparison
+ * instead of a .equals.
+ * 
+ * <b>
+ * This class is not a general-purpose Map implementation! While
+ * this class implements the Map interface, it intentionally violates
+ * Map's general contract, which mandates the use of the equals method
+ * when comparing objects. This class is designed for use only in the
+ * rare cases wherein reference-equality semantics are required.
+ * 
+ * Note that this implementation is not synchronized.
+ * </b>
+ */
+public class WeakIdentityHashMap<K, V> implements Map<K, V> {
+  private final ReferenceQueue<K> queue = new ReferenceQueue<K>();
+  private Map<IdentityWeakReference, V> backingStore
+    = new HashMap<IdentityWeakReference, V>();
+
+  public WeakIdentityHashMap() {}
+
+  public void clear() {
+    backingStore.clear();
+    reap();
+  }
+
+  public boolean containsKey(Object key) {
+    reap();
+    return backingStore.containsKey(new IdentityWeakReference(key));
+  }
+
+  public boolean containsValue(Object value)  {
+    reap();
+    return backingStore.containsValue(value);
+  }
+
+  public Set<Map.Entry<K, V>> entrySet() {
+    reap();
+    Set<Map.Entry<K, V>> ret = new HashSet<Map.Entry<K, V>>();
+    for (Map.Entry<IdentityWeakReference, V> ref : backingStore.entrySet()) {
+      final K key = ref.getKey().get();
+      final V value = ref.getValue();
+      Map.Entry<K, V> entry = new Map.Entry<K, V>() {
+        public K getKey() {
+          return key;
+        }
+        public V getValue() {
+          return value;
+        }
+        public V setValue(V value) {
+          throw new UnsupportedOperationException();
+        }
+      };
+      ret.add(entry);
+    }
+    return Collections.unmodifiableSet(ret);
+  }
+
+  public Set<K> keySet() {
+    reap();
+    Set<K> ret = new HashSet<K>();
+    for (IdentityWeakReference ref : backingStore.keySet()) {
+      ret.add(ref.get());
+    }
+    return Collections.unmodifiableSet(ret);
+  }
+
+  public boolean equals(Object o) {
+    return backingStore.equals(((WeakIdentityHashMap)o).backingStore);
+  }
+
+  public V get(Object key) {
+    reap();
+    return backingStore.get(new IdentityWeakReference(key));
+  }
+  public V put(K key, V value) {
+    reap();
+    return backingStore.put(new IdentityWeakReference(key), value);
+  }
+
+  public int hashCode() {
+    reap();
+    return backingStore.hashCode();
+  }
+  public boolean isEmpty() {
+    reap();
+    return backingStore.isEmpty();
+  }
+  public void putAll(Map t) {
+    throw new UnsupportedOperationException();
+  }
+  public V remove(Object key) {
+    reap();
+    return backingStore.remove(new IdentityWeakReference(key));
+  }
+  public int size() {
+    reap();
+    return backingStore.size();
+  }
+  public Collection<V> values() {
+    reap();
+    return backingStore.values();
+  }
+
+  private synchronized void reap() {
+      Object zombie = queue.poll();
+
+      while (zombie != null) {
+        IdentityWeakReference victim = (IdentityWeakReference)zombie;
+        backingStore.remove(victim);
+        zombie = queue.poll();
+      }
+    }
+
+  class IdentityWeakReference extends WeakReference<K> {
+    int hash;
+        
+    @SuppressWarnings("unchecked")
+      IdentityWeakReference(Object obj) {
+      super((K)obj, queue);
+      hash = System.identityHashCode(obj);
+    }
+
+    public int hashCode() {
+      return hash;
+    }
+
+    public boolean equals(Object o) {
+      if (this == o) {
+        return true;
+      }
+      IdentityWeakReference ref = (IdentityWeakReference)o;
+      if (this.get() == ref.get()) {
+        return true;
+      }
+      return false;
+    }
+  }
+}

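To illustrate the identity semantics (toy example; in this patch the keys are Schema
instances held by ReflectData's LIST_CLASSES cache, and entries disappear once a key
is garbage collected):

    Map<Object,String> m = new WeakIdentityHashMap<Object,String>();
    Object k1 = new String("key");
    Object k2 = new String("key");   // equals(k1), but a different instance
    m.put(k1, "first");
    m.get(k1);                       // "first"
    m.get(k2);                       // null: lookup is by ==, not equals()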
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/BarRecord.java Fri Nov 20 
22:58:18 2009
@@ -17,16 +17,14 @@
  */
 package org.apache.avro;
 
-import org.apache.avro.util.Utf8;
-
 public class BarRecord {
-  private Utf8 beerMsg;
+  private String beerMsg;
 
   public BarRecord() {
   }
 
   public BarRecord(String beerMsg) {
-    this.beerMsg = new Utf8(beerMsg);
+    this.beerMsg = beerMsg;
   }
 
   @Override

Modified: 
hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java 
(original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestProtocolReflect.java 
Fri Nov 20 22:58:18 2009
@@ -17,16 +17,51 @@
  */
 package org.apache.avro;
 
+import org.apache.avro.ipc.Server;
+import org.apache.avro.ipc.Transceiver;
 import org.apache.avro.ipc.SocketServer;
 import org.apache.avro.ipc.SocketTransceiver;
 import org.apache.avro.reflect.ReflectRequestor;
 import org.apache.avro.reflect.ReflectResponder;
-import org.apache.avro.test.Simple;
+
+import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 
 import java.net.InetSocketAddress;
+import java.util.Random;
+import java.io.IOException;
+
+public class TestProtocolReflect {
+
+  public static class TestRecord {
+    private String name;
+    public int hashCode() { return this.name.hashCode(); }
+    public boolean equals(Object that) {
+      return this.name.equals(((TestRecord)that).name);
+    }
+  }
+
+  public interface Simple {
+    String hello(String greeting);
+    TestRecord echo(TestRecord record);
+    int add(int arg1, int arg2);
+    byte[] echoBytes(byte[] data);
+  }
+  
+  public static class TestImpl implements Simple {
+    public String hello(String greeting) { return "goodbye"; }
+    public int add(int arg1, int arg2) { return arg1 + arg2; }
+    public TestRecord echo(TestRecord record) { return record; }
+    public byte[] echoBytes(byte[] data) { return data; }
+  }
+
 
-public class TestProtocolReflect extends TestProtocolSpecific {
+  protected static Server server;
+  protected static Transceiver client;
+  protected static Simple proxy;
 
   @Before
   public void testStartServer() throws Exception {
@@ -36,4 +71,41 @@
     proxy = (Simple)ReflectRequestor.getClient(Simple.class, client);
   }
 
+  @Test
+  public void testHello() throws IOException {
+    String response = proxy.hello("bob");
+    assertEquals("goodbye", response);
+  }
+
+  @Test
+  public void testEcho() throws IOException {
+    TestRecord record = new TestRecord();
+    record.name = "foo";
+    TestRecord echoed = proxy.echo(record);
+    assertEquals(record, echoed);
+  }
+
+  @Test
+  public void testAdd() throws IOException {
+    int result = proxy.add(1, 2);
+    assertEquals(3, result);
+  }
+
+  @Test
+  public void testEchoBytes() throws IOException {
+    Random random = new Random();
+    int length = random.nextInt(1024*16);
+    byte[] data = new byte[length];
+    random.nextBytes(data);
+    byte[] echoed = proxy.echoBytes(data);
+    assertArrayEquals(data, echoed);
+  }
+
+  @After
+  public void testStopServer() throws IOException {
+    client.close();
+    server.close();
+  }
+
+
 }

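For reference, the reflect RPC wiring these tests rely on looks roughly like the
following (sketch only; the full testStartServer body lies outside the hunk above):

    server = new SocketServer(new ReflectResponder(Simple.class, new TestImpl()),
                              new InetSocketAddress(0));
    client = new SocketTransceiver(new InetSocketAddress(server.getPort()));
    proxy = (Simple)ReflectRequestor.getClient(Simple.class, client);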
Modified: hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java
URL: 
http://svn.apache.org/viewvc/hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java?rev=882753&r1=882752&r2=882753&view=diff
==============================================================================
--- hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java (original)
+++ hadoop/avro/trunk/src/test/java/org/apache/avro/TestReflect.java Fri Nov 20 
22:58:18 2009
@@ -21,8 +21,13 @@
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
-import java.io.File;
 import java.io.IOException;
+import java.lang.reflect.Type;
+import java.util.Map;
+import java.util.List;
+import java.util.HashMap;
+import java.util.ArrayList;
+import java.util.Arrays;
 
 import org.apache.avro.TestReflect.SampleRecord.AnotherSampleRecord;
 import org.apache.avro.io.BinaryDecoder;
@@ -30,35 +35,158 @@
 import org.apache.avro.reflect.ReflectData;
 import org.apache.avro.reflect.ReflectDatumReader;
 import org.apache.avro.reflect.ReflectDatumWriter;
-import org.apache.avro.test.Simple;
-import org.apache.avro.test.TestRecord;
+
 import org.junit.Test;
 
 public class TestReflect {
 
-  private static final File FILE = new File("src/test/schemata/simple.avpr");
-  private static final Protocol PROTOCOL;
-  static {
-    try {
-      PROTOCOL = Protocol.parse(FILE);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+  @Test public void testVoid() {
+    check(Void.TYPE, "\"null\"");
+    check(Void.class, "\"null\"");
+  }
+
+  @Test public void testBoolean() {
+    check(Boolean.TYPE, "\"boolean\"");
+    check(Boolean.class, "\"boolean\"");
+  }
+
+  @Test public void testInt() {
+    check(Integer.TYPE, "\"int\"");
+    check(Integer.class, "\"int\"");
+  }
+
+  @Test public void testLong() {
+    check(Long.TYPE, "\"long\"");
+    check(Long.class, "\"long\"");
+  }
+
+  @Test public void testFloat() {
+    check(Float.TYPE, "\"float\"");
+    check(Float.class, "\"float\"");
+  }
+
+  @Test public void testDouble() {
+    check(Double.TYPE, "\"double\"");
+    check(Double.class, "\"double\"");
+  }
+
+  @Test public void testString() {
+    check("Foo", "\"string\"");
+  }
+
+  @Test public void testBytes() {
+    check(new byte[0], "\"bytes\"");
+  }
+
+  public static class R1 {
+    private Map<String,String> mapField = new HashMap<String,String>();
+    private String[] arrayField = new String[] { "foo" };
+    private List<String> listField = new ArrayList<String>();
+
+    {
+      mapField.put("foo", "bar");
+      listField.add("foo");
+    }
+    
+    public boolean equals(Object o) {
+      if (!(o instanceof R1)) return false;
+      R1 that = (R1)o;
+      return mapField.equals(that.mapField)
+        && Arrays.equals(this.arrayField, that.arrayField) 
+        &&  listField.equals(that.listField);
     }
   }
 
-  @Test
-  public void testSchema() throws IOException {
-    assertEquals(PROTOCOL.getType("TestRecord"),
-                 ReflectData.get().getSchema(TestRecord.class));
+  @Test public void testMap() throws Exception {
+    check(R1.class.getDeclaredField("mapField").getGenericType(),
+          "{\"type\":\"map\",\"values\":\"string\"}");
   }
 
-  @Test
-  public void testProtocol() throws IOException {
-    assertEquals(PROTOCOL, ReflectData.get().getProtocol(Simple.class));
+  @Test public void testArray() throws Exception {
+    check(R1.class.getDeclaredField("arrayField").getGenericType(),
+          "{\"type\":\"array\",\"items\":\"string\"}");
+  }
+  @Test public void testList() throws Exception {
+    check(R1.class.getDeclaredField("listField").getGenericType(),
+          "{\"type\":\"array\",\"items\":\"string\"}");
+  }
+
+  @Test public void testR1() throws Exception {
+    checkReadWrite(new R1());
+  }
+
+  public static class R2 {
+    private String[] arrayField;
+    private List<String> listField;
+    
+    public boolean equals(Object o) {
+      if (!(o instanceof R2)) return false;
+      R2 that = (R2)o;
+      return Arrays.equals(this.arrayField, that.arrayField) 
+        &&  listField.equals(that.listField);
+    }
+  }
+
+  @Test public void testR2() throws Exception {
+    R2 r2 = new R2();
+    r2.arrayField = new String[] {"foo"};
+    r2.listField = new ArrayList<String>();
+    r2.listField.add("foo");
+    checkReadWrite(r2);
+  }
+
+  public static class R3 {
+    private int[] intArray;
+    
+    public boolean equals(Object o) {
+      if (!(o instanceof R3)) return false;
+      R3 that = (R3)o;
+      return Arrays.equals(this.intArray, that.intArray);
+    }
+  }
+
+  @Test public void testR3() throws Exception {
+    R3 r3 = new R3();
+    r3.intArray = new int[] {1};
+    checkReadWrite(r3);
+  }
+
+  void checkReadWrite(Object object) throws Exception {
+    Schema s = ReflectData.get().getSchema(object.getClass());
+    ReflectDatumWriter writer = new ReflectDatumWriter(s);
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    writer.write(object, new BinaryEncoder(out));
+    ReflectDatumReader reader = new ReflectDatumReader(s);
+    Object after =
+      reader.read(null, new BinaryDecoder
+                  (new ByteArrayInputStream(out.toByteArray())));
+    assertEquals(object, after);
+  }
+
+  public static enum E { A, B };
+  @Test public void testEnum() throws Exception {
+    check(E.class, "{\"type\":\"enum\",\"name\":\"E\",\"namespace\":"
+          +"\"org.apache.avro.TestReflect$\",\"symbols\":[\"A\",\"B\"]}");
+  }
+
+  public static class R { int a; long b; }
+  @Test public void testRecord() throws Exception {
+    check(R.class, "{\"type\":\"record\",\"name\":\"R\",\"namespace\":"
+          +"\"org.apache.avro.TestReflect$\",\"fields\":["
+          +"{\"name\":\"a\",\"type\":\"int\"},"
+          +"{\"name\":\"b\",\"type\":\"long\"}]}");
+  }
+
+  private void check(Object o, String schemaJson) {
+    check(o.getClass(), schemaJson);
+  }
+
+  private void check(Type type, String schemaJson) {
+    assertEquals(schemaJson, ReflectData.get().getSchema(type).toString());
   }
 
   @Test
-  public void testRecord() throws IOException {
+  public void testRecordIO() throws IOException {
     Schema schm = ReflectData.get().getSchema(SampleRecord.class);
     ReflectDatumWriter writer = new ReflectDatumWriter(schm);
     ByteArrayOutputStream out = new ByteArrayOutputStream();
@@ -74,7 +202,7 @@
   }
 
   @Test
-  public void testRecordWithNull() throws IOException {
+  public void testRecordWithNullIO() throws IOException {
     ReflectData reflectData = ReflectData.AllowNull.get();
     Schema schm = reflectData.getSchema(AnotherSampleRecord.class);
     ReflectDatumWriter writer = new ReflectDatumWriter(schm);

