Author: brock
Date: Fri Feb 13 18:41:54 2015
New Revision: 1659644

URL: http://svn.apache.org/r1659644
Log:
HIVE-9605 - Remove parquet nested objects from wrapper writable objects (Sergio via Brock)

Modified:
    hive/branches/parquet/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
    hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
    hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java

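Illustrative note (not part of the commit): the patch removes the synthetic container layer that the Parquet converters used to wrap around list and map data, so the ObjectInspectors now read the elements directly from the ArrayWritable. The sketch below is hypothetical (class name and values are invented) and only contrasts the Writable shape before and after this change.

    import org.apache.hadoop.io.ArrayWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Writable;

    public class WrapperShapeSketch {
      public static void main(String[] args) {
        Writable[] elements = new Writable[] {
            new IntWritable(34), new IntWritable(35), new IntWritable(36)};

        // Before HIVE-9605: a list arrived as an ArrayWritable holding one inner
        // ArrayWritable, and the inspectors peeled off the container first
        // (the listContainer[0] / mapContainer[0] accesses removed by this patch).
        ArrayWritable wrapped = new ArrayWritable(Writable.class,
            new Writable[] {new ArrayWritable(Writable.class, elements)});
        int oldSize = ((ArrayWritable) wrapped.get()[0]).get().length;  // 3, via the wrapper

        // After HIVE-9605: the elements are set directly, so the inspectors use
        // array.length and array[index] on the ArrayWritable itself.
        ArrayWritable flat = new ArrayWritable(Writable.class, elements);
        int newSize = flat.get().length;  // 3, directly

        System.out.println(oldSize + " " + newSize);
      }
    }
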
Modified: hive/branches/parquet/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java (original)
+++ hive/branches/parquet/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java Fri Feb 13 18:41:54 2015
@@ -209,13 +209,13 @@ public class ColumnarStorageBench {
         case LIST: {
           List<TypeInfo> elementType = new ArrayList<TypeInfo>();
           elementType.add(((ListTypeInfo) type).getListElementTypeInfo());
-          fields[pos++] = record(createRecord(elementType));
+          fields[pos++] = createRecord(elementType);
         } break;
         case MAP: {
           List<TypeInfo> keyValueType = new ArrayList<TypeInfo>();
           keyValueType.add(((MapTypeInfo) type).getMapKeyTypeInfo());
           keyValueType.add(((MapTypeInfo) type).getMapValueTypeInfo());
-          fields[pos++] = record(record(createRecord(keyValueType)));
+          fields[pos++] = record(createRecord(keyValueType));
         } break;
         case STRUCT: {
          List<TypeInfo> elementType = ((StructTypeInfo) type).getAllStructFieldTypeInfos();

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveCollectionConverter.java Fri Feb 13 18:41:54 2015
@@ -3,7 +3,6 @@ package org.apache.hadoop.hive.ql.io.par
 import com.google.common.base.Preconditions;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.Writable;
@@ -65,8 +64,8 @@ public class HiveCollectionConverter ext
 
   @Override
   public void end() {
-    parent.set(index, wrapList(new ArrayWritable(
-        Writable.class, list.toArray(new Writable[list.size()]))));
+    parent.set(index, new ArrayWritable(
+        Writable.class, list.toArray(new Writable[0])));
   }
 
   @Override

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/HiveGroupConverter.java Fri Feb 13 18:41:54 2015
@@ -13,7 +13,6 @@
  */
 package org.apache.hadoop.hive.ql.io.parquet.convert;
 
-import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.Writable;
 import parquet.io.api.Converter;
 import parquet.io.api.GroupConverter;
@@ -72,15 +71,6 @@ public abstract class HiveGroupConverter
     return getConverterFromDescription(type.asGroupType(), index, parent);
   }
 
-  /**
-   * The original list and map conversion didn't remove the synthetic layer and
-   * the ObjectInspector had to remove it. This is a temporary fix that adds an
-   * extra layer for the ObjectInspector to remove.
-   */
-  static ArrayWritable wrapList(ArrayWritable list) {
-    return new ArrayWritable(Writable.class, new Writable[] {list});
-  }
-
   public abstract void set(int index, Writable value);
 
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/convert/Repeated.java Fri Feb 13 18:41:54 2015
@@ -107,8 +107,8 @@ public interface Repeated extends Conver
 
     @Override
     public void parentEnd() {
-      parent.set(index, HiveGroupConverter.wrapList(new ArrayWritable(
-          Writable.class, list.toArray(new Writable[list.size()]))));
+      parent.set(index, new ArrayWritable(
+          Writable.class, list.toArray(new Writable[list.size()])));
     }
 
     @Override
@@ -167,8 +167,8 @@ public interface Repeated extends Conver
 
     @Override
     public void parentEnd() {
-      parent.set(index, wrapList(new ArrayWritable(
-          Writable.class, list.toArray(new Writable[list.size()]))));
+      parent.set(index, new ArrayWritable(
+          Writable.class, list.toArray(new Writable[list.size()])));
     }
   }
 }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/AbstractParquetMapInspector.java Fri Feb 13 18:41:54 2015
@@ -59,15 +59,12 @@ public abstract class AbstractParquetMap
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapContainer = ((ArrayWritable) data).get();
-
-      if (mapContainer == null || mapContainer.length == 0) {
+      final Writable[] mapArray = ((ArrayWritable) data).get();
+      if (mapArray == null || mapArray.length == 0) {
         return null;
       }
 
-      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
      final Map<Writable, Writable> map = new LinkedHashMap<Writable, Writable>();
-
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();
@@ -91,12 +88,12 @@ public abstract class AbstractParquetMap
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapContainer = ((ArrayWritable) data).get();
+      final Writable[] mapArray = ((ArrayWritable) data).get();
 
-      if (mapContainer == null || mapContainer.length == 0) {
+      if (mapArray == null || mapArray.length == 0) {
         return -1;
       } else {
-        return ((ArrayWritable) mapContainer[0]).get().length;
+        return mapArray.length;
       }
     }
 

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/DeepParquetHiveMapInspector.java Fri Feb 13 18:41:54 2015
@@ -40,14 +40,12 @@ public class DeepParquetHiveMapInspector
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] mapContainer = ((ArrayWritable) data).get();
+      final Writable[] mapArray = ((ArrayWritable) data).get();
 
-      if (mapContainer == null || mapContainer.length == 0) {
+      if (mapArray == null || mapArray.length == 0) {
         return null;
       }
 
-      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
-
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/ParquetHiveArrayInspector.java Fri Feb 13 18:41:54 2015
@@ -56,20 +56,13 @@ public class ParquetHiveArrayInspector i
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] listContainer = ((ArrayWritable) data).get();
-
-      if (listContainer == null || listContainer.length == 0) {
-        return null;
-      }
-
-      final Writable subObj = listContainer[0];
-
-      if (subObj == null) {
+      final Writable[] array = ((ArrayWritable) data).get();
+      if (array == null || array.length == 0) {
         return null;
       }
 
-      if (index >= 0 && index < ((ArrayWritable) subObj).get().length) {
-        return ((ArrayWritable) subObj).get()[index];
+      if (index >= 0 && index < array.length) {
+        return array[index];
       } else {
         return null;
       }
@@ -85,19 +78,12 @@ public class ParquetHiveArrayInspector i
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] listContainer = ((ArrayWritable) data).get();
-
-      if (listContainer == null || listContainer.length == 0) {
+      final Writable[] array = ((ArrayWritable) data).get();
+      if (array == null || array.length == 0) {
         return -1;
       }
 
-      final Writable subObj = listContainer[0];
-
-      if (subObj == null) {
-        return 0;
-      }
-
-      return ((ArrayWritable) subObj).get().length;
+      return array.length;
     }
 
    throw new UnsupportedOperationException("Cannot inspect " + data.getClass().getCanonicalName());
@@ -110,21 +96,12 @@ public class ParquetHiveArrayInspector i
     }
 
     if (data instanceof ArrayWritable) {
-      final Writable[] listContainer = ((ArrayWritable) data).get();
-
-      if (listContainer == null || listContainer.length == 0) {
+      final Writable[] array = ((ArrayWritable) data).get();
+      if (array == null || array.length == 0) {
         return null;
       }
 
-      final Writable subObj = listContainer[0];
-
-      if (subObj == null) {
-        return null;
-      }
-
-      final Writable[] array = ((ArrayWritable) subObj).get();
-      final List<Writable> list = new ArrayList<Writable>();
-
+      final List<Writable> list = new ArrayList<Writable>(array.length);
       for (final Writable obj : array) {
         list.add(obj);
       }

Modified: hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java (original)
+++ hive/branches/parquet/ql/src/java/org/apache/hadoop/hive/ql/io/parquet/serde/StandardParquetHiveMapInspector.java Fri Feb 13 18:41:54 2015
@@ -37,12 +37,12 @@ public class StandardParquetHiveMapInspe
       return null;
     }
     if (data instanceof ArrayWritable) {
-      final Writable[] mapContainer = ((ArrayWritable) data).get();
+      final Writable[] mapArray = ((ArrayWritable) data).get();
 
-      if (mapContainer == null || mapContainer.length == 0) {
+      if (mapArray == null || mapArray.length == 0) {
         return null;
       }
-      final Writable[] mapArray = ((ArrayWritable) mapContainer[0]).get();
+
       for (final Writable obj : mapArray) {
         final ArrayWritable mapObj = (ArrayWritable) obj;
         final Writable[] arr = mapObj.get();

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestArrayCompatibility.java Fri Feb 13 18:41:54 2015
@@ -44,8 +44,8 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+    ArrayWritable expected = list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -91,9 +91,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new FloatWritable(1.0f), new FloatWritable(1.0f)),
-        record(new FloatWritable(2.0f), new FloatWritable(2.0f))));
+        record(new FloatWritable(2.0f), new FloatWritable(2.0f)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -130,8 +130,8 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+    ArrayWritable expected = list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -180,9 +180,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new LongWritable(1234L)),
-        record(new LongWritable(2345L))));
+        record(new LongWritable(2345L)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -219,8 +219,8 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
-        new IntWritable(34), new IntWritable(35), new IntWritable(36)));
+    ArrayWritable expected = list(
+        new IntWritable(34), new IntWritable(35), new IntWritable(36));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -269,9 +269,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new LongWritable(1234L)),
-        record(new LongWritable(2345L))));
+        record(new LongWritable(2345L)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -321,9 +321,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         new LongWritable(1234L),
-        new LongWritable(2345L)));
+        new LongWritable(2345L));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -379,9 +379,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
+        record(new DoubleWritable(0.0), new DoubleWritable(180.0)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -455,10 +455,10 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new DoubleWritable(0.0), new DoubleWritable(0.0)),
         null,
-        record(new DoubleWritable(0.0), new DoubleWritable(180.0))));
+        record(new DoubleWritable(0.0), new DoubleWritable(180.0)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -528,9 +528,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
+        record(new DoubleWritable(0.0), new DoubleWritable(0.0)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -601,9 +601,9 @@ public class TestArrayCompatibility exte
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new DoubleWritable(0.0), new DoubleWritable(180.0)),
-        record(new DoubleWritable(0.0), new DoubleWritable(0.0))));
+        record(new DoubleWritable(0.0), new DoubleWritable(0.0)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestMapStructures.java Fri Feb 13 18:41:54 2015
@@ -64,9 +64,9 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new Text("lettuce"), new IntWritable(34)),
-        record(new Text("cabbage"), new IntWritable(18))));
+        record(new Text("cabbage"), new IntWritable(18)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -131,10 +131,10 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(
+    ArrayWritable expected = list(
         record(new Text("lettuce"), new IntWritable(34)),
         record(new Text("kale"), null),
-        record(new Text("cabbage"), new IntWritable(18))));
+        record(new Text("cabbage"), new IntWritable(18)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -212,9 +212,9 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(
-        record(new Text("green"), list(new Text("lettuce"), new Text("kale"), 
null)),
-        record(new Text("brown"), null)));
+    ArrayWritable expected = list(
+        record(new Text("green"), record(new Text("lettuce"), new 
Text("kale"), null)),
+        record(new Text("brown"), null));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -307,9 +307,9 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(
-        record(new Text("low"), list(new IntWritable(34), new IntWritable(35), 
null)),
-        record(new Text("high"), list(new IntWritable(340), new 
IntWritable(360)))));
+    ArrayWritable expected = list(
+        record(new Text("low"), record(new IntWritable(34), new 
IntWritable(35), null)),
+        record(new Text("high"), record(new IntWritable(340), new 
IntWritable(360))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -368,9 +368,9 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(record(
+    ArrayWritable expected = list(record(
         record(new IntWritable(7), new IntWritable(22)),
-        new DoubleWritable(3.14))));
+        new DoubleWritable(3.14)));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -429,9 +429,9 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(record(
+    ArrayWritable expected = list(record(
         new DoubleWritable(3.14),
-        record(new IntWritable(7), new IntWritable(22)))));
+        record(new IntWritable(7), new IntWritable(22))));
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());
@@ -524,13 +524,13 @@ public class TestMapStructures extends A
           }
         });
 
-    ArrayWritable expected = record(list(
-        record(new Text("a"), list(
+    ArrayWritable expected = list(
+        record(new Text("a"), record(
             record(new Text("b"), new IntWritable(1)))),
-        record(new Text("b"), list(
+        record(new Text("b"), record(
             record(new Text("a"), new IntWritable(-1)),
             record(new Text("b"), new IntWritable(-2))))
-    ));
+    );
 
     List<ArrayWritable> records = read(test);
     Assert.assertEquals("Should have only one record", 1, records.size());

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/TestParquetSerDe.java Fri Feb 13 18:41:54 2015
@@ -55,7 +55,6 @@ public class TestParquetSerDe extends Te
       arr[4] = new DoubleWritable((double) 5.3);
       arr[5] = new BytesWritable("hive and hadoop and parquet. Big family.".getBytes("UTF-8"));
       arr[6] = new BytesWritable("parquetSerde binary".getBytes("UTF-8"));
-      final Writable[] mapContainer = new Writable[1];
       final Writable[] map = new Writable[3];
       for (int i = 0; i < 3; ++i) {
         final Writable[] pair = new Writable[2];
@@ -63,16 +62,13 @@ public class TestParquetSerDe extends Te
         pair[1] = new IntWritable(i);
         map[i] = new ArrayWritable(Writable.class, pair);
       }
-      mapContainer[0] = new ArrayWritable(Writable.class, map);
-      arr[7] = new ArrayWritable(Writable.class, mapContainer);
+      arr[7] = new ArrayWritable(Writable.class, map);
 
-      final Writable[] arrayContainer = new Writable[1];
       final Writable[] array = new Writable[5];
       for (int i = 0; i < 5; ++i) {
         array[i] = new BytesWritable(("elem_" + i).getBytes("UTF-8"));
       }
-      arrayContainer[0] = new ArrayWritable(Writable.class, array);
-      arr[8] = new ArrayWritable(Writable.class, arrayContainer);
+      arr[8] = new ArrayWritable(Writable.class, array);
 
       final ArrayWritable arrWritable = new ArrayWritable(Writable.class, arr);
       // Test

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestAbstractParquetMapInspector.java Fri Feb 13 18:41:54 2015
@@ -71,11 +71,9 @@ public class TestAbstractParquetMapInspe
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};

-    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
      new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});

-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
-
     final Map<Writable, Writable> expected = new HashMap<Writable, Writable>();
     expected.put(new IntWritable(0), new IntWritable(1));
     expected.put(new IntWritable(2), new IntWritable(3));

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestDeepParquetHiveMapInspector.java Fri Feb 13 18:41:54 2015
@@ -58,11 +58,9 @@ public class TestDeepParquetHiveMapInspe
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};

-    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
      new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});

-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
-
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
     assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new ShortWritable((short) 0)));

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestParquetHiveArrayInspector.java Fri Feb 13 18:41:54 2015
@@ -58,9 +58,8 @@ public class TestParquetHiveArrayInspect
 
   @Test
   public void testRegularList() {
-    final ArrayWritable internalList = new ArrayWritable(Writable.class,
+    final ArrayWritable list = new ArrayWritable(Writable.class,
             new Writable[]{new IntWritable(3), new IntWritable(5), new IntWritable(1)});
-    final ArrayWritable list = new ArrayWritable(ArrayWritable.class, new ArrayWritable[]{internalList});
 
     final List<Writable> expected = new ArrayList<Writable>();
     expected.add(new IntWritable(3));

Modified: hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java
URL: http://svn.apache.org/viewvc/hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java?rev=1659644&r1=1659643&r2=1659644&view=diff
==============================================================================
--- hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java (original)
+++ hive/branches/parquet/ql/src/test/org/apache/hadoop/hive/ql/io/parquet/serde/TestStandardParquetHiveMapInspector.java Fri Feb 13 18:41:54 2015
@@ -57,11 +57,9 @@ public class TestStandardParquetHiveMapI
     final Writable[] entry1 = new Writable[]{new IntWritable(0), new IntWritable(1)};
     final Writable[] entry2 = new Writable[]{new IntWritable(2), new IntWritable(3)};

-    final ArrayWritable internalMap = new ArrayWritable(ArrayWritable.class, new Writable[]{
+    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{
      new ArrayWritable(Writable.class, entry1), new ArrayWritable(Writable.class, entry2)});

-    final ArrayWritable map = new ArrayWritable(ArrayWritable.class, new Writable[]{internalMap});
-
     assertEquals("Wrong result of inspection", new IntWritable(1), inspector.getMapValueElement(map, new IntWritable(0)));
     assertEquals("Wrong result of inspection", new IntWritable(3), inspector.getMapValueElement(map, new IntWritable(2)));
     assertNull("Wrong result of inspection", inspector.getMapValueElement(map, new ShortWritable((short) 0)));

