lcspinter commented on a change in pull request #2078:
URL: https://github.com/apache/iceberg/pull/2078#discussion_r556586758



##########
File path: mr/src/main/java/org/apache/iceberg/mr/hive/Deserializer.java
##########
@@ -221,51 +223,54 @@ public ObjectInspector listElementPartner(ObjectInspector 
inspector) {
   /**
    * Hive query results schema column names do not match the target Iceberg 
column names.
    * Instead we have to rely on the column order. To keep the other parts of 
the code generic we fix this with a
-   * wrapper around the ObjectInspector. This wrapper uses the Iceberg schema 
column names instead of the Hive column
-   * names for {@link #getStructFieldRef(String) getStructFieldRef}
+   * wrapper around the ObjectInspectorPair. This wrapper maps the Iceberg 
schema column names instead of the Hive
+   * column names.
    */
-  private static class FixNameMappingObjectInspector extends 
StructObjectInspector {
-    private final StructObjectInspector innerInspector;
-    private final Map<String, StructField> nameMap;
+  private static class FixNameMappingObjectInspectorPair extends 
ObjectInspectorPair {
+    private final Map<String, String> sourceNameMap;
+
+    FixNameMappingObjectInspectorPair(Schema schema, ObjectInspectorPair pair) 
{
+      super(pair.writerInspector(), pair.sourceInspector());
 
-    private FixNameMappingObjectInspector(Schema schema, ObjectInspector 
inspector) {
-      this.nameMap = new HashMap<>(schema.columns().size());
-      this.innerInspector = (StructObjectInspector) inspector;
-      List<? extends StructField> fields = 
innerInspector.getAllStructFieldRefs();
+      this.sourceNameMap = new HashMap<>(schema.columns().size());
 
+      List<? extends StructField> fields = ((StructObjectInspector) 
sourceInspector()).getAllStructFieldRefs();
       for (int i = 0; i < schema.columns().size(); ++i) {
-        nameMap.put(schema.columns().get(i).name(), fields.get(i));
+        sourceNameMap.put(schema.columns().get(i).name(), 
fields.get(i).getFieldName());
       }
     }
 
     @Override
-    public List<? extends StructField> getAllStructFieldRefs() {
-      return innerInspector.getAllStructFieldRefs();
+    String sourceName(String originalName) {
+      return sourceNameMap.get(originalName);
     }
+  }
 
-    @Override
-    public StructField getStructFieldRef(String fieldName) {
-      return nameMap.get(fieldName);
-    }
+  /**
+   * To get the data for Iceberg {@link Record}s we have to use the Hive 
ObjectInspectors (sourceInspector) to get

Review comment:
       nit: Could you please reformulate this doc? Splitting the long 
sentence into two shorter ones would make it easier to read :)

##########
File path: 
mr/src/main/java/org/apache/iceberg/mr/hive/serde/objectinspector/WriteObjectInspector.java
##########
@@ -19,6 +19,11 @@
 
 package org.apache.iceberg.mr.hive.serde.objectinspector;
 
+/**
+ * Interface for converting the Hive primitive objects for to the objects 
which could be added to an Iceberg Record.

Review comment:
       typo: "... for to ..." — the "for" looks accidental; this should 
presumably read "... objects to the objects ..."

##########
File path: mr/src/main/java/org/apache/iceberg/mr/hive/Deserializer.java
##########
@@ -78,62 +82,55 @@ Record deserialize(Object data) {
     return (Record) fieldDeserializer.value(data);
   }
 
-  private Deserializer(Schema schema, ObjectInspector fieldInspector) {
-    this.fieldDeserializer = DeserializerVisitor.visit(schema, fieldInspector);
+  private Deserializer(Schema schema, ObjectInspectorPair pair) {
+    this.fieldDeserializer = DeserializerVisitor.visit(schema, pair);
   }
 
-  private static class DeserializerVisitor extends 
SchemaWithPartnerVisitor<ObjectInspector, FieldDeserializer> {
+  private static class DeserializerVisitor extends 
SchemaWithPartnerVisitor<ObjectInspectorPair, FieldDeserializer> {
 
-    public static FieldDeserializer visit(Schema schema, ObjectInspector 
objectInspector) {
-      return visit(schema, new FixNameMappingObjectInspector(schema, 
objectInspector), new DeserializerVisitor(),
+    public static FieldDeserializer visit(Schema schema, ObjectInspectorPair 
pair) {
+      return visit(schema, new FixNameMappingObjectInspectorPair(schema, 
pair), new DeserializerVisitor(),
           new PartnerObjectInspectorByNameAccessors());
     }
 
     @Override
-    public FieldDeserializer schema(Schema schema, ObjectInspector inspector, 
FieldDeserializer deserializer) {
+    public FieldDeserializer schema(Schema schema, ObjectInspectorPair pair, 
FieldDeserializer deserializer) {
       return deserializer;
     }
 
     @Override
-    public FieldDeserializer field(NestedField field, ObjectInspector 
inspector, FieldDeserializer deserializer) {
+    public FieldDeserializer field(NestedField field, ObjectInspectorPair 
pair, FieldDeserializer deserializer) {
       return deserializer;
     }
 
     @Override
-    public FieldDeserializer primitive(PrimitiveType type, ObjectInspector 
inspector) {
-      switch (type.typeId()) {
-        case BOOLEAN:
-        case INTEGER:
-        case LONG:
-        case FLOAT:
-        case DOUBLE:
-        case STRING:
-          // Generic conversions where Iceberg and Hive are using the same 
java object
-          return o -> ((PrimitiveObjectInspector) 
inspector).getPrimitiveJavaObject(o);
-        case UUID:
-          // TODO: This will not work with Parquet. Parquet UUID expect 
byte[], others are expecting UUID
-          return o -> UUID.fromString(((StringObjectInspector) 
inspector).getPrimitiveJavaObject(o));
-        case DATE:
-        case TIMESTAMP:
-        case FIXED:
-        case BINARY:
-        case DECIMAL:
-          // Iceberg specific conversions
-          return o -> ((WriteObjectInspector) inspector).convert(o);
-        case TIME:
-        default:
-          throw new IllegalArgumentException("Unsupported column type: " + 
type);
-      }
+    public FieldDeserializer primitive(PrimitiveType type, ObjectInspectorPair 
pair) {
+      return o -> {
+        if (o == null) {
+          return null;
+        }
+
+        ObjectInspector writerFieldInspector = pair.writerInspector();
+        ObjectInspector sourceFieldInspector = pair.sourceInspector();
+
+        Object result = ((PrimitiveObjectInspector) 
sourceFieldInspector).getPrimitiveJavaObject(o);
+        if (writerFieldInspector instanceof WriteObjectInspector) {

Review comment:
       nit: I would restructure this as an if..else so that 
getPrimitiveJavaObject is only called in the branch that actually needs 
its result, avoiding an unnecessary call.




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
[email protected]



---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to