DRILL-1524: Fix Hive Parquet SerDe reading issue when all columns are projected.


Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/83f3728e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/83f3728e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/83f3728e

Branch: refs/heads/master
Commit: 83f3728e7be7c3f99d88a3d8768554454d20f20f
Parents: bf31a9a
Author: vkorukanti <[email protected]>
Authored: Sun Oct 19 20:27:52 2014 -0700
Committer: vkorukanti <[email protected]>
Committed: Mon Nov 3 10:31:09 2014 -0800

----------------------------------------------------------------------
 .../org/apache/drill/exec/store/hive/HiveRecordReader.java    | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/83f3728e/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index f30aa1b..0f59095 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -181,12 +181,15 @@ public class HiveRecordReader extends AbstractRecordReader {
       }
       sInspector = (StructObjectInspector) oi;
      StructTypeInfo sTypeInfo = (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(sInspector);
+      List<Integer> columnIds = Lists.newArrayList();
       if (isStarQuery()) {
         selectedColumnNames = sTypeInfo.getAllStructFieldNames();
         tableColumns = selectedColumnNames;
+        for(int i=0; i<selectedColumnNames.size(); i++) {
+          columnIds.add(i);
+        }
       } else {
         tableColumns = sTypeInfo.getAllStructFieldNames();
-        List<Integer> columnIds = Lists.newArrayList();
         selectedColumnNames = Lists.newArrayList();
         for (SchemaPath field : getColumns()) {
           String columnName = field.getRootSegment().getPath();
@@ -201,8 +204,8 @@ public class HiveRecordReader extends AbstractRecordReader {
             selectedColumnNames.add(columnName);
           }
         }
-        ColumnProjectionUtils.appendReadColumns(job, columnIds, selectedColumnNames);
       }
+      ColumnProjectionUtils.appendReadColumns(job, columnIds, selectedColumnNames);
 
       for (String columnName : selectedColumnNames) {
        ObjectInspector fieldOI = sInspector.getStructFieldRef(columnName).getFieldObjectInspector();

Reply via email to the mailing list.