[ 
https://issues.apache.org/jira/browse/HIVE-21492?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17073305#comment-17073305
 ] 

Hive QA commented on HIVE-21492:
--------------------------------



Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12998415/HIVE-21492.3.patch

{color:green}SUCCESS:{color} +1 due to 1 test(s) being added or modified.

{color:red}ERROR:{color} -1 due to 68 failed/errored test(s), 18162 tests 
executed
*Failed tests:*
{noformat}
org.apache.hadoop.hive.metastore.TestMetastoreHousekeepingLeaderEmptyConfig.testHouseKeepingThreadExistence
 (batchId=252)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.alterTableBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.createTableInBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.dropTableBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.getAllTablesInBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.getMaterializedViewsInBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.getTableInBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.getTableObjectsByNameBogusCatalog[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.moveTablesBetweenCatalogsOnAlter[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.tablesInOtherCatalogs[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableAlreadyExists[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableCascade[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableChangeCols[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableChangingDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableEmptyTableNameInNew[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableExternalTableChangeLocation[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableExternalTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorAddPartitionColumns[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorAlterPartitionColumnName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorInvalidColumnType[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorNullCols[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorNullColumnType[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorNullLocation[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorNullSerdeInfo[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidStorageDescriptorRemovePartitionColumn[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableInvalidTableNameInNew[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNoSuchDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNoSuchTableInThisDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNoSuchTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullDatabaseInNew[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullNewTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullStorageDescriptorInNew[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullTableNameInNew[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableNullTableName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTableRename[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testAlterTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateGetDeleteTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableAlreadyExists[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableDefaultLocationInSpecificDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableDefaultValuesView[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableDefaultValues[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableEmptyName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableInvalidStorageDescriptorInvalidColumnType[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableInvalidStorageDescriptorNullColumnType[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableInvalidStorageDescriptorNullColumns[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableInvalidStorageDescriptorNullSerdeInfo[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableInvalidTableName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableNoSuchDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableNullDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableNullStorageDescriptor[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testCreateTableNullTableName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableCaseInsensitive[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableDeleteDir[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableExternalWithPurge[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableExternalWithoutPurge[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableIgnoreUnknown[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableNoSuchDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableNoSuchTableInTheDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableNoSuchTable[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableNullDatabase[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableNullTableName[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableWithPurge[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testDropTableWithoutPurge[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testTruncateTablePartitionedDeleteAll[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testTruncateTablePartitioned[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.testTruncateTableUnpartitioned[Remote]
 (batchId=230)
org.apache.hadoop.hive.metastore.client.TestTablesCreateDropAlterTruncate.truncateTableBogusCatalog[Remote]
 (batchId=230)
{noformat}

Test results: 
https://builds.apache.org/job/PreCommit-HIVE-Build/21377/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/21377/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-21377/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.YetusPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 68 tests failed
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12998415 - PreCommit-HIVE-Build

> VectorizedParquetRecordReader can't read parquet file generated using 
> thrift/custom tool
> -------------------------------------------------------------------------------------------
>
>                 Key: HIVE-21492
>                 URL: https://issues.apache.org/jira/browse/HIVE-21492
>             Project: Hive
>          Issue Type: Bug
>            Reporter: Ganesha Shreedhara
>            Assignee: Ganesha Shreedhara
>            Priority: Major
>         Attachments: HIVE-21492.2.patch, HIVE-21492.3.patch, HIVE-21492.patch
>
>
> Consider, as an example, a Parquet table with an array of integers:
> {code:java}
> CREATE EXTERNAL TABLE parquet_table (`list_of_ints` array<int>)
> STORED AS PARQUET 
> LOCATION '{location}';
> {code}
> A Parquet file generated using Hive will have the following schema for this type:
> {code:java}
> group list_of_ints (LIST) {
>   repeated group bag {
>     optional int32 array;
>   }
> }
> {code}
> A Parquet file generated using Thrift or any custom tool (using 
> org.apache.parquet.io.api.RecordConsumer) may have the following schema for 
> this type:
> {code:java}
> required group list_of_ints (LIST) {
>   repeated int32 list_of_ints_tuple;
> }
> {code}
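> For illustration only (a minimal sketch, not part of this patch; the class and 
> message names below are made up), the difference between the two layouts can be 
> reproduced with parquet-mr's schema parser:
> {code:java}
> import org.apache.parquet.schema.GroupType;
> import org.apache.parquet.schema.MessageType;
> import org.apache.parquet.schema.MessageTypeParser;
> 
> public class ListLayoutDemo {
>   public static void main(String[] args) {
>     // 3-level layout, as written by Hive.
>     MessageType hiveStyle = MessageTypeParser.parseMessageType(
>         "message hive_style { required group list_of_ints (LIST) {"
>         + " repeated group bag { optional int32 array; } } }");
> 
>     // 2-level legacy layout, as written by thrift / custom RecordConsumer code.
>     MessageType thriftStyle = MessageTypeParser.parseMessageType(
>         "message thrift_style { required group list_of_ints (LIST) {"
>         + " repeated int32 list_of_ints_tuple; } }");
> 
>     GroupType hiveList = hiveStyle.getType("list_of_ints").asGroupType();
>     GroupType thriftList = thriftStyle.getType("list_of_ints").asGroupType();
> 
>     // Hive layout: the repeated child is itself a group, so asGroupType() works.
>     System.out.println(hiveList.getType(0).asGroupType());
> 
>     // Thrift layout: the repeated child is a primitive; calling asGroupType()
>     // on it throws "repeated int32 list_of_ints_tuple is not a group",
>     // which is exactly the exception shown below.
>     System.out.println(thriftList.getType(0).isPrimitive());
>   }
> }
> {code}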
> VectorizedParquetRecordReader handles only Parquet files generated using Hive. 
> It throws the following exception when a Parquet file generated using Thrift is 
> read, because of the changes made as part of HIVE-18553.
> {code:java}
> Caused by: java.lang.ClassCastException: repeated int32 list_of_ints_tuple is not a group
>  at org.apache.parquet.schema.Type.asGroupType(Type.java:207)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.getElementType(VectorizedParquetRecordReader.java:479)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.buildVectorizedParquetReader(VectorizedParquetRecordReader.java:532)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.checkEndOfRowGroup(VectorizedParquetRecordReader.java:440)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.nextBatch(VectorizedParquetRecordReader.java:401)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.next(VectorizedParquetRecordReader.java:353)
>  at org.apache.hadoop.hive.ql.io.parquet.vector.VectorizedParquetRecordReader.next(VectorizedParquetRecordReader.java:92)
>  at org.apache.hadoop.hive.ql.io.HiveContextAwareRecordReader.doNext(HiveContextAwareRecordReader.java:365){code}
>
> I have made a small change to handle the case where the child type of the 
> group type can be a PrimitiveType, as sketched below.
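> A minimal sketch of the idea (illustrative only; the helper class below is 
> hypothetical and is not the actual patch):
> {code:java}
> import org.apache.parquet.schema.GroupType;
> import org.apache.parquet.schema.Type;
> 
> // Hypothetical helper, not the HIVE-21492 patch itself: resolve the element
> // type of a Parquet LIST while tolerating both layouts shown above.
> public final class ListElementTypeResolver {
> 
>   public static Type getElementType(GroupType listType) {
>     // The single child of a LIST-annotated group is the repeated field.
>     Type repeated = listType.getType(0);
> 
>     // Legacy 2-level lists (thrift/custom writers): the repeated field is the
>     // element itself, so blindly calling asGroupType() on it fails with
>     // "repeated int32 ... is not a group".
>     if (repeated.isPrimitive()) {
>       return repeated;
>     }
> 
>     // Standard 3-level lists written by Hive: the repeated group wraps the
>     // actual element (e.g. "repeated group bag { optional int32 array }").
>     return repeated.asGroupType().getType(0);
>   }
> }
> {code}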



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
