[ https://issues.apache.org/jira/browse/HIVE-18975?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16402648#comment-16402648 ]
Hive QA commented on HIVE-18975:
--------------------------------
Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12914854/HIVE-18975.1.patch
{color:green}SUCCESS:{color} +1 due to 1 test(s) being added or modified.
{color:red}ERROR:{color} -1 due to 129 failed/errored test(s), 13819 tests executed
*Failed tests:*
{noformat}
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[acid_table_stats] (batchId=54)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[auto_sortmerge_join_2] (batchId=49)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[ppd_join5] (batchId=36)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[results_cache_1] (batchId=68)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[results_cache_with_masking] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[sample6] (batchId=68)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[source] (batchId=68)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[udf_10_trims] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[udf_from_unixtime] (batchId=68)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[udf_instr] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[union25] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[union_remove_17] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[union_script] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[vector_between_columns] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[vector_char_2] (batchId=70)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[view_cbo] (batchId=68)
org.apache.hadoop.hive.cli.TestCliDriver.testCliDriver[windowing_streaming] (batchId=68)
org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver[druidmini_mv] (batchId=251)
org.apache.hadoop.hive.cli.TestMiniLlapCliDriver.testCliDriver[llap_smb] (batchId=152)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[bucket_map_join_tez_empty] (batchId=158)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[groupby_groupingset_bug] (batchId=172)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[insert_values_orig_table_use_metadata] (batchId=168)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[mergejoin] (batchId=167)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[results_cache_1] (batchId=168)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[sysdb] (batchId=161)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[tez_smb_main] (batchId=159)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[update_access_time_non_current_db] (batchId=170)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vectorization_div0] (batchId=169)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vectorized_dynamic_semijoin_reduction] (batchId=153)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.org.apache.hadoop.hive.cli.TestNegativeCliDriver (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[mm_convert] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[mm_truncate_cols] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[set_hiveconf_validation0] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[set_hiveconf_validation1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[set_hiveconf_validation2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[show_create_table_does_not_exist] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[show_tables_bad1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[show_tables_bad2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[show_tables_bad_db1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[show_tablestatus] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_bucketmapjoin] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_mapjoin_14] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[sortmerge_mapjoin_mismatch_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[special_character_in_tabnames_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[split_sample_out_of_range] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[split_sample_wrong_format] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_publisher_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[strict_join_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[strict_orderby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[strict_orderby_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[strict_pruning_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_corr_grandparent] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_corr_in_agg] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_in_groupby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_multiple_cols_in_select] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_notin_implicit_gby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_scalar_corr_multi_rows] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_scalar_multi_rows] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_select_aggregate] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_select_distinct] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_subquery_chain_exists] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[temp_table_rename] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[touch2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_bucketed_column] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_column_seqfile] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_table_failure3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_table_failure5] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udaf_collect_set_unsupported] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_array_contains_wrong2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_assert_true2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_assert_true] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_coalesce] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_concat_ws_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_concat_ws_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_elt_wrong_type] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_format_number_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_format_number_wrong5] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_greatest_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_if_not_bool] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_if_wrong_args_len] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_in] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_instr_wrong_args_len] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_invalid] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeall_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeany_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_num] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_type] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_values_arg_type] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_max] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_nonexistent_resource] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_printf_wrong4] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_reflect_neg] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_by_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_test_error] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported4] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union22] (batchId=94)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionSortBy] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unset_table_property] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[updateBasicStats] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_bucket_col] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_non_acid_table] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_notnull_constraint] (batchId=94)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_partition_col] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[view_update] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_invalid_udaf] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_leadlag_in_udaf] (batchId=95)
org.apache.hadoop.hive.cli.TestSparkCliDriver.testCliDriver[ppd_join5] (batchId=122)
org.apache.hadoop.hive.cli.TestSparkCliDriver.testCliDriver[subquery_scalar] (batchId=124)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testMetastoreVersion (batchId=225)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testVersionMatching (batchId=225)
org.apache.hadoop.hive.ql.TestAcidOnTez.testGetSplitsLocks (batchId=226)
org.apache.hadoop.hive.ql.TestMTQueries.testMTQueries1 (batchId=229)
org.apache.hadoop.hive.ql.io.TestDruidRecordWriter.testWrite (batchId=259)
org.apache.hive.jdbc.TestJdbcWithMiniLlap.testLlapInputFormatEndToEnd (batchId=237)
{noformat}
Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/9670/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/9670/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-9670/
Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.YetusPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 129 tests failed
{noformat}
This message is automatically generated.
ATTACHMENT ID: 12914854 - PreCommit-HIVE-Build
> NPE when inserting NULL value in structure and array with HBase table
> ---------------------------------------------------------------------
>
> Key: HIVE-18975
> URL: https://issues.apache.org/jira/browse/HIVE-18975
> Project: Hive
> Issue Type: Bug
> Components: HBase Handler
> Reporter: Oleksiy Sayankin
> Assignee: Oleksiy Sayankin
> Priority: Major
> Fix For: 3.0.0
>
> Attachments: HIVE-18975.1.patch
>
>
> Steps to reproduce (STRUCT case)
> *STEP 1. Create tables*
> {code}
> CREATE TABLE IF NOT EXISTS t1 (id INT);
> INSERT INTO TABLE t1 VALUES (1),(2),(3),(4),(5);
> CREATE TABLE IF NOT EXISTS `htable`(
>   `id` INT,
>   `map_column` STRUCT<s_int:INT,s_string:STRING,s_date:DATE>)
> ROW FORMAT SERDE 'org.apache.hadoop.hive.hbase.HBaseSerDe'
> STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
> WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,id:id', 'serialization.format'='1')
> TBLPROPERTIES ('hbase.table.name'='tmp/h');
> {code}
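> (For context: the entries in 'hbase.columns.mapping' correspond positionally to the Hive columns, so ':key' binds id to the HBase row key and 'id:id' binds map_column to column family id, qualifier id. The whole struct is therefore serialized into a single delimited cell value, and that per-element serialization path is where the NPE below is thrown.)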
> *STEP 2. Insert a struct containing a NULL value into the HBase-backed table*
> {code}
> INSERT INTO `htable`
> SELECT 2, NAMED_STRUCT("s_int", CAST(NULL AS INT), "s_string", "s1", "s_date", CAST('2018-03-12' AS DATE))
> FROM t1 LIMIT 1;
> {code}
> *ACTUAL RESULT*
> The query fails with an NPE.
> {code}
> Diagnostic Messages for this Task:
> Error: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":2,"_col1":{"s_int":null,"s_string":"s1","s_date":"2018-03-12"}}}
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:257)
>     at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:444)
>     at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:392)
>     at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1631)
>     at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":2,"_col1":{"s_int":null,"s_string":"s1","s_date":"2018-03-12"}}}
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:245)
>     ... 7 more
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.serde2.SerDeException: java.lang.NullPointerException
>     at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:787)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:879)
>     at org.apache.hadoop.hive.ql.exec.LimitOperator.process(LimitOperator.java:63)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:879)
>     at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:236)
>     ... 7 more
> Caused by: org.apache.hadoop.hive.serde2.SerDeException: java.lang.NullPointerException
>     at org.apache.hadoop.hive.hbase.HBaseSerDe.serialize(HBaseSerDe.java:301)
>     at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:714)
>     ... 12 more
> Caused by: java.lang.NullPointerException
>     at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector.get(WritableIntObjectInspector.java:36)
>     at org.apache.hadoop.hive.serde2.lazy.LazyUtils.writePrimitiveUTF8(LazyUtils.java:239)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:236)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:295)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:222)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serializeField(HBaseRowSerializer.java:194)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:118)
>     at org.apache.hadoop.hive.hbase.HBaseSerDe.serialize(HBaseSerDe.java:297)
>     ... 13 more
> {code}
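> The innermost frames show the root cause: HBaseRowSerializer recurses into the struct and passes the null s_int value straight to LazyUtils.writePrimitiveUTF8(), where WritableIntObjectInspector.get() dereferences it. Below is a minimal, self-contained sketch of the kind of per-element null guard that avoids this; the class, method, and null-marker names are illustrative only, not Hive internals and not the contents of HIVE-18975.1.patch.
> {code}
> import java.util.Arrays;
> import java.util.List;
>
> // Standalone sketch: serialize a compound value, writing a null marker for
> // null elements instead of dereferencing them (the NPE in the trace above).
> public class NullSafeSerializeSketch {
>   // Hive's default null sequence is "\N"; assumed here for illustration.
>   private static final String NULL_SEQUENCE = "\\N";
>
>   static void serialize(Object field, StringBuilder out, char separator) {
>     if (field == null) {
>       out.append(NULL_SEQUENCE); // the guard the failing path is missing
>     } else if (field instanceof List) {
>       List<?> elems = (List<?>) field;
>       for (int i = 0; i < elems.size(); i++) {
>         if (i > 0) {
>           out.append(separator);
>         }
>         serialize(elems.get(i), out, separator); // guard applies per element
>       }
>     } else {
>       out.append(field); // stands in for writePrimitiveUTF8 on a non-null value
>     }
>   }
>
>   public static void main(String[] args) {
>     StringBuilder out = new StringBuilder();
>     // Mirrors NAMED_STRUCT("s_int", NULL, "s_string", "s1", "s_date", ...).
>     serialize(Arrays.asList(null, "s1", "2018-03-12"), out, '\u0002');
>     System.out.println(out); // \N<sep>s1<sep>2018-03-12 instead of an NPE
>   }
> }
> {code}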
> *EXPECTED RESULT*
> The query finishes successfully.
> Steps to reproduce (ARRAY case)
> *STEP 1. Create tables*
> {code}
> CREATE TABLE hbase_list(id INT, list_column ARRAY<STRING>)
> ROW FORMAT SERDE 'org.apache.hadoop.hive.hbase.HBaseSerDe'
> STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
> WITH SERDEPROPERTIES ('hbase.columns.mapping'=':key,id:id', 'serialization.format'='1')
> TBLPROPERTIES ('hbase.table.name'='tmp/htest');
> {code}
> *STEP 2. Insert an array containing a NULL value into the HBase-backed table*
> {code}
> INSERT INTO hbase_list
> SELECT 2, array("a", CAST(NULL AS STRING), "b")
> FROM t1 LIMIT 1;
> {code}
> *ACTUAL RESULT*
> The query fails with an NPE.
> {code}
> Diagnostic Messages for this Task:
> Error: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":2,"_col1":["a",null,"b"]}}
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:257)
>     at org.apache.hadoop.mapred.ReduceTask.runOldReducer(ReduceTask.java:444)
>     at org.apache.hadoop.mapred.ReduceTask.run(ReduceTask.java:392)
>     at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:163)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1631)
>     at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:158)
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{},"value":{"_col0":2,"_col1":["a",null,"b"]}}
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:245)
>     ... 7 more
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hadoop.hive.serde2.SerDeException: java.lang.NullPointerException
>     at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:787)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:879)
>     at org.apache.hadoop.hive.ql.exec.LimitOperator.process(LimitOperator.java:63)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:879)
>     at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
>     at org.apache.hadoop.hive.ql.exec.mr.ExecReducer.reduce(ExecReducer.java:236)
>     ... 7 more
> Caused by: org.apache.hadoop.hive.serde2.SerDeException: java.lang.NullPointerException
>     at org.apache.hadoop.hive.hbase.HBaseSerDe.serialize(HBaseSerDe.java:301)
>     at org.apache.hadoop.hive.ql.exec.FileSinkOperator.process(FileSinkOperator.java:714)
>     ... 12 more
> Caused by: java.lang.NullPointerException
>     at org.apache.hadoop.hive.serde2.lazy.LazyUtils.writePrimitiveUTF8(LazyUtils.java:260)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:236)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:251)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:222)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serializeField(HBaseRowSerializer.java:194)
>     at org.apache.hadoop.hive.hbase.HBaseRowSerializer.serialize(HBaseRowSerializer.java:118)
>     at org.apache.hadoop.hive.hbase.HBaseSerDe.serialize(HBaseSerDe.java:297)
>     ... 13 more
> {code}
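> Note that both reproductions bottom out in the same frames: HBaseRowSerializer.serialize() walks the compound value and hands the null element to LazyUtils.writePrimitiveUTF8() (LazyUtils.java:239 for the INT struct field, LazyUtils.java:260 for the STRING array element). A single null check on that per-element serialization path, as sketched above, should therefore cover both the struct and the array case.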
> *EXPECTED RESULT*
> The query finishes successfully.