[ 
https://issues.apache.org/jira/browse/HIVE-29488?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=18063584#comment-18063584
 ] 

Thomas Rebele commented on HIVE-29488:
--------------------------------------

I've simplified the qfile test a bit more so that it needs only one table.

> KryoException: NullPointerException: Cannot invoke 
> "java.util.Collection.isEmpty()" because "this.delegate" is null
> -------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-29488
>                 URL: https://issues.apache.org/jira/browse/HIVE-29488
>             Project: Hive
>          Issue Type: Bug
>            Reporter: Thomas Rebele
>            Assignee: Thomas Rebele
>            Priority: Major
>              Labels: pull-request-available
>
> The following q file (executed with {{{}mvn surefire:test -Pitests -pl 
> itests/qtest -Dtest=TestMiniLlapLocalCliDriver -Dqfile=kryo_expr_node.q 
> -Dtest.output.overwrite=true{}}}) leads to a failure:
> {code:java}
> set hive.cbo.enable=false;
> CREATE TABLE tab1(attr1 string);
> CREATE TABLE tab2(attr1 varchar(5));
> select * from tab1 left join tab2 
> on tab2.attr1 in ( trim(tab1.attr1), '*');
> {code}
> Here is the exception:
> {code:java}
> org.apache.hadoop.hive.ql.exec.tez.TezRuntimeException: Vertex failed, 
> vertexName=Reducer 2, vertexId=vertex_1772810274097_0001_1_02, 
> diagnostics=[Task failed, taskId=task_1772810274097_0001_1_02_000000, 
> diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( 
> failure ) : 
> attempt_1772810274097_0001_1_02_000000_0:org.apache.hadoop.hive.ql.metadata.HiveException:
>  java.lang.RuntimeException: Failed to load plan: 
> file:/home/trebele/dev/hive/h7/itests/qtest/target/tmp/localscratchdir/e295ba98-f379-48b9-a6d4-706b2299ab09/hive_2026-03-06_07-17-56_529_1132572440107801904-1/trebele/_tez_scratch_dir/81a8d648-3095-44c3-b958-7ac9b1ea5930/reduce.xml
>     at 
> org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:66)
>     at 
> org.apache.hadoop.hive.ql.exec.ObjectCacheWrapper.retrieve(ObjectCacheWrapper.java:45)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.<init>(ReduceRecordProcessor.java:95)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:270)
>     at 
> org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:86)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:72)
>     at 
> java.base/java.security.AccessController.doPrivileged(AccessController.java:714)
>     at java.base/javax.security.auth.Subject.doAs(Subject.java:525)
>     at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:72)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:42)
>     at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>     at 
> org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:111)
>     at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317)
>     at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
>     at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
>     at java.base/java.lang.Thread.run(Thread.java:1583)
> Caused by: java.lang.RuntimeException: Failed to load plan: 
> file:/home/trebele/dev/hive/h7/itests/qtest/target/tmp/localscratchdir/e295ba98-f379-48b9-a6d4-706b2299ab09/hive_2026-03-06_07-17-56_529_1132572440107801904-1/trebele/_tez_scratch_dir/81a8d648-3095-44c3-b958-7ac9b1ea5930/reduce.xml
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:528)
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getReduceWork(Utilities.java:383)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.lambda$new$0(ReduceRecordProcessor.java:95)
>     at 
> org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:64)
>     ... 17 more
> Caused by: org.apache.hive.com.esotericsoftware.kryo.kryo5.KryoException: 
> java.lang.NullPointerException: Cannot invoke 
> "java.util.Collection.isEmpty()" because "this.delegate" is null
> Serialization trace:
> children (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
> children (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
> residualFilterExprs (org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc)
> conf (org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator)
> reducer (org.apache.hadoop.hive.ql.plan.ReduceWork)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:146)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:241)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:241)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:774)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:214)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializeObjectByKryo(SerializationUtilities.java:839)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:746)
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:503)
>     ... 20 more
> Caused by: java.lang.NullPointerException: Cannot invoke 
> "java.util.Collection.isEmpty()" because "this.delegate" is null
>     at 
> org.apache.hive.com.google.common.collect.AbstractMapBasedMultimap$WrappedCollection.refreshIfEmpty(AbstractMapBasedMultimap.java:381)
>     at 
> org.apache.hive.com.google.common.collect.AbstractMapBasedMultimap$WrappedCollection.add(AbstractMapBasedMultimap.java:514)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:245)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     ... 50 more
> ], TaskAttempt 1 failed, info=[Error: Error while running task ( failure ) : 
> attempt_1772810274097_0001_1_02_000000_1:org.apache.hadoop.hive.ql.metadata.HiveException:
>  java.lang.RuntimeException: Failed to load plan: 
> file:/home/trebele/dev/hive/h7/itests/qtest/target/tmp/localscratchdir/e295ba98-f379-48b9-a6d4-706b2299ab09/hive_2026-03-06_07-17-56_529_1132572440107801904-1/trebele/_tez_scratch_dir/81a8d648-3095-44c3-b958-7ac9b1ea5930/reduce.xml
>     at 
> org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:66)
>     at 
> org.apache.hadoop.hive.ql.exec.ObjectCacheWrapper.retrieve(ObjectCacheWrapper.java:45)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.<init>(ReduceRecordProcessor.java:95)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:270)
>     at 
> org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:381)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:86)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:72)
>     at 
> java.base/java.security.AccessController.doPrivileged(AccessController.java:714)
>     at java.base/javax.security.auth.Subject.doAs(Subject.java:525)
>     at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:72)
>     at 
> org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:42)
>     at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>     at 
> org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:111)
>     at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:317)
>     at 
> java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144)
>     at 
> java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642)
>     at java.base/java.lang.Thread.run(Thread.java:1583)
> Caused by: java.lang.RuntimeException: Failed to load plan: 
> file:/home/trebele/dev/hive/h7/itests/qtest/target/tmp/localscratchdir/e295ba98-f379-48b9-a6d4-706b2299ab09/hive_2026-03-06_07-17-56_529_1132572440107801904-1/trebele/_tez_scratch_dir/81a8d648-3095-44c3-b958-7ac9b1ea5930/reduce.xml
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:528)
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getReduceWork(Utilities.java:383)
>     at 
> org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.lambda$new$0(ReduceRecordProcessor.java:95)
>     at 
> org.apache.hadoop.hive.ql.exec.mr.ObjectCache.retrieve(ObjectCache.java:64)
>     ... 17 more
> Caused by: org.apache.hive.com.esotericsoftware.kryo.kryo5.KryoException: 
> java.lang.NullPointerException: Cannot invoke 
> "java.util.Collection.isEmpty()" because "this.delegate" is null
> Serialization trace:
> children (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
> children (org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc)
> residualFilterExprs (org.apache.hadoop.hive.ql.plan.CommonMergeJoinDesc)
> conf (org.apache.hadoop.hive.ql.exec.CommonMergeJoinOperator)
> reducer (org.apache.hadoop.hive.ql.plan.ReduceWork)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:146)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:241)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:241)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.FieldSerializer.read(FieldSerializer.java:129)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:774)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:214)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializeObjectByKryo(SerializationUtilities.java:839)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities.deserializePlan(SerializationUtilities.java:746)
>     at 
> org.apache.hadoop.hive.ql.exec.Utilities.getBaseWork(Utilities.java:503)
>     ... 20 more
> Caused by: java.lang.NullPointerException: Cannot invoke 
> "java.util.Collection.isEmpty()" because "this.delegate" is null
>     at 
> org.apache.hive.com.google.common.collect.AbstractMapBasedMultimap$WrappedCollection.refreshIfEmpty(AbstractMapBasedMultimap.java:381)
>     at 
> org.apache.hive.com.google.common.collect.AbstractMapBasedMultimap$WrappedCollection.add(AbstractMapBasedMultimap.java:514)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:245)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.CollectionSerializer.read(CollectionSerializer.java:44)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.Kryo.readObject(Kryo.java:796)
>     at 
> org.apache.hadoop.hive.ql.exec.SerializationUtilities$KryoWithHooks.readObject(SerializationUtilities.java:222)
>     at 
> org.apache.hive.com.esotericsoftware.kryo.kryo5.serializers.ReflectField.read(ReflectField.java:124)
>     ... 50 more
> ]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1 
> killedTasks:0, Vertex vertex_1772810274097_0001_1_02 [Reducer 2] 
> killed/failed due to:OWN_TASK_FAILURE]DAG did not succeed due to 
> VERTEX_FAILURE. failedVertices:1 killedVertices:0
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:286)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
>     at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105)
>     at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:354)
>     at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:327)
>     at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:244)
>     at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:105)
>     at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:345)
>     at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:189)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:142)
>     at org.apache.hadoop.hive.ql.Driver.run(Driver.java:137)
>     at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:190)
>     at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:235)
>     at 
> org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:259)
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:203)
>     at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:129)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:430)
>     at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:358)
>     at 
> org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:760)
>     at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:730)
>     at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:115)
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:139)
>     at 
> org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver(TestMiniLlapLocalCliDriver.java:62)
>     at 
> java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
>     at java.base/java.lang.reflect.Method.invoke(Method.java:580)
>     at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>     at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>     at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>     at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:118)
>     at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>     at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>     at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
>     at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>     at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>     at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>     at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>     at org.junit.runners.Suite.runChild(Suite.java:128)
>     at org.junit.runners.Suite.runChild(Suite.java:27)
>     at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
>     at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
>     at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
>     at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
>     at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
>     at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:89)
>     at org.junit.rules.RunRules.evaluate(RunRules.java:20)
>     at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
>     at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:316)
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:240)
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:214)
>     at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:155)
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:385)
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:162)
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:507)
>     at 
> org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:495) 
> {code}



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to