[ https://issues.apache.org/jira/browse/CARBONDATA-732?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

anubhav tarar reassigned CARBONDATA-732:
----------------------------------------

    Assignee: anubhav tarar

> User unable to execute select/load queries using the Thrift server.
> ---------------------------------------------------------------------
>
>                 Key: CARBONDATA-732
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-732
>             Project: CarbonData
>          Issue Type: Bug
>          Components: sql
>    Affects Versions: 1.0.0-incubating
>         Environment: Spark 2.1
>            Reporter: Vinod Rohilla
>            Assignee: anubhav tarar
>         Attachments: LOG_FIle
>
>          Time Spent: 20m
>  Remaining Estimate: 0h
>
> The result is not returned to the user when a select/load query is executed.
> Steps to reproduce:
> 1: Execute the query:
> 0: jdbc:hive2://localhost:10000> select * from t4;
> Note: the cursor keeps blinking in beeline and the query never completes.
> 2: Logs on the Thrift server:
> Error sending result StreamResponse{streamId=/jars/carbondata_2.11-1.0.0-incubating-SNAPSHOT-shade-hadoop2.2.0.jar, byteCount=19350001, body=FileSegmentManagedBuffer{file=/opt/spark-2.1.0/carbonlib/carbondata_2.11-1.0.0-incubating-SNAPSHOT-shade-hadoop2.2.0.jar, offset=0, length=19350001}} to /192.168.2.179:48291; closing connection
> java.lang.AbstractMethodError
>       at io.netty.util.ReferenceCountUtil.touch(ReferenceCountUtil.java:73)
>       at io.netty.channel.DefaultChannelPipeline.touch(DefaultChannelPipeline.java:107)
>       at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:811)
>       at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:724)
>       at io.netty.handler.codec.MessageToMessageEncoder.write(MessageToMessageEncoder.java:111)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:739)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeWrite(AbstractChannelHandlerContext.java:731)
>       at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:817)
>       at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:724)
>       at io.netty.handler.timeout.IdleStateHandler.write(IdleStateHandler.java:305)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:739)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:802)
>       at io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:815)
>       at io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:795)
>       at io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:832)
>       at io.netty.channel.DefaultChannelPipeline.writeAndFlush(DefaultChannelPipeline.java:1032)
>       at io.netty.channel.AbstractChannel.writeAndFlush(AbstractChannel.java:296)
>       at org.apache.spark.network.server.TransportRequestHandler.respond(TransportRequestHandler.java:194)
>       at org.apache.spark.network.server.TransportRequestHandler.processStreamRequest(TransportRequestHandler.java:150)
>       at org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:111)
>       at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
>       at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
>       at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:349)
>       at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:341)
>       at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:287)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:349)
>       at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:341)
>       at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:349)
>       at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:341)
>       at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:349)
>       at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:341)
>       at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1334)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:363)
>       at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:349)
>       at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:926)
>       at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:129)
>       at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:642)
>       at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:565)
>       at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:479)
>       at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:441)
>       at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:858)
>       at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:144)
>       at java.lang.Thread.run(Thread.java:745)
> ERROR 28-02 13:43:44,656 - Still have 1 requests outstanding when connection from /192.168.2.179:58030 is closed
> INFO  28-02 13:46:57,954 - Session disconnected without closing properly, close it now
> ERROR 28-02 13:46:57,958 - Error executing query, currentState CLOSED,
> java.lang.InterruptedException
>       at java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:998)
>       at java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
>       at scala.concurrent.impl.Promise$DefaultPromise.tryAwait(Promise.scala:202)
>       at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:218)
>       at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:153)
>       at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:619)
>       at org.apache.spark.SparkContext.runJob(SparkContext.scala:1918)
>       at org.apache.spark.SparkContext.runJob(SparkContext.scala:1931)
>       at org.apache.spark.SparkContext.runJob(SparkContext.scala:1944)
>       at org.apache.spark.SparkContext.runJob(SparkContext.scala:1958)
>       at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:935)
>       at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>       at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
>       at org.apache.spark.rdd.RDD.withScope(RDD.scala:362)
>       at org.apache.spark.rdd.RDD.collect(RDD.scala:934)
>       at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:275)
>       at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2371)
>       at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
>       at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2765)
>       at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2370)
>       at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$collect$1.apply(Dataset.scala:2375)
>       at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$collect$1.apply(Dataset.scala:2375)
>       at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2778)
>       at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2375)
>       at org.apache.spark.sql.Dataset.collect(Dataset.scala:2351)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:235)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:163)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:160)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1.run(SparkExecuteStatementOperation.scala:173)
>       at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> ERROR 28-02 13:46:57,959 - Error running hive query:
> org.apache.hive.service.cli.HiveSQLException: Illegal Operation state transition from CLOSED to ERROR
>       at org.apache.hive.service.cli.OperationState.validateTransition(OperationState.java:92)
>       at org.apache.hive.service.cli.OperationState.validateTransition(OperationState.java:98)
>       at org.apache.hive.service.cli.operation.Operation.setState(Operation.java:126)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation.org$apache$spark$sql$hive$thriftserver$SparkExecuteStatementOperation$$execute(SparkExecuteStatementOperation.scala:255)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:163)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1$$anon$2.run(SparkExecuteStatementOperation.scala:160)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1698)
>       at org.apache.spark.sql.hive.thriftserver.SparkExecuteStatementOperation$$anon$1.run(SparkExecuteStatementOperation.scala:173)
>       at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>       at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>       at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> Please check the attached log file.
> Expected result: the user should be able to execute select/load queries through the Thrift server.
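
For anyone reproducing this outside beeline, a minimal JDBC client matching the steps above might look like the sketch below. The connection URL, empty credentials, and table name t4 are taken from the beeline session in the report; the class name ThriftServerRepro is made up for illustration, and hive-jdbc must be on the classpath:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class ThriftServerRepro {
        public static void main(String[] args) throws Exception {
            // Same endpoint the beeline session in step 1 connects to.
            String url = "jdbc:hive2://localhost:10000";
            try (Connection conn = DriverManager.getConnection(url, "", "");
                 Statement stmt = conn.createStatement();
                 ResultSet rs = stmt.executeQuery("select * from t4")) {
                // With this bug the call never returns rows: the client
                // blocks here while the Thrift server logs the Netty
                // error shown above.
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }
        }
    }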
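
On the cause: an AbstractMethodError thrown from io.netty.util.ReferenceCountUtil.touch() is the usual signature of mixed Netty versions on the classpath (touch() only exists in newer Netty APIs, so a class compiled against an older Netty fails at that call site). Since the transfer that fails is the shaded carbondata_2.11-...-shade-hadoop2.2.0.jar itself, a conflict between the Netty classes bundled in that jar and the Netty that Spark 2.1 ships is a plausible suspect. This is only an inference from the trace, not confirmed in the ticket. A quick way to check which Netty build the JVM actually loaded (the class name NettyVersionCheck is illustrative):

    import io.netty.util.Version;

    public class NettyVersionCheck {
        public static void main(String[] args) {
            // Lists the version of every Netty artifact visible on the
            // classpath; more than one distinct version is a red flag.
            Version.identify().forEach((artifact, version) ->
                System.out.println(artifact + " -> " + version));
            // Shows which jar the suspect class is actually loaded from.
            System.out.println(io.netty.util.ReferenceCountUtil.class
                .getProtectionDomain().getCodeSource().getLocation());
        }
    }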


