[ 
https://issues.apache.org/jira/browse/STORM-3118?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16519842#comment-16519842
 ] 

Jungtaek Lim commented on STORM-3118:
-------------------------------------

[~revans2]

I was just thinking out loud: that suggestion was based on an idealized view, and I 
think we can't deprecate Pacemaker without proper scale tests.

> Netty incompatibilities with Pacemaker
> --------------------------------------
>
>                 Key: STORM-3118
>                 URL: https://issues.apache.org/jira/browse/STORM-3118
>             Project: Apache Storm
>          Issue Type: Bug
>    Affects Versions: 2.0.0
>            Reporter: Aaron Gresch
>            Assignee: Aaron Gresch
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 10m
>  Remaining Estimate: 0h
>
> Nimbus has issues with Pacemaker:
> {code:java}
> 2018-06-21 08:55:17.762 o.a.s.p.PacemakerClientHandler client-worker-2 
> [ERROR] Exception occurred in Pacemaker.
> org.apache.storm.shade.io.netty.handler.codec.EncoderException: 
> java.lang.IndexOutOfBoundsException: writerIndex(713) + minWritableBytes(2) 
> exceeds maxCapacity(713): UnpooledHeapByteBuf(ridx: 0, widx: 713, cap: 
> 713/713)
>         at 
> org.apache.storm.shade.io.netty.handler.codec.MessageToMessageEncoder.write(MessageToMessageEncoder.java:106)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeWrite0(AbstractChannelHandlerContext.java:738)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeWriteAndFlush(AbstractChannelHandlerContext.java:801)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.write(AbstractChannelHandlerContext.java:814)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.writeAndFlush(AbstractChannelHandlerContext.java:794)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.DefaultChannelPipeline.writeAndFlush(DefaultChannelPipeline.java:1066)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannel.writeAndFlush(AbstractChannel.java:305)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.messaging.netty.KerberosSaslClientHandler.channelActive(KerberosSaslClientHandler.java:65)
>  [storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:213)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:199)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.fireChannelActive(AbstractChannelHandlerContext.java:192)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.ChannelInboundHandlerAdapter.channelActive(ChannelInboundHandlerAdapter.java:64)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:213)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:199)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.fireChannelActive(AbstractChannelHandlerContext.java:192)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.DefaultChannelPipeline$HeadContext.channelActive(DefaultChannelPipeline.java:1422)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:213)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.AbstractChannelHandlerContext.invokeChannelActive(AbstractChannelHandlerContext.java:199)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.DefaultChannelPipeline.fireChannelActive(DefaultChannelPipeline.java:941)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.fulfillConnectPromise(AbstractNioChannel.java:311)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.AbstractNioChannel$AbstractNioUnsafe.finishConnect(AbstractNioChannel.java:341)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:635)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:582)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:499)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:461)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:884)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at java.lang.Thread.run(Thread.java:748) [?:1.8.0_131]
> Caused by: java.lang.IndexOutOfBoundsException: writerIndex(713) + 
> minWritableBytes(2) exceeds maxCapacity(713): UnpooledHeapByteBuf(ridx: 0, 
> widx: 713, cap: 713/713)
>         at 
> org.apache.storm.shade.io.netty.buffer.AbstractByteBuf.ensureWritable0(AbstractByteBuf.java:276)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.buffer.AbstractByteBuf.writeShort(AbstractByteBuf.java:966)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.messaging.netty.SaslMessageToken.write(SaslMessageToken.java:104)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.pacemaker.codec.ThriftEncoder.encodeNettySerializable(ThriftEncoder.java:44)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.pacemaker.codec.ThriftEncoder.encode(ThriftEncoder.java:77) 
> ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.io.netty.handler.codec.MessageToMessageEncoder.write(MessageToMessageEncoder.java:88)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         ... 26 more
> {code}
> Prevents topology submission:
>  
> {code:java}
> 2018-06-21 09:10:46.343 o.a.s.d.n.Nimbus pool-37-thread-250 [WARN] Topology 
> submission exception. (topology name='testStormKafkaNewApi')
> java.lang.IllegalStateException: instance must be started before calling this 
> method
>         at 
> org.apache.storm.shade.org.apache.curator.shaded.com.google.common.base.Preconditions.checkState(Preconditions.java:444)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.shade.org.apache.curator.framework.imps.CuratorFrameworkImpl.checkExists(CuratorFrameworkImpl.java:432)
>  ~[shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.zookeeper.ClientZookeeper.existsNode(ClientZookeeper.java:144)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.zookeeper.ClientZookeeper.mkdirsImpl(ClientZookeeper.java:288)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.zookeeper.ClientZookeeper.mkdirs(ClientZookeeper.java:70) 
> ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.cluster.ZKStateStorage.mkdirs(ZKStateStorage.java:114) 
> ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.cluster.PaceMakerStateStorage.mkdirs(PaceMakerStateStorage.java:69)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.cluster.StormClusterStateImpl.setupHeatbeats(StormClusterStateImpl.java:435)
>  ~[storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.daemon.nimbus.Nimbus.submitTopologyWithOpts(Nimbus.java:3009)
>  [storm-server-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.generated.Nimbus$Processor$submitTopologyWithOpts.getResult(Nimbus.java:3508)
>  [storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.generated.Nimbus$Processor$submitTopologyWithOpts.getResult(Nimbus.java:3487)
>  [storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.thrift.ProcessFunction.process(ProcessFunction.java:38) 
> [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.thrift.TBaseProcessor.process(TBaseProcessor.java:39) 
> [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.security.auth.sasl.SaslTransportPlugin$TUGIWrapProcessor.process(SaslTransportPlugin.java:147)
>  [storm-client-2.0.0.y.jar:2.0.0.y]
>         at 
> org.apache.storm.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:291)
>  [shaded-deps-2.0.0.y.jar:2.0.0.y]
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>  [?:1.8.0_131]
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>  [?:1.8.0_131]
>         at java.lang.Thread.run(Thread.java:748) [?:1.8.0_131]
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to