GitHub user srdo commented on a diff in the pull request:

    https://github.com/apache/storm/pull/2465#discussion_r158572934
  
    --- Diff: external/storm-kafka-client/src/test/java/org/apache/storm/kafka/spout/KafkaSpoutAbstractTest.java ---
    @@ -0,0 +1,160 @@
    +/*
    + * Licensed to the Apache Software Foundation (ASF) under one
    + *   or more contributor license agreements.  See the NOTICE file
    + *   distributed with this work for additional information
    + *   regarding copyright ownership.  The ASF licenses this file
    + *   to you under the Apache License, Version 2.0 (the
    + *   "License"); you may not use this file except in compliance
    + *   with the License.  You may obtain a copy of the License at
    + *  
    + *   http://www.apache.org/licenses/LICENSE-2.0
    + *  
    + *   Unless required by applicable law or agreed to in writing, software
    + *   distributed under the License is distributed on an "AS IS" BASIS,
    + *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + *   See the License for the specific language governing permissions and
    + *   limitations under the License.
    + */
    +
    +package org.apache.storm.kafka.spout;
    +
    +import org.apache.kafka.clients.consumer.KafkaConsumer;
    +import org.apache.kafka.clients.consumer.OffsetAndMetadata;
    +import org.apache.kafka.common.TopicPartition;
    +import org.apache.storm.kafka.KafkaUnitRule;
    +import org.apache.storm.kafka.spout.builders.SingleTopicKafkaSpoutConfiguration;
    +import org.apache.storm.kafka.spout.internal.KafkaConsumerFactory;
    +import org.apache.storm.kafka.spout.internal.KafkaConsumerFactoryDefault;
    +import org.apache.storm.spout.SpoutOutputCollector;
    +import org.apache.storm.task.TopologyContext;
    +import org.apache.storm.tuple.Values;
    +import org.apache.storm.utils.Time;
    +import org.junit.Before;
    +import org.junit.Rule;
    +import org.mockito.ArgumentCaptor;
    +import org.mockito.Captor;
    +import org.mockito.MockitoAnnotations;
    +
    +import java.util.HashMap;
    +import java.util.Map;
    +
    +import static org.hamcrest.CoreMatchers.is;
    +import static org.hamcrest.MatcherAssert.assertThat;
    +import static org.mockito.Matchers.eq;
    +import static org.mockito.Mockito.mock;
    +import static org.mockito.Mockito.reset;
    +import static org.mockito.Mockito.spy;
    +import static org.mockito.Mockito.times;
    +import static org.mockito.Mockito.verify;
    +
    +public abstract class KafkaSpoutAbstractTest {
    +    @Rule
    +    public KafkaUnitRule kafkaUnitRule = new KafkaUnitRule();
    +
    +    @Captor
    +    ArgumentCaptor<Map<TopicPartition, OffsetAndMetadata>> commitCapture;
    +
    +    final TopologyContext topologyContext = mock(TopologyContext.class);
    +    final Map<String, Object> conf = new HashMap<>();
    +    final SpoutOutputCollector collector = mock(SpoutOutputCollector.class);
    +    final long commitOffsetPeriodMs = 2_000;
    +    final int maxRetries = 3;
    +    KafkaConsumer<String, String> consumerSpy;
    +    KafkaConsumerFactory<String, String> consumerFactory;
    +    KafkaSpout<String, String> spout;
    +    final int maxPollRecords = 10;
    +
    +    @Before
    +    public void setUp() {
    +        MockitoAnnotations.initMocks(this);
    +
    +        final KafkaSpoutConfig<String, String> spoutConfig = createSpoutConfig();
    +
    +        consumerSpy = spy(new KafkaConsumerFactoryDefault<String, String>().createConsumer(spoutConfig));
    +
    +        consumerFactory = new KafkaConsumerFactory<String, String>() {
    +            @Override
    +            public KafkaConsumer<String, String> createConsumer(KafkaSpoutConfig<String, String> kafkaSpoutConfig) {
    +                return consumerSpy;
    +            }
    +
    +        };
    +
    +        spout = new KafkaSpout<>(spoutConfig, consumerFactory);
    +    }
    +
    +
    +    abstract KafkaSpoutConfig<String, String> createSpoutConfig();
    +
    +    void prepareSpout(int messageCount) throws Exception {
    +        SingleTopicKafkaUnitSetupHelper.populateTopicData(kafkaUnitRule.getKafkaUnit(), SingleTopicKafkaSpoutConfiguration.TOPIC, messageCount);
    +        SingleTopicKafkaUnitSetupHelper.initializeSpout(spout, conf, topologyContext, collector);
    +    }
    +
    +    /**
    +     * Helper method that, in sequence:
    +     * <ul>
    +     *     <li>calls spout.nextTuple()</li>
    +     *     <li>verifies the emitted messageId</li>
    +     *     <li>calls spout.ack(msgId)</li>
    +     *     <li>calls reset(collector) so the mock can be reused</li>
    +     * </ul>
    +     *
    +     * @param offset offset of the message to be verified
    +     * @return {@link ArgumentCaptor} of the verified messageId
    +     */
    +    ArgumentCaptor<Object> nextTuple_verifyEmitted_ack_resetCollectorMock(int offset) {
    --- End diff ---
    
    The Stack Overflow link isn't about Java; the answer seems to be about C#, and the names here don't follow the pattern outlined in that answer anyway. You're right that the Google style guide allows underscores in test method names.
    
    I don't want to bikeshed this to death, and the test method names are fine, but nextTuple_verifyEmitted_ack_resetCollectorMock isn't a test method, and I think its name could be better. It just emits a tuple and acks it; why list out in the name every method it calls?
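    
    For illustration, something along these lines reads better to me. Just a sketch: emitAndAckOneTuple is a name I made up, and I'm guessing at the body from the Javadoc above, so the STREAM constant and the exact Values layout in the emit verification are assumptions on my part:
    
        // Sketch of a possible rename: name the behavior instead of listing the calls the helper makes.
        // The body is inferred from the Javadoc; STREAM and the emitted Values layout are guesses.
        ArgumentCaptor<Object> emitAndAckOneTuple(int offset) {
            spout.nextTuple();
    
            // Capture the messageId emitted for this offset
            final ArgumentCaptor<Object> messageId = ArgumentCaptor.forClass(Object.class);
            verify(collector).emit(
                eq(SingleTopicKafkaSpoutConfiguration.STREAM),
                eq(new Values(SingleTopicKafkaSpoutConfiguration.TOPIC,
                    Integer.toString(offset),
                    Integer.toString(offset))),
                messageId.capture());
    
            // Ack the captured messageId and reset the collector mock for reuse
            spout.ack(messageId.getValue());
            reset(collector);
    
            return messageId;
        }
    
    The exact name isn't important; the point is that a name describing the behavior doesn't have to change every time the helper's internals do.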

