GitHub user srdo commented on a diff in the pull request:

    https://github.com/apache/storm/pull/2480#discussion_r158705594
  
    --- Diff: 
external/storm-kafka-client/src/main/java/org/apache/storm/kafka/KafkaUtils.java
 ---
    @@ -0,0 +1,114 @@
    +/**
    + * Licensed to the Apache Software Foundation (ASF) under one
    + * or more contributor license agreements.  See the NOTICE file
    + * distributed with this work for additional information
    + * regarding copyright ownership.  The ASF licenses this file
    + * to you under the Apache License, Version 2.0 (the
    + * "License"); you may not use this file except in compliance
    + * with the License.  You may obtain a copy of the License at
    + *
    + * http://www.apache.org/licenses/LICENSE-2.0
    + *
    + * Unless required by applicable law or agreed to in writing, software
    + * distributed under the License is distributed on an "AS IS" BASIS,
    + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    + * See the License for the specific language governing permissions and
    + * limitations under the License.
    + */
    +package org.apache.storm.kafka;
    +
    +import org.apache.kafka.clients.consumer.KafkaConsumer;
    +import org.apache.kafka.common.TopicPartition;
    +import org.apache.storm.kafka.spout.internal.OffsetManager;
    +import org.apache.storm.metric.api.IMetric;
    +import org.slf4j.Logger;
    +import org.slf4j.LoggerFactory;
    +
    +import java.util.HashMap;
    +import java.util.Map;
    +import java.util.Set;
    +import java.util.TreeMap;
    +
    +public class KafkaUtils {
    +
    +    private static final Logger LOG = 
LoggerFactory.getLogger(KafkaUtils.class);
    +
    +    public static class KafkaOffsetMetric implements IMetric {
    +        Map<TopicPartition, OffsetManager> offsetManagers;
    +        KafkaConsumer kafkaConsumer;
    +
    +        public KafkaOffsetMetric(Map<TopicPartition, OffsetManager> 
offsetManagers, KafkaConsumer kafkaConsumer) {
    +            this.offsetManagers = offsetManagers;
    +            this.kafkaConsumer = kafkaConsumer;
    +        }
    +
    +        private class TopicMetrics {
    +            long totalSpoutLag = 0;
    +            long totalEarliestTimeOffset = 0;
    +            long totalLatestTimeOffset = 0;
    +            long totalLatestEmittedOffset = 0;
    +            long totalLatestCompletedOffset = 0;
    +        }
    +
    +        @Override
    +        public Object getValueAndReset() {
    +            try {
    +                HashMap<String, Long> ret = new HashMap<>();
    +                if (offsetManagers != null && kafkaConsumer != null) {
    +                    Map<String,TopicMetrics> topicMetricsMap = new 
TreeMap<String, TopicMetrics>();
    +                    Set<TopicPartition> topicPartitions = 
offsetManagers.keySet();
    +
    +                    Map<TopicPartition, Long> beginningOffsets= 
kafkaConsumer.beginningOffsets(topicPartitions);
    +                    Map<TopicPartition, Long> endOffsets= 
kafkaConsumer.endOffsets(topicPartitions);
    +
    +                    for (Map.Entry<TopicPartition, OffsetManager> entry : 
offsetManagers.entrySet()) {
    +                        TopicPartition topicPartition = entry.getKey();
    +                        OffsetManager offsetManager = entry.getValue();
    +
    +                        long latestTimeOffset = 
beginningOffsets.get(topicPartition);
    +                        long earliestTimeOffset = 
endOffsets.get(topicPartition);
    +
    +                        long latestEmittedOffset = 
offsetManager.getLatestEmittedOffset();
    +                        long latestCompletedOffset = 
offsetManager.getCommittedOffset();
    +                        long spoutLag = latestTimeOffset - 
latestCompletedOffset;
    +
    +                        String metricPath = topicPartition.topic()  + 
"/partition_" + topicPartition.partition();
    +                        ret.put(metricPath + "/" + "spoutLag", spoutLag);
    +                        ret.put(metricPath + "/" + "earliestTimeOffset", 
earliestTimeOffset);
    +                        ret.put(metricPath + "/" + "latestTimeOffset", 
latestTimeOffset);
    +                        ret.put(metricPath + "/" + "latestEmittedOffset", 
latestEmittedOffset);
    +                        ret.put(metricPath + "/" + 
"latestCompletedOffset", latestCompletedOffset);
    +
    +                        if 
(!topicMetricsMap.containsKey(topicPartition.topic())) {
    +                            topicMetricsMap.put(topicPartition.topic(),new 
TopicMetrics());
    +                        }
    +
    +                        TopicMetrics topicMetrics = 
topicMetricsMap.get(topicPartition.topic());
    +                        topicMetrics.totalSpoutLag += spoutLag;
    +                        topicMetrics.totalEarliestTimeOffset += 
earliestTimeOffset;
    +                        topicMetrics.totalLatestTimeOffset += 
latestTimeOffset;
    +                        topicMetrics.totalLatestEmittedOffset += 
latestEmittedOffset;
    +                        topicMetrics.totalLatestCompletedOffset += 
latestCompletedOffset;
    +                    }
    +
    +                    for(Map.Entry<String, TopicMetrics> e : 
topicMetricsMap.entrySet()) {
    +                        String topic = e.getKey();
    +                        TopicMetrics topicMetrics = e.getValue();
    +                        ret.put(topic + "/" + "totalSpoutLag", 
topicMetrics.totalSpoutLag);
    +                        ret.put(topic + "/" + "totalEarliestTimeOffset", 
topicMetrics.totalEarliestTimeOffset);
    +                        ret.put(topic + "/" + "totalLatestTimeOffset", 
topicMetrics.totalLatestTimeOffset);
    +                        ret.put(topic + "/" + "totalLatestEmittedOffset", 
topicMetrics.totalLatestEmittedOffset);
    +                        ret.put(topic + "/" + 
"totalLatestCompletedOffset", topicMetrics.totalLatestCompletedOffset);
    +                    }
    +
    +                    return ret;
    +                } else {
    +                    LOG.info("Metrics Tick: Not enough data to calculate 
spout lag.");
    +                }
    +            } catch (Throwable t) {
    --- End diff --
    
    Why are we catching all exceptions here?


---

Reply via email to