This is an automated email from the ASF dual-hosted git repository.

hanahmily pushed a commit to branch meter-histogram
in repository https://gitbox.apache.org/repos/asf/skywalking.git

commit aa7f9cf1f479bd0b00f42d44f5f7555fc083712d
Author: Gao Hongtao <[email protected]>
AuthorDate: Mon Aug 31 12:24:57 2020 +0800

    Add labeled histogram
    
    Signed-off-by: Gao Hongtao <[email protected]>
---
 .../meter/function/AvgHistogramFunction.java       |  32 ++-
 .../analysis/meter/function/BucketedValues.java    |   4 +
 .../promethues/PrometheusMetricConverter.java      |  85 ++++---
 .../oap/server/core/query/type/HeatMap.java        |  18 +-
 .../meter/function/AvgHistogramFunctionTest.java   | 263 +++++++++++++++++++++
 .../library/util/prometheus/parser/Context.java    |  41 ++--
 .../util/prometheus/parser/TextParserTest.java     |  14 ++
 .../src/test/resources/testdata/prometheus.txt     |  24 +-
 8 files changed, 412 insertions(+), 69 deletions(-)

diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
index 22985cf..b8346dc 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunction.java
@@ -18,11 +18,11 @@
 
 package org.apache.skywalking.oap.server.core.analysis.meter.function;
 
-import java.util.Comparator;
+import com.google.common.base.Strings;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Set;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.ToString;
@@ -32,6 +32,7 @@ import org.apache.skywalking.oap.server.core.UnexpectedException;
 import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
 import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
 import org.apache.skywalking.oap.server.core.analysis.metrics.Metrics;
+import org.apache.skywalking.oap.server.core.query.type.Bucket;
 import org.apache.skywalking.oap.server.core.remote.grpc.proto.RemoteData;
 import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
 import org.apache.skywalking.oap.server.core.storage.annotation.Column;
@@ -83,11 +84,17 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
 
         this.entityId = entity.id();
 
+        String template = "%s";
+        if (!Strings.isNullOrEmpty(value.getGroup())) {
+            template = value.getGroup() + ":%s";
+        }
         final long[] values = value.getValues();
         for (int i = 0; i < values.length; i++) {
-            String bucketName = String.valueOf(value.getBuckets()[i]);
-            summation.valueAccumulation(bucketName, values[i]);
-            count.valueAccumulation(bucketName, 1L);
+            int bucket = value.getBuckets()[i];
+            String bucketName = bucket == Integer.MIN_VALUE ? Bucket.INFINITE_NEGATIVE : String.valueOf(bucket);
+            String key = String.format(template, bucketName);
+            summation.valueAccumulation(key, values[i]);
+            count.valueAccumulation(key, 1L);
         }
     }
 
@@ -107,9 +114,16 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
 
     @Override
     public void calculate() {
-        final List<String> sortedKeys = summation.sortedKeys(Comparator.comparingInt(Integer::parseInt));
-        for (String key : sortedKeys) {
-            dataset.put(key, summation.get(key) / count.get(key));
+        final Set<String> keys = summation.keys();
+        for (String key : keys) {
+            long value = 0;
+            if (count.get(key) != 0) {
+                value = summation.get(key) / count.get(key);
+                if (value == 0L && summation.get(key) > 0L) {
+                    value = 1;
+                }
+            }
+            dataset.put(key, value);
         }
     }
 
@@ -146,6 +160,7 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
 
         this.setCount(new DataTable(remoteData.getDataObjectStrings(0)));
         this.setSummation(new DataTable(remoteData.getDataObjectStrings(1)));
+        this.setDataset(new DataTable(remoteData.getDataObjectStrings(2)));
     }
 
     @Override
@@ -157,6 +172,7 @@ public abstract class AvgHistogramFunction extends Metrics implements Acceptable
 
         remoteBuilder.addDataObjectStrings(count.toStorageData());
         remoteBuilder.addDataObjectStrings(summation.toStorageData());
+        remoteBuilder.addDataObjectStrings(dataset.toStorageData());
 
         return remoteBuilder;
     }
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
index de5e1b9..0f8f825 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/analysis/meter/function/BucketedValues.java
@@ -21,6 +21,7 @@ package org.apache.skywalking.oap.server.core.analysis.meter.function;
 import java.util.Arrays;
 import java.util.List;
 import lombok.Getter;
+import lombok.Setter;
 import lombok.ToString;
 import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
 import org.apache.skywalking.oap.server.core.query.type.Bucket;
@@ -32,6 +33,9 @@ import org.apache.skywalking.oap.server.core.query.type.HeatMap;
 @ToString
 @Getter
 public class BucketedValues {
+
+    @Setter
+    private String group;
     /**
      * The element in the buckets represent the minimal value of this bucket, the max is defined by the next element.
      * Such as 0, 10, 50, 100 means buckets are [0, 10), [10, 50), [50, 100), [100, infinite+).
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
index 449d525..02b3395 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/metric/promethues/PrometheusMetricConverter.java
@@ -31,6 +31,7 @@ import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.StringJoiner;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
@@ -78,6 +79,10 @@ public class PrometheusMetricConverter {
 
     private final static String LATEST = "latest";
 
+    private final static String DEFAULT_GROUP = "default";
+
+    private final static List<String> DEFAULT_GROUP_LIST = Collections.singletonList(DEFAULT_GROUP);
+
     private final Window window = new Window();
 
     private final List<MetricsRule> rules;
@@ -192,41 +197,51 @@ public class PrometheusMetricConverter {
                         case AVG_PERCENTILE:
                             Validate.isTrue(sources.size() == 1, "Can't get source for histogram");
                             Map.Entry<MetricSource, List<Metric>> smm = sources.entrySet().iterator().next();
-                            Histogram h = (Histogram) sum(smm.getValue());
-
-                            long[] vv = new long[h.getBuckets().size()];
-                            int[] bb = new int[h.getBuckets().size()];
-                            long v = 0L;
-                            int i = 0;
-                            for (Map.Entry<Double, Long> entry : h.getBuckets().entrySet()) {
-                                long increase = entry.getValue() - v;
-                                vv[i] = window.get(operation.getMetricName(), ImmutableMap.of("le", entry.getKey().toString()))
-                                    .apply(smm.getKey(), (double) increase).longValue();
-                                v = entry.getValue();
-
-                                if (i + 1 < h.getBuckets().size()) {
-                                    bb[i + 1] = BigDecimal.valueOf(entry.getKey()).multiply(SECOND_TO_MILLISECOND).intValue();
-                                }
-
-                                i++;
-                            }
-
-                            if (operation.getName().equals(AVG_HISTOGRAM)) {
-                                AcceptableValue<BucketedValues> heatmapMetrics = service.buildMetrics(
-                                    formatMetricName(operation.getMetricName()), BucketedValues.class);
-                                heatmapMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
-                                heatmapMetrics.accept(smm.getKey().getEntity(), new BucketedValues(bb, vv));
-                                service.doStreamingCalculation(heatmapMetrics);
-                            } else {
-                                AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument> percentileMetrics =
-                                    service.buildMetrics(formatMetricName(operation.getMetricName()), AvgHistogramPercentileFunction.AvgPercentileArgument.class);
-                                percentileMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
-                                percentileMetrics.accept(smm.getKey().getEntity(),
-                                    new AvgHistogramPercentileFunction.AvgPercentileArgument(new BucketedValues(bb, vv), operation.getPercentiles().stream().mapToInt(Integer::intValue).toArray()));
-                                service.doStreamingCalculation(percentileMetrics);
-                            }
-
-                            generateTraffic(smm.getKey().getEntity());
+
+                            smm.getValue().stream()
+                                .collect(groupingBy(m -> Optional.ofNullable(smm.getKey().getGroupBy()).orElse(DEFAULT_GROUP_LIST).stream().map(m.getLabels()::get).collect(Collectors.joining("-"))))
+                                .forEach((group, mm) -> {
+                                    Histogram h = (Histogram) sum(mm);
+
+                                    long[] vv = new long[h.getBuckets().size()];
+                                    int[] bb = new int[h.getBuckets().size()];
+                                    long v = 0L;
+                                    int i = 0;
+                                    for (Map.Entry<Double, Long> entry : h.getBuckets().entrySet()) {
+                                        long increase = entry.getValue() - v;
+                                        vv[i] = window.get(operation.getMetricName(), ImmutableMap.of("group", group, "le", entry.getKey().toString()))
+                                            .apply(smm.getKey(), (double) increase).longValue();
+                                        v = entry.getValue();
+
+                                        if (i + 1 < h.getBuckets().size()) {
+                                            bb[i + 1] = BigDecimal.valueOf(entry.getKey()).multiply(SECOND_TO_MILLISECOND).intValue();
+                                        }
+
+                                        i++;
+                                    }
+
+                                    if (operation.getName().equals(AVG_HISTOGRAM)) {
+                                        AcceptableValue<BucketedValues> heatmapMetrics = service.buildMetrics(
+                                            formatMetricName(operation.getMetricName()), BucketedValues.class);
+                                        heatmapMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
+                                        BucketedValues bv = new BucketedValues(bb, vv);
+                                        if (!group.equals(DEFAULT_GROUP)) {
+                                            bv.setGroup(group);
+                                        }
+                                        heatmapMetrics.accept(smm.getKey().getEntity(), bv);
+                                        service.doStreamingCalculation(heatmapMetrics);
+                                    } else {
+                                        AcceptableValue<AvgHistogramPercentileFunction.AvgPercentileArgument> percentileMetrics =
+                                            service.buildMetrics(formatMetricName(operation.getMetricName()), AvgHistogramPercentileFunction.AvgPercentileArgument.class);
+                                        percentileMetrics.setTimeBucket(TimeBucket.getMinuteTimeBucket(smm.getKey().getTimestamp()));
+                                        percentileMetrics.accept(smm.getKey().getEntity(),
+                                            new AvgHistogramPercentileFunction.AvgPercentileArgument(new BucketedValues(bb, vv), operation.getPercentiles().stream().mapToInt(Integer::intValue).toArray()));
+                                        service.doStreamingCalculation(percentileMetrics);
+                                    }
+
+                                    generateTraffic(smm.getKey().getEntity());
+                                });
+
                             break;
                         default:
                             throw new IllegalArgumentException(String.format("Unsupported downSampling %s", operation.getName()));
diff --git a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
index d8b11c1..c10fcb0 100644
--- a/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
+++ b/oap-server/server-core/src/main/java/org/apache/skywalking/oap/server/core/query/type/HeatMap.java
@@ -132,9 +132,16 @@ public class HeatMap {
         private final boolean asc;
 
         @Override
-        public int compare(final String key1, final String key2) {
+        public int compare(final String k1, final String k2) {
             int result;
-
+            String[] kk1 = parseKey(k1);
+            String[] kk2 = parseKey(k2);
+            result = kk1[0].compareTo(kk2[0]);
+            if (result != 0) {
+                return result;
+            }
+            final String key1 = kk1[1];
+            final String key2 = kk2[1];
             if (key1.equals(key2)) {
                 result = 0;
             } else if (Bucket.INFINITE_NEGATIVE.equals(key1) || Bucket.INFINITE_POSITIVE.equals(key2)) {
@@ -147,5 +154,12 @@ public class HeatMap {
 
             return asc ? result : 0 - result;
         }
+
+        private String[] parseKey(String key) {
+            if (key.contains(":")) {
+                return key.split(":");
+            }
+            return new String[] {"default", key};
+        }
     }
 }
diff --git a/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java
new file mode 100644
index 0000000..1b02499
--- /dev/null
+++ b/oap-server/server-core/src/test/java/org/apache/skywalking/oap/server/core/analysis/meter/function/AvgHistogramFunctionTest.java
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.skywalking.oap.server.core.analysis.meter.function;
+
+import java.util.Map;
+import java.util.stream.IntStream;
+import org.apache.skywalking.oap.server.core.analysis.meter.MeterEntity;
+import org.apache.skywalking.oap.server.core.analysis.metrics.DataTable;
+import org.apache.skywalking.oap.server.core.query.type.Bucket;
+import org.apache.skywalking.oap.server.core.query.type.HeatMap;
+import org.apache.skywalking.oap.server.core.storage.StorageBuilder;
+import org.junit.Assert;
+import org.junit.Test;
+
+import static org.apache.skywalking.oap.server.core.analysis.meter.function.AvgHistogramFunction.DATASET;
+import static org.apache.skywalking.oap.server.core.analysis.meter.function.AvgLabeledFunction.COUNT;
+import static org.apache.skywalking.oap.server.core.analysis.meter.function.AvgLabeledFunction.SUMMATION;
+
+public class AvgHistogramFunctionTest {
+    private static final int[] BUCKETS = new int[] {
+        0,
+        50,
+        100,
+        250
+    };
+
+    private static final int[] BUCKETS_2ND = new int[] {
+        0,
+        51,
+        100,
+        250
+    };
+
+    private static final int[] INFINITE_BUCKETS = new int[] {
+        Integer.MIN_VALUE,
+        -5,
+        0,
+        10
+    };
+
+    @Test
+    public void testFunction() {
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                BUCKETS, new long[] {
+                0,
+                4,
+                10,
+                10
+            })
+        );
+
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                BUCKETS, new long[] {
+                1,
+                2,
+                3,
+                4
+            })
+        );
+        inst.calculate();
+
+        final int[] results = inst.getDataset().sortedValues(new HeatMap.KeyComparator(true)).stream()
+            .flatMapToInt(l -> IntStream.of(l.intValue()))
+            .toArray();
+        Assert.assertArrayEquals(new int[] {
+            1,
+            3,
+            6,
+            7
+        }, results);
+    }
+
+    @Test
+    public void testFunctionWithInfinite() {
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                INFINITE_BUCKETS, new long[] {
+                0,
+                4,
+                10,
+                10
+            })
+        );
+
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                INFINITE_BUCKETS, new long[] {
+                1,
+                2,
+                3,
+                4
+            })
+        );
+
+        inst.calculate();
+
+        Assert.assertEquals(1L, inst.getDataset().get(Bucket.INFINITE_NEGATIVE).longValue());
+    }
+
+    @Test
+    public void testSerialization() {
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                BUCKETS, new long[] {
+                1,
+                4,
+                10,
+                10
+            })
+        );
+        inst.calculate();
+
+        final HistogramFunctionInst inst2 = new HistogramFunctionInst();
+        inst2.deserialize(inst.serialize().build());
+
+        Assert.assertEquals(inst, inst2);
+        // HistogramFunction equal doesn't include dataset.
+        Assert.assertEquals(inst.getDataset(), inst2.getDataset());
+    }
+
+    @Test
+    public void testSerializationInInfinite() {
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                INFINITE_BUCKETS, new long[] {
+                1,
+                4,
+                10,
+                10
+            })
+        );
+
+        final HistogramFunctionInst inst2 = new HistogramFunctionInst();
+        inst2.deserialize(inst.serialize().build());
+
+        Assert.assertEquals(inst, inst2);
+        // HistogramFunction equal doesn't include dataset.
+        Assert.assertEquals(inst.getDataset(), inst2.getDataset());
+    }
+
+    @Test
+    public void testBuilder() throws IllegalAccessException, InstantiationException {
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            new BucketedValues(
+                BUCKETS, new long[] {
+                1,
+                4,
+                10,
+                10
+            })
+        );
+        inst.calculate();
+
+        final StorageBuilder storageBuilder = inst.builder().newInstance();
+
+        // Simulate the storage layer do, convert the datatable to string.
+        Map<String, Object> map = storageBuilder.data2Map(inst);
+        map.put(SUMMATION, ((DataTable) map.get(SUMMATION)).toStorageData());
+        map.put(COUNT, ((DataTable) map.get(COUNT)).toStorageData());
+        map.put(DATASET, ((DataTable) map.get(DATASET)).toStorageData());
+
+        final AvgHistogramFunction inst2 = (AvgHistogramFunction) storageBuilder.map2Data(map);
+        Assert.assertEquals(inst, inst2);
+        // HistogramFunction equal doesn't include dataset.
+        Assert.assertEquals(inst.getDataset(), inst2.getDataset());
+    }
+
+    @Test
+    public void testGroup() {
+
+        HistogramFunctionInst inst = new HistogramFunctionInst();
+        BucketedValues bv1 = new BucketedValues(
+            BUCKETS, new long[] {
+            0,
+            4,
+            10,
+            10
+        });
+        bv1.setGroup("g1");
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            bv1
+        );
+
+        BucketedValues bv2 = new BucketedValues(
+            BUCKETS, new long[] {
+            1,
+            2,
+            3,
+            4
+        });
+        bv2.setGroup("g1");
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            bv2
+        );
+        BucketedValues bv3 = new BucketedValues(
+            BUCKETS, new long[] {
+            2,
+            4,
+            6,
+            8
+        });
+        bv3.setGroup("g2");
+        inst.accept(
+            MeterEntity.newService("service-test"),
+            bv3
+        );
+        inst.calculate();
+
+        int[] results = inst.getDataset().sortedValues(new HeatMap.KeyComparator(true)).stream()
+            .flatMapToInt(l -> IntStream.of(l.intValue()))
+            .toArray();
+        Assert.assertArrayEquals(new int[] {
+            1,
+            3,
+            6,
+            7,
+            2,
+            4,
+            6,
+            8
+        }, results);
+    }
+
+    private static class HistogramFunctionInst extends AvgHistogramFunction {
+
+        @Override
+        public AcceptableValue<BucketedValues> createNew() {
+            return new HistogramFunctionInst();
+        }
+    }
+}
diff --git a/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java b/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
index d0482fe..e583809 100644
--- a/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
+++ b/oap-server/server-library/library-util/src/main/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/Context.java
@@ -113,22 +113,31 @@ public class Context {
                         .build()));
                 break;
             case HISTOGRAM:
-                Histogram.HistogramBuilder hBuilder = Histogram.builder();
-                hBuilder.name(name).timestamp(now);
-                samples.forEach(textSample -> {
-                    hBuilder.labels(textSample.getLabels());
-                    if (textSample.getName().endsWith("_count")) {
-                        hBuilder.sampleCount((long) convertStringToDouble(textSample.getValue()));
-                    } else if (textSample.getName().endsWith("_sum")) {
-                        hBuilder.sampleSum(convertStringToDouble(textSample.getValue()));
-                    } else if (textSample.getLabels().containsKey("le")) {
-                        hBuilder.bucket(
-                            convertStringToDouble(textSample.getLabels().remove("le")),
-                            (long) convertStringToDouble(textSample.getValue())
-                        );
-                    }
-                });
-                metricFamilyBuilder.addMetric(hBuilder.build());
+                samples.stream()
+                    .map(sample -> {
+                        Map<String, String> labels = Maps.newHashMap(sample.getLabels());
+                        labels.remove("le");
+                        return Pair.of(labels, sample);
+                    })
+                    .collect(groupingBy(Pair::getLeft, mapping(Pair::getRight, toList())))
+                    .forEach((labels, samples) -> {
+                        Histogram.HistogramBuilder hBuilder = Histogram.builder();
+                        hBuilder.name(name).timestamp(now);
+                        hBuilder.labels(labels);
+                        samples.forEach(textSample -> {
+                            if (textSample.getName().endsWith("_count")) {
+                                hBuilder.sampleCount((long) convertStringToDouble(textSample.getValue()));
+                            } else if (textSample.getName().endsWith("_sum")) {
+                                hBuilder.sampleSum(convertStringToDouble(textSample.getValue()));
+                            } else if (textSample.getLabels().containsKey("le")) {
+                                hBuilder.bucket(
+                                    convertStringToDouble(textSample.getLabels().remove("le")),
+                                    (long) convertStringToDouble(textSample.getValue())
+                                );
+                            }
+                        });
+                        metricFamilyBuilder.addMetric(hBuilder.build());
+                    });
                 break;
             case SUMMARY:
                 samples.stream()
diff --git a/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java b/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
index 13c156f..c34e915 100644
--- a/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
+++ b/oap-server/server-library/library-util/src/test/java/org/apache/skywalking/oap/server/library/util/prometheus/parser/TextParserTest.java
@@ -68,7 +68,21 @@ public class TextParserTest {
                               .setType(MetricType.HISTOGRAM)
                               .setHelp("A histogram of the request duration.")
                               .addMetric(Histogram.builder()
+                                                   .name("http_request_duration_seconds")
+                                                   .label("status", "400")
+                                                   .sampleCount(55)
+                                                   .sampleSum(12D)
+                                                   .bucket(0.05D, 20L)
+                                                   .bucket(0.1D, 20L)
+                                                   .bucket(0.2D, 20L)
+                                                   .bucket(0.5D, 25L)
+                                                   .bucket(1.0D, 30L)
+                                                   .bucket(Double.POSITIVE_INFINITY, 30L)
+                                                   .timestamp(now)
+                                                   .build())
+                              .addMetric(Histogram.builder()
                                                  .name("http_request_duration_seconds")
+                                                  .label("status", "200")
                                                   .sampleCount(144320L)
                                                   .sampleSum(53423.0D)
                                                   .bucket(0.05D, 24054L)
diff --git a/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt b/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
index b6eec13..7b5c4ac 100644
--- a/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
+++ b/oap-server/server-library/library-util/src/test/resources/testdata/prometheus.txt
@@ -15,14 +15,22 @@ something_weird{problem="division by zero"} +Inf -3982045
 # A histogram, which has a pretty complex representation in the text format:
 # HELP http_request_duration_seconds A histogram of the request duration.
 # TYPE http_request_duration_seconds histogram
-http_request_duration_seconds_bucket{le="0.05"} 24054
-http_request_duration_seconds_bucket{le="0.1"} 33444
-http_request_duration_seconds_bucket{le="0.2"} 100392
-http_request_duration_seconds_bucket{le="0.5"} 129389
-http_request_duration_seconds_bucket{le="1"} 133988
-http_request_duration_seconds_bucket{le="+Inf"} 144320
-http_request_duration_seconds_sum 53423
-http_request_duration_seconds_count 144320
+http_request_duration_seconds_bucket{le="0.05",status="200"} 24054
+http_request_duration_seconds_bucket{le="0.1",status="200"} 33444
+http_request_duration_seconds_bucket{le="0.2",status="200"} 100392
+http_request_duration_seconds_bucket{le="0.5",status="200"} 129389
+http_request_duration_seconds_bucket{le="1",status="200"} 133988
+http_request_duration_seconds_bucket{le="+Inf",status="200"} 144320
+http_request_duration_seconds_sum{status="200"} 53423
+http_request_duration_seconds_count{status="200"} 144320
+http_request_duration_seconds_bucket{le="0.05",status="400"} 20
+http_request_duration_seconds_bucket{le="0.1",status="400"} 20
+http_request_duration_seconds_bucket{le="0.2",status="400"} 20
+http_request_duration_seconds_bucket{le="0.5",status="400"} 25
+http_request_duration_seconds_bucket{le="1",status="400"} 30
+http_request_duration_seconds_bucket{le="+Inf",status="400"} 30
+http_request_duration_seconds_sum{status="400"} 12
+http_request_duration_seconds_count{status="400"} 55
 
 # Finally a summary, which has a complex representation, too:
 # HELP rpc_duration_seconds A summary of the RPC duration in seconds.
