remove IntHistogram (open-telemetry#3676)
* remove IntHistogram

* fix test

* rename histogram in golden dataset
alrex authored Jul 20, 2021
1 parent 6588d6e commit f46871a
Showing 41 changed files with 84 additions and 1,365 deletions.
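
For code that previously produced IntHistogram metrics, the equivalent after this change is the double-typed Histogram. Below is a minimal migration sketch, assuming the pdata import path used by the collector around this release; the setters mirror the removed IntHistogram ones shown in the diffs below, with the sum now recorded as a float64.

```go
package example

import (
	"time"

	"go.opentelemetry.io/collector/consumer/pdata" // import path assumed for this collector version
)

// fillHistogram populates a metric as a (double) Histogram, the type that
// replaces IntHistogram. Integer measurements are simply recorded with a
// float64 sum; bucket counts and explicit bounds are unchanged.
func fillHistogram(metric pdata.Metric, ts time.Time) {
	metric.SetName("test_metric")
	metric.SetDataType(pdata.MetricDataTypeHistogram)
	metric.Histogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)

	dp := metric.Histogram().DataPoints().AppendEmpty()
	dp.SetBucketCounts([]uint64{5, 2})
	dp.SetCount(7)
	dp.SetExplicitBounds([]float64{1.2, 10.0})
	dp.SetSum(42) // was dp.SetSum(int64(42)) on an IntHistogramDataPoint
	dp.LabelsMap().Insert("label_1", "1")
	dp.SetTimestamp(pdata.TimestampFromTime(ts))
}
```

Components that switched on pdata.MetricDataTypeIntHistogram need the corresponding case removed or folded into pdata.MetricDataTypeHistogram, as the exporter changes below illustrate.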
19 changes: 0 additions & 19 deletions cmd/mdatagen/metricdata.go
@@ -21,7 +21,6 @@ import (
var (
_ MetricData = &intGauge{}
_ MetricData = &intSum{}
_ MetricData = &intHistogram{}
_ MetricData = &gauge{}
_ MetricData = &sum{}
_ MetricData = &histogram{}
@@ -48,8 +47,6 @@ func (e *ymlMetricData) UnmarshalYAML(unmarshal func(interface{}) error) error {
md = &intGauge{}
case "int sum":
md = &intSum{}
case "int histogram":
md = &intHistogram{}
case "gauge":
md = &gauge{}
case "sum":
@@ -165,22 +162,6 @@ func (d sum) HasAggregated() bool {
return true
}

type intHistogram struct {
Aggregated `yaml:",inline"`
}

func (i intHistogram) Type() string {
return "IntHistogram"
}

func (i intHistogram) HasMonotonic() bool {
return false
}

func (i intHistogram) HasAggregated() bool {
return true
}

type histogram struct {
Aggregated `yaml:",inline"`
}
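
The deleted intHistogram methods above mirror the double-typed histogram that stays behind. For reference, a sketch of the surviving implementation, inferred from the removed code and the expectations in the metricdata_test.go table in the next file (Type() returning "Histogram", not monotonic, aggregated), not copied from the repository:

```go
// Sketch of the remaining histogram MetricData implementation; assumed to
// mirror the deleted intHistogram methods.
type histogram struct {
	Aggregated `yaml:",inline"`
}

func (d histogram) Type() string {
	return "Histogram"
}

func (d histogram) HasMonotonic() bool {
	return false
}

func (d histogram) HasAggregated() bool {
	return true
}
```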
1 change: 0 additions & 1 deletion cmd/mdatagen/metricdata_test.go
@@ -29,7 +29,6 @@ func TestMetricData(t *testing.T) {
}{
{&intGauge{}, "IntGauge", false, false},
{&intSum{}, "IntSum", true, true},
{&intHistogram{}, "IntHistogram", true, false},
{&gauge{}, "Gauge", false, false},
{&sum{}, "Sum", true, true},
{&histogram{}, "Histogram", true, false},
46 changes: 0 additions & 46 deletions cmd/pdatagen/internal/metrics_structs.go
@@ -37,15 +37,12 @@ var metricsFile = &File{
doubleGauge,
intSum,
doubleSum,
intHistogram,
histogram,
summary,
intDataPointSlice,
intDataPoint,
numberDataPointSlice,
numberDataPoint,
intHistogramDataPointSlice,
intHistogramDataPoint,
histogramDataPointSlice,
histogramDataPoint,
summaryDataPointSlice,
@@ -183,20 +180,6 @@ var doubleSum = &messageValueStruct{
},
}

var intHistogram = &messageValueStruct{
structName: "IntHistogram",
description: "// IntHistogram represents the type of a metric that is calculated by aggregating as a Histogram of all reported double measurements over a time interval.",
originFullName: "otlpmetrics.IntHistogram",
fields: []baseField{
aggregationTemporalityField,
&sliceField{
fieldName: "DataPoints",
originFieldName: "DataPoints",
returnSlice: intHistogramDataPointSlice,
},
},
}

var histogram = &messageValueStruct{
structName: "Histogram",
description: "// Histogram represents the type of a metric that is calculated by aggregating as a Histogram of all reported measurements over a time interval.",
@@ -267,27 +250,6 @@ var numberDataPoint = &messageValueStruct{
},
}

var intHistogramDataPointSlice = &sliceOfPtrs{
structName: "IntHistogramDataPointSlice",
element: intHistogramDataPoint,
}

var intHistogramDataPoint = &messageValueStruct{
structName: "IntHistogramDataPoint",
description: "// IntHistogramDataPoint is a single data point in a timeseries that describes the time-varying values of a Histogram of int values.",
originFullName: "otlpmetrics.IntHistogramDataPoint",
fields: []baseField{
labelsField,
startTimeField,
timeField,
countField,
intSumField,
bucketCountsField,
explicitBoundsField,
intExemplarsField,
},
}

var histogramDataPointSlice = &sliceOfPtrs{
structName: "HistogramDataPointSlice",
element: histogramDataPoint,
@@ -426,14 +388,6 @@ var countField = &primitiveField{
testVal: "uint64(17)",
}

var intSumField = &primitiveField{
fieldName: "Sum",
originFieldName: "Sum",
returnType: "int64",
defaultVal: "int64(0.0)",
testVal: "int64(1713)",
}

var doubleSumField = &primitiveField{
fieldName: "Sum",
originFieldName: "Sum",
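
The removed entries above follow pdatagen's declarative schema: each pdata wrapper type is described by a messageValueStruct (or a sliceOfPtrs for its slice type) plus field descriptors, and the generator emits the Go API from those descriptions. A hypothetical illustration of the pattern; the Example* names and the origin type are invented, while the field variables reused here are ones visible in this file:

```go
// Hypothetical declarations showing the shape of the schema. Removing a type
// such as IntHistogram amounts to deleting its declarations and the slots that
// reference them in metricsFile.
var exampleDataPointSlice = &sliceOfPtrs{
	structName: "ExampleDataPointSlice",
	element:    exampleDataPoint,
}

var exampleDataPoint = &messageValueStruct{
	structName:     "ExampleDataPoint",
	description:    "// ExampleDataPoint is a single data point in a timeseries.",
	originFullName: "otlpmetrics.ExampleDataPoint",
	fields: []baseField{
		labelsField,
		startTimeField,
		timeField,
		countField,
	},
}
```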
4 changes: 2 additions & 2 deletions docs/metric-metadata.yaml
@@ -23,11 +23,11 @@ metrics:
unit:
# Required
data:
# Required: one of int gauge, int sum, int histogram, gauge, sum, or histogram.
# Required: one of int gauge, int sum, gauge, sum, or histogram.
type:
# Required for int sum and sum.
monotonic: # true | false
# Required for int sum, int histogram, sum, and histogram.
# Required for int sum, sum, and histogram.
aggregation: # delta | cumulative
# Optional: array of labels that were defined in the labels section that are emitted by this metric.
labels:
8 changes: 0 additions & 8 deletions exporter/exporterhelper/resource_to_label.go
@@ -79,8 +79,6 @@ func addLabelsToMetric(metric *pdata.Metric, labelMap pdata.StringMap) {
addLabelsToIntDataPoints(metric.IntSum().DataPoints(), labelMap)
case pdata.MetricDataTypeSum:
addLabelsToNumberDataPoints(metric.Sum().DataPoints(), labelMap)
case pdata.MetricDataTypeIntHistogram:
addLabelsToIntHistogramDataPoints(metric.IntHistogram().DataPoints(), labelMap)
case pdata.MetricDataTypeHistogram:
addLabelsToDoubleHistogramDataPoints(metric.Histogram().DataPoints(), labelMap)
}
@@ -98,12 +96,6 @@ func addLabelsToNumberDataPoints(ps pdata.NumberDataPointSlice, newLabelMap pdat
}
}

func addLabelsToIntHistogramDataPoints(ps pdata.IntHistogramDataPointSlice, newLabelMap pdata.StringMap) {
for i := 0; i < ps.Len(); i++ {
joinStringMaps(newLabelMap, ps.At(i).LabelsMap())
}
}

func addLabelsToDoubleHistogramDataPoints(ps pdata.HistogramDataPointSlice, newLabelMap pdata.StringMap) {
for i := 0; i < ps.Len(); i++ {
joinStringMaps(newLabelMap, ps.At(i).LabelsMap())
3 changes: 0 additions & 3 deletions exporter/exporterhelper/resource_to_label_test.go
@@ -51,7 +51,6 @@ func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())

cloneMd := convertResourceToLabels(md)

@@ -62,14 +61,12 @@ func TestConvertResourceToLabelsAllDataTypesEmptyDataPoint(t *testing.T) {
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 1, cloneMd.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())

assert.Equal(t, 1, md.ResourceMetrics().At(0).Resource().Attributes().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(0).Gauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(1).IntGauge().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(2).Sum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(3).IntSum().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(4).Histogram().DataPoints().At(0).LabelsMap().Len())
assert.Equal(t, 0, md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics().At(5).IntHistogram().DataPoints().At(0).LabelsMap().Len())

}
40 changes: 0 additions & 40 deletions exporter/prometheusexporter/accumulator.go
@@ -90,8 +90,6 @@ func (a *lastValueAccumulator) addMetric(metric pdata.Metric, il pdata.Instrumen
return a.accumulateDoubleGauge(metric, il, now)
case pdata.MetricDataTypeSum:
return a.accumulateSum(metric, il, now)
case pdata.MetricDataTypeIntHistogram:
return a.accumulateIntHistogram(metric, il, now)
case pdata.MetricDataTypeHistogram:
return a.accumulateDoubleHistogram(metric, il, now)
case pdata.MetricDataTypeSummary:
@@ -273,44 +271,6 @@ func (a *lastValueAccumulator) accumulateSum(metric pdata.Metric, il pdata.Instr
return
}

func (a *lastValueAccumulator) accumulateIntHistogram(metric pdata.Metric, il pdata.InstrumentationLibrary, now time.Time) (n int) {
intHistogram := metric.IntHistogram()

// Drop metrics with non-cumulative aggregations
if intHistogram.AggregationTemporality() != pdata.AggregationTemporalityCumulative {
return
}

dps := intHistogram.DataPoints()
for i := 0; i < dps.Len(); i++ {
ip := dps.At(i)

signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())

v, ok := a.registeredMetrics.Load(signature)
if !ok {
m := createMetric(metric)
ip.CopyTo(m.IntHistogram().DataPoints().AppendEmpty())
a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
n++
continue
}
mv := v.(*accumulatedValue)

if ip.Timestamp().AsTime().Before(mv.value.IntHistogram().DataPoints().At(0).Timestamp().AsTime()) {
// only keep datapoint with latest timestamp
continue
}

m := createMetric(metric)
ip.CopyTo(m.IntHistogram().DataPoints().AppendEmpty())
m.IntHistogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
n++
}
return
}

func (a *lastValueAccumulator) accumulateDoubleHistogram(metric pdata.Metric, il pdata.InstrumentationLibrary, now time.Time) (n int) {
doubleHistogram := metric.Histogram()

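
The accumulateDoubleHistogram that remains (its opening lines are visible at the start of the last hunk above) follows the same last-value pattern as the removed integer variant. A sketch, assuming it differs from the deleted code only in using the Histogram accessors and a float64 sum; the helpers it calls are the ones shown in the removed code:

```go
// Sketch of the surviving double-histogram accumulation, mirroring the removed
// accumulateIntHistogram.
func (a *lastValueAccumulator) accumulateDoubleHistogram(metric pdata.Metric, il pdata.InstrumentationLibrary, now time.Time) (n int) {
	doubleHistogram := metric.Histogram()

	// Drop metrics with non-cumulative aggregations.
	if doubleHistogram.AggregationTemporality() != pdata.AggregationTemporalityCumulative {
		return
	}

	dps := doubleHistogram.DataPoints()
	for i := 0; i < dps.Len(); i++ {
		ip := dps.At(i)

		signature := timeseriesSignature(il.Name(), metric, ip.LabelsMap())

		v, ok := a.registeredMetrics.Load(signature)
		if !ok {
			m := createMetric(metric)
			ip.CopyTo(m.Histogram().DataPoints().AppendEmpty())
			a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
			n++
			continue
		}
		mv := v.(*accumulatedValue)

		if ip.Timestamp().AsTime().Before(mv.value.Histogram().DataPoints().At(0).Timestamp().AsTime()) {
			// Only keep the data point with the latest timestamp.
			continue
		}

		m := createMetric(metric)
		ip.CopyTo(m.Histogram().DataPoints().AppendEmpty())
		m.Histogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
		a.registeredMetrics.Store(signature, &accumulatedValue{value: m, instrumentationLibrary: il, updated: now})
		n++
	}
	return
}
```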
40 changes: 0 additions & 40 deletions exporter/prometheusexporter/accumulator_test.go
@@ -64,22 +64,6 @@ func TestAccumulateDeltaAggregation(t *testing.T) {
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
{
name: "IntHistogram",
fillMetric: func(ts time.Time, metric pdata.Metric) {
metric.SetName("test_metric")
metric.SetDataType(pdata.MetricDataTypeIntHistogram)
metric.IntHistogram().SetAggregationTemporality(pdata.AggregationTemporalityDelta)
dp := metric.IntHistogram().DataPoints().AppendEmpty()
dp.SetBucketCounts([]uint64{5, 2})
dp.SetCount(7)
dp.SetExplicitBounds([]float64{1.2, 10.0})
dp.SetSum(42)
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
{
name: "Histogram",
fillMetric: func(ts time.Time, metric pdata.Metric) {
@@ -215,24 +199,6 @@ func TestAccumulateMetrics(t *testing.T) {
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
{
name: "IntHistogram",
metric: func(ts time.Time, v float64, metrics pdata.MetricSlice) {
metric := metrics.AppendEmpty()
metric.SetName("test_metric")
metric.SetDataType(pdata.MetricDataTypeIntHistogram)
metric.IntHistogram().SetAggregationTemporality(pdata.AggregationTemporalityCumulative)
metric.SetDescription("test description")
dp := metric.IntHistogram().DataPoints().AppendEmpty()
dp.SetBucketCounts([]uint64{5, 2})
dp.SetCount(7)
dp.SetExplicitBounds([]float64{1.2, 10.0})
dp.SetSum(int64(v))
dp.LabelsMap().Insert("label_1", "1")
dp.LabelsMap().Insert("label_2", "2")
dp.SetTimestamp(pdata.TimestampFromTime(ts))
},
},
{
name: "Histogram",
metric: func(ts time.Time, v float64, metrics pdata.MetricSlice) {
@@ -351,12 +317,6 @@ func getMerticProperties(metric pdata.Metric) (
value = metric.Sum().DataPoints().At(0).Value()
temporality = metric.Sum().AggregationTemporality()
isMonotonic = metric.Sum().IsMonotonic()
case pdata.MetricDataTypeIntHistogram:
labels = metric.IntHistogram().DataPoints().At(0).LabelsMap()
ts = metric.IntHistogram().DataPoints().At(0).Timestamp().AsTime()
value = float64(metric.IntHistogram().DataPoints().At(0).Sum())
temporality = metric.IntHistogram().AggregationTemporality()
isMonotonic = true
case pdata.MetricDataTypeHistogram:
labels = metric.Histogram().DataPoints().At(0).LabelsMap()
ts = metric.Histogram().DataPoints().At(0).Timestamp().AsTime()
40 changes: 0 additions & 40 deletions exporter/prometheusexporter/collector.go
@@ -66,8 +66,6 @@ func (c *collector) convertMetric(metric pdata.Metric) (prometheus.Metric, error
return c.convertDoubleGauge(metric)
case pdata.MetricDataTypeSum:
return c.convertSum(metric)
case pdata.MetricDataTypeIntHistogram:
return c.convertIntHistogram(metric)
case pdata.MetricDataTypeHistogram:
return c.convertDoubleHistogram(metric)
case pdata.MetricDataTypeSummary:
@@ -172,44 +170,6 @@ func (c *collector) convertSum(metric pdata.Metric) (prometheus.Metric, error) {
return m, nil
}

func (c *collector) convertIntHistogram(metric pdata.Metric) (prometheus.Metric, error) {
ip := metric.IntHistogram().DataPoints().At(0)
desc, labels := c.getMetricMetadata(metric, ip.LabelsMap())

indicesMap := make(map[float64]int)
buckets := make([]float64, 0, len(ip.BucketCounts()))
for index, bucket := range ip.ExplicitBounds() {
if _, added := indicesMap[bucket]; !added {
indicesMap[bucket] = index
buckets = append(buckets, bucket)
}
}
sort.Float64s(buckets)

cumCount := uint64(0)

points := make(map[float64]uint64)
for _, bucket := range buckets {
index := indicesMap[bucket]
var countPerBucket uint64
if len(ip.ExplicitBounds()) > 0 && index < len(ip.ExplicitBounds()) {
countPerBucket = ip.BucketCounts()[index]
}
cumCount += countPerBucket
points[bucket] = cumCount
}

m, err := prometheus.NewConstHistogram(desc, ip.Count(), float64(ip.Sum()), points, labels...)
if err != nil {
return nil, err
}

if c.sendTimestamps {
return prometheus.NewMetricWithTimestamp(ip.Timestamp().AsTime(), m), nil
}
return m, nil
}

func (c *collector) convertSummary(metric pdata.Metric) (prometheus.Metric, error) {
// TODO: In the off chance that we have multiple points
// within the same metric, how should we handle them?
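
The bucket conversion in the removed convertIntHistogram, and presumably in the convertDoubleHistogram that remains, turns OTLP's per-bucket counts with explicit upper bounds into the cumulative counts keyed by upper bound that prometheus.NewConstHistogram expects, deduplicating and sorting the bounds along the way. A standalone sketch of that step, extracted from the deleted code above:

```go
package example

import "sort"

// cumulativeBuckets converts OTLP-style explicit bounds and per-bucket counts
// into the cumulative map consumed by prometheus.NewConstHistogram.
func cumulativeBuckets(bounds []float64, counts []uint64) map[float64]uint64 {
	indicesMap := make(map[float64]int)
	buckets := make([]float64, 0, len(counts))
	for index, bound := range bounds {
		if _, added := indicesMap[bound]; !added {
			indicesMap[bound] = index
			buckets = append(buckets, bound)
		}
	}
	sort.Float64s(buckets)

	cumCount := uint64(0)
	points := make(map[float64]uint64)
	for _, bound := range buckets {
		index := indicesMap[bound]
		var countPerBucket uint64
		if index < len(bounds) && index < len(counts) {
			countPerBucket = counts[index]
		}
		cumCount += countPerBucket
		points[bound] = cumCount
	}
	return points
}
```

For example, bounds {1.2, 10.0} with counts {5, 2} yield {1.2: 5, 10.0: 7}, which matches the data points used in the accumulator tests.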