// Code generated by mdatagen. DO NOT EDIT.
package metadata

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/receiver/receivertest"
	"go.uber.org/zap"
	"go.uber.org/zap/zaptest/observer"
)
// testConfigCollection selects which metrics-builder configuration a test
// case loads: the generator defaults, every metric enabled, or every
// metric disabled.
type testConfigCollection int

const (
	testSetDefault testConfigCollection = iota
	testSetAll
	testSetNone
)
- func TestMetricsBuilder(t *testing.T) {
- tests := []struct {
- name string
- configSet testConfigCollection
- }{
- {
- name: "default",
- configSet: testSetDefault,
- },
- {
- name: "all_set",
- configSet: testSetAll,
- },
- {
- name: "none_set",
- configSet: testSetNone,
- },
- }
- for _, test := range tests {
- t.Run(test.name, func(t *testing.T) {
- start := pcommon.Timestamp(1_000_000_000)
- ts := pcommon.Timestamp(1_000_001_000)
- observedZapCore, observedLogs := observer.New(zap.WarnLevel)
- settings := receivertest.NewNopCreateSettings()
- settings.Logger = zap.New(observedZapCore)
- mb := NewMetricsBuilder(loadMetricsBuilderConfig(t, test.name), settings, WithStartTime(start))
- expectedWarnings := 0
- assert.Equal(t, expectedWarnings, observedLogs.Len())
- defaultMetricsCount := 0
- allMetricsCount := 0
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakMemoryLimitDataPoint(ts, 1)
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakNodeOperationCountDataPoint(ts, 1, AttributeRequestPut)
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakNodeOperationTimeMeanDataPoint(ts, 1, AttributeRequestPut)
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakNodeReadRepairCountDataPoint(ts, 1)
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakVnodeIndexOperationCountDataPoint(ts, 1, AttributeOperationRead)
- defaultMetricsCount++
- allMetricsCount++
- mb.RecordRiakVnodeOperationCountDataPoint(ts, 1, AttributeRequestPut)
- rb := mb.NewResourceBuilder()
- rb.SetRiakNodeName("riak.node.name-val")
- res := rb.Emit()
- metrics := mb.Emit(WithResource(res))
- if test.configSet == testSetNone {
- assert.Equal(t, 0, metrics.ResourceMetrics().Len())
- return
- }
- assert.Equal(t, 1, metrics.ResourceMetrics().Len())
- rm := metrics.ResourceMetrics().At(0)
- assert.Equal(t, res, rm.Resource())
- assert.Equal(t, 1, rm.ScopeMetrics().Len())
- ms := rm.ScopeMetrics().At(0).Metrics()
- if test.configSet == testSetDefault {
- assert.Equal(t, defaultMetricsCount, ms.Len())
- }
- if test.configSet == testSetAll {
- assert.Equal(t, allMetricsCount, ms.Len())
- }
- validatedMetrics := make(map[string]bool)
- for i := 0; i < ms.Len(); i++ {
- switch ms.At(i).Name() {
- case "riak.memory.limit":
- assert.False(t, validatedMetrics["riak.memory.limit"], "Found a duplicate in the metrics slice: riak.memory.limit")
- validatedMetrics["riak.memory.limit"] = true
- assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
- assert.Equal(t, "The amount of memory allocated to the node.", ms.At(i).Description())
- assert.Equal(t, "By", ms.At(i).Unit())
- assert.Equal(t, false, ms.At(i).Sum().IsMonotonic())
- assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
- dp := ms.At(i).Sum().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- case "riak.node.operation.count":
- assert.False(t, validatedMetrics["riak.node.operation.count"], "Found a duplicate in the metrics slice: riak.node.operation.count")
- validatedMetrics["riak.node.operation.count"] = true
- assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
- assert.Equal(t, "The number of operations performed by the node.", ms.At(i).Description())
- assert.Equal(t, "{operation}", ms.At(i).Unit())
- assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
- assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
- dp := ms.At(i).Sum().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- attrVal, ok := dp.Attributes().Get("request")
- assert.True(t, ok)
- assert.EqualValues(t, "put", attrVal.Str())
- case "riak.node.operation.time.mean":
- assert.False(t, validatedMetrics["riak.node.operation.time.mean"], "Found a duplicate in the metrics slice: riak.node.operation.time.mean")
- validatedMetrics["riak.node.operation.time.mean"] = true
- assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
- assert.Equal(t, "The mean time between request and response for operations performed by the node over the last minute.", ms.At(i).Description())
- assert.Equal(t, "us", ms.At(i).Unit())
- dp := ms.At(i).Gauge().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- attrVal, ok := dp.Attributes().Get("request")
- assert.True(t, ok)
- assert.EqualValues(t, "put", attrVal.Str())
- case "riak.node.read_repair.count":
- assert.False(t, validatedMetrics["riak.node.read_repair.count"], "Found a duplicate in the metrics slice: riak.node.read_repair.count")
- validatedMetrics["riak.node.read_repair.count"] = true
- assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
- assert.Equal(t, "The number of read repairs performed by the node.", ms.At(i).Description())
- assert.Equal(t, "{read_repair}", ms.At(i).Unit())
- assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
- assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
- dp := ms.At(i).Sum().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- case "riak.vnode.index.operation.count":
- assert.False(t, validatedMetrics["riak.vnode.index.operation.count"], "Found a duplicate in the metrics slice: riak.vnode.index.operation.count")
- validatedMetrics["riak.vnode.index.operation.count"] = true
- assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
- assert.Equal(t, "The number of index operations performed by vnodes on the node.", ms.At(i).Description())
- assert.Equal(t, "{operation}", ms.At(i).Unit())
- assert.Equal(t, false, ms.At(i).Sum().IsMonotonic())
- assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
- dp := ms.At(i).Sum().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- attrVal, ok := dp.Attributes().Get("operation")
- assert.True(t, ok)
- assert.EqualValues(t, "read", attrVal.Str())
- case "riak.vnode.operation.count":
- assert.False(t, validatedMetrics["riak.vnode.operation.count"], "Found a duplicate in the metrics slice: riak.vnode.operation.count")
- validatedMetrics["riak.vnode.operation.count"] = true
- assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
- assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
- assert.Equal(t, "The number of operations performed by vnodes on the node.", ms.At(i).Description())
- assert.Equal(t, "{operation}", ms.At(i).Unit())
- assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
- assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
- dp := ms.At(i).Sum().DataPoints().At(0)
- assert.Equal(t, start, dp.StartTimestamp())
- assert.Equal(t, ts, dp.Timestamp())
- assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
- assert.Equal(t, int64(1), dp.IntValue())
- attrVal, ok := dp.Attributes().Get("request")
- assert.True(t, ok)
- assert.EqualValues(t, "put", attrVal.Str())
- }
- }
- })
- }
- }