metricfamily_test.go

// Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0

package internal

import (
	"math"
	"testing"
	"time"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/textparse"
	"github.com/prometheus/prometheus/model/value"
	"github.com/prometheus/prometheus/scrape"
	"github.com/stretchr/testify/require"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.uber.org/zap"
)
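
// testMetadataStore is a minimal in-memory scrape.MetricMetadataStore used by
// the tests in this file; metadata is looked up by metric family name.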
type testMetadataStore map[string]scrape.MetricMetadata

func (tmc testMetadataStore) GetMetadata(familyName string) (scrape.MetricMetadata, bool) {
	lookup, ok := tmc[familyName]
	return lookup, ok
}

func (tmc testMetadataStore) ListMetadata() []scrape.MetricMetadata { return nil }

func (tmc testMetadataStore) SizeMetadata() int { return 0 }

func (tmc testMetadataStore) LengthMetadata() int {
	return len(tmc)
}
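
// mc is the metadata fixture shared by the tests below: one entry per metric
// family name carrying the type, help text, and unit that the conversion code
// is expected to propagate onto the resulting metric.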
var mc = testMetadataStore{
	"counter": scrape.MetricMetadata{
		Metric: "cr",
		Type:   textparse.MetricTypeCounter,
		Help:   "This is some help for a counter",
		Unit:   "By",
	},
	"gauge": scrape.MetricMetadata{
		Metric: "ge",
		Type:   textparse.MetricTypeGauge,
		Help:   "This is some help for a gauge",
		Unit:   "1",
	},
	"gaugehistogram": scrape.MetricMetadata{
		Metric: "gh",
		Type:   textparse.MetricTypeGaugeHistogram,
		Help:   "This is some help for a gauge histogram",
		Unit:   "?",
	},
	"histogram": scrape.MetricMetadata{
		Metric: "hg",
		Type:   textparse.MetricTypeHistogram,
		Help:   "This is some help for a histogram",
		Unit:   "ms",
	},
	"histogram_with_created": scrape.MetricMetadata{
		Metric: "hg",
		Type:   textparse.MetricTypeHistogram,
		Help:   "This is some help for a histogram",
		Unit:   "ms",
	},
	"histogram_stale": scrape.MetricMetadata{
		Metric: "hg_stale",
		Type:   textparse.MetricTypeHistogram,
		Help:   "This is some help for a histogram",
		Unit:   "ms",
	},
	"summary": scrape.MetricMetadata{
		Metric: "s",
		Type:   textparse.MetricTypeSummary,
		Help:   "This is some help for a summary",
		Unit:   "ms",
	},
	"summary_with_created": scrape.MetricMetadata{
		Metric: "s",
		Type:   textparse.MetricTypeSummary,
		Help:   "This is some help for a summary",
		Unit:   "ms",
	},
	"summary_stale": scrape.MetricMetadata{
		Metric: "s_stale",
		Type:   textparse.MetricTypeSummary,
		Help:   "This is some help for a summary",
		Unit:   "ms",
	},
	"unknown": scrape.MetricMetadata{
		Metric: "u",
		Type:   textparse.MetricTypeUnknown,
		Help:   "This is some help for an unknown metric",
		Unit:   "?",
	},
}
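
// TestMetricGroupData_toDistributionUnitTest feeds histogram series (count,
// sum, buckets, and optionally _created) into a metric family and checks the
// resulting pmetric.HistogramDataPoint, covering start timestamps, staleness
// markers, missing buckets, and inconsistent-timestamp errors.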
func TestMetricGroupData_toDistributionUnitTest(t *testing.T) {
	type scrape struct {
		at         int64
		value      float64
		metric     string
		extraLabel labels.Label
	}
	tests := []struct {
		name                string
		metricName          string
		labels              labels.Labels
		scrapes             []*scrape
		want                func() pmetric.HistogramDataPoint
		wantErr             bool
		intervalStartTimeMs int64
	}{
		{
			name:                "histogram with startTimestamp",
			metricName:          "histogram",
			intervalStartTimeMs: 11,
			labels:              labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 11, value: 66, metric: "histogram_count"},
				{at: 11, value: 1004.78, metric: "histogram_sum"},
				{at: 11, value: 33, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "0.75"}},
				{at: 11, value: 55, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "2.75"}},
				{at: 11, value: 66, metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "+Inf"}},
			},
			want: func() pmetric.HistogramDataPoint {
				point := pmetric.NewHistogramDataPoint()
				point.SetCount(66)
				point.SetSum(1004.78)
				point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				point.ExplicitBounds().FromRaw([]float64{0.75, 2.75})
				point.BucketCounts().FromRaw([]uint64{33, 22, 11})
				point.SetStartTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name:                "histogram with startTimestamp from _created",
			metricName:          "histogram_with_created",
			intervalStartTimeMs: 11,
			labels:              labels.FromMap(map[string]string{"a": "A"}),
			scrapes: []*scrape{
				{at: 11, value: 66, metric: "histogram_with_created_count"},
				{at: 11, value: 1004.78, metric: "histogram_with_created_sum"},
				{at: 11, value: 600.78, metric: "histogram_with_created_created"},
				{
					at:         11,
					value:      33,
					metric:     "histogram_with_created_bucket",
					extraLabel: labels.Label{Name: "le", Value: "0.75"},
				},
				{
					at:         11,
					value:      55,
					metric:     "histogram_with_created_bucket",
					extraLabel: labels.Label{Name: "le", Value: "2.75"},
				},
				{
					at:         11,
					value:      66,
					metric:     "histogram_with_created_bucket",
					extraLabel: labels.Label{Name: "le", Value: "+Inf"},
				},
			},
			want: func() pmetric.HistogramDataPoint {
				point := pmetric.NewHistogramDataPoint()
				point.SetCount(66)
				point.SetSum(1004.78)
				// the time in milliseconds -> nanoseconds.
				point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond))
				point.SetStartTimestamp(timestampFromFloat64(600.78))
				point.ExplicitBounds().FromRaw([]float64{0.75, 2.75})
				point.BucketCounts().FromRaw([]uint64{33, 22, 11})
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				return point
			},
		},
		{
			name:                "histogram that is stale",
			metricName:          "histogram_stale",
			intervalStartTimeMs: 11,
			labels:              labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_count"},
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_sum"},
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "0.75"}},
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "2.75"}},
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_bucket", extraLabel: labels.Label{Name: "le", Value: "+Inf"}},
			},
			want: func() pmetric.HistogramDataPoint {
				point := pmetric.NewHistogramDataPoint()
				point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				point.SetFlags(pmetric.DefaultDataPointFlags.WithNoRecordedValue(true))
				point.ExplicitBounds().FromRaw([]float64{0.75, 2.75})
				point.BucketCounts().FromRaw([]uint64{0, 0, 0})
				point.SetStartTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name:                "histogram with inconsistent timestamps",
			metricName:          "histogram_inconsistent_ts",
			intervalStartTimeMs: 11,
			labels:              labels.FromMap(map[string]string{"a": "A", "le": "0.75", "b": "B"}),
			scrapes: []*scrape{
				{at: 11, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_count"},
				{at: 12, value: math.Float64frombits(value.StaleNaN), metric: "histogram_stale_sum"},
				{at: 13, value: math.Float64frombits(value.StaleNaN), metric: "value"},
			},
			wantErr: true,
		},
		{
			name:                "histogram without buckets",
			metricName:          "histogram",
			intervalStartTimeMs: 11,
			labels:              labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 11, value: 66, metric: "histogram_count"},
				{at: 11, value: 1004.78, metric: "histogram_sum"},
			},
			want: func() pmetric.HistogramDataPoint {
				point := pmetric.NewHistogramDataPoint()
				point.SetCount(66)
				point.SetSum(1004.78)
				point.SetTimestamp(pcommon.Timestamp(11 * time.Millisecond))      // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(pcommon.Timestamp(11 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				point.BucketCounts().FromRaw([]uint64{66})
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			mp := newMetricFamily(tt.metricName, mc, zap.NewNop())
			for i, tv := range tt.scrapes {
				var lbls labels.Labels
				if tv.extraLabel.Name != "" {
					lbls = labels.NewBuilder(tt.labels).Set(tv.extraLabel.Name, tv.extraLabel.Value).Labels()
				} else {
					lbls = tt.labels.Copy()
				}
				sRef, _ := getSeriesRef(nil, lbls, mp.mtype)
				err := mp.addSeries(sRef, tv.metric, lbls, tv.at, tv.value)
				if tt.wantErr {
					if i != 0 {
						require.Error(t, err)
					}
				} else {
					require.NoError(t, err)
				}
			}
			if tt.wantErr {
				// Don't check the result if we got an error
				return
			}

			require.Len(t, mp.groups, 1)

			sl := pmetric.NewMetricSlice()
			mp.appendMetric(sl, false)

			require.Equal(t, 1, sl.Len(), "Exactly one metric expected")
			metric := sl.At(0)
			require.Equal(t, mc[tt.metricName].Help, metric.Description(), "Expected help metadata in metric description")
			require.Equal(t, mc[tt.metricName].Unit, metric.Unit(), "Expected unit metadata in metric")

			hdpL := metric.Histogram().DataPoints()
			require.Equal(t, 1, hdpL.Len(), "Exactly one point expected")
			got := hdpL.At(0)
			want := tt.want()
			require.Equal(t, want, got, "Expected the points to be equal")
		})
	}
}
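
// TestMetricGroupData_toSummaryUnitTest feeds summary series (count, sum, and
// quantile values, optionally with _created) into a metric family and checks
// the resulting pmetric.SummaryDataPoint, covering staleness markers and
// inconsistent-timestamp errors.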
func TestMetricGroupData_toSummaryUnitTest(t *testing.T) {
	type scrape struct {
		at     int64
		value  float64
		metric string
	}
	type labelsScrapes struct {
		labels  labels.Labels
		scrapes []*scrape
	}
	tests := []struct {
		name          string
		labelsScrapes []*labelsScrapes
		want          func() pmetric.SummaryDataPoint
		wantErr       bool
	}{
		{
			name: "summary",
			labelsScrapes: []*labelsScrapes{
				{
					labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_count"},
						{at: 14, value: 15, metric: "summary_sum"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 8, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 33.7, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 27, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 56, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 82, metric: "value"},
					},
				},
			},
			want: func() pmetric.SummaryDataPoint {
				point := pmetric.NewSummaryDataPoint()
				point.SetCount(10)
				point.SetSum(15)
				qtL := point.QuantileValues()
				qn0 := qtL.AppendEmpty()
				qn0.SetQuantile(0)
				qn0.SetValue(8)
				qn50 := qtL.AppendEmpty()
				qn50.SetQuantile(.5)
				qn50.SetValue(27)
				qn75 := qtL.AppendEmpty()
				qn75.SetQuantile(.75)
				qn75.SetValue(33.7)
				qn90 := qtL.AppendEmpty()
				qn90.SetQuantile(.9)
				qn90.SetValue(56)
				qn99 := qtL.AppendEmpty()
				qn99.SetQuantile(.99)
				qn99.SetValue(82)
				point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond))      // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(pcommon.Timestamp(14 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name: "summary_with_created",
			labelsScrapes: []*labelsScrapes{
				{
					labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_with_created_count"},
						{at: 14, value: 15, metric: "summary_with_created_sum"},
						{at: 14, value: 150, metric: "summary_with_created_created"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 8, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 33.7, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 27, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 56, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 82, metric: "value"},
					},
				},
			},
			want: func() pmetric.SummaryDataPoint {
				point := pmetric.NewSummaryDataPoint()
				point.SetCount(10)
				point.SetSum(15)
				qtL := point.QuantileValues()
				qn0 := qtL.AppendEmpty()
				qn0.SetQuantile(0)
				qn0.SetValue(8)
				qn50 := qtL.AppendEmpty()
				qn50.SetQuantile(.5)
				qn50.SetValue(27)
				qn75 := qtL.AppendEmpty()
				qn75.SetQuantile(.75)
				qn75.SetValue(33.7)
				qn90 := qtL.AppendEmpty()
				qn90.SetQuantile(.9)
				qn90.SetValue(56)
				qn99 := qtL.AppendEmpty()
				qn99.SetQuantile(.99)
				qn99.SetValue(82)
				// the time in milliseconds -> nanoseconds.
				point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond))
				point.SetStartTimestamp(timestampFromFloat64(150))
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name: "summary_stale",
			labelsScrapes: []*labelsScrapes{
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.0", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_stale_count"},
						{at: 14, value: 12, metric: "summary_stale_sum"},
						{at: 14, value: 8, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.75", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_stale_count"},
						{at: 14, value: 1004.78, metric: "summary_stale_sum"},
						{at: 14, value: 33.7, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.50", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_stale_count"},
						{at: 14, value: 13, metric: "summary_stale_sum"},
						{at: 14, value: 27, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.90", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: 10, metric: "summary_stale_count"},
						{at: 14, value: 14, metric: "summary_stale_sum"},
						{at: 14, value: 56, metric: "value"},
					},
				},
				{
					labels: labels.FromMap(map[string]string{"a": "A", "quantile": "0.99", "b": "B"}),
					scrapes: []*scrape{
						{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "summary_stale_count"},
						{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "summary_stale_sum"},
						{at: 14, value: math.Float64frombits(value.StaleNaN), metric: "value"},
					},
				},
			},
			want: func() pmetric.SummaryDataPoint {
				point := pmetric.NewSummaryDataPoint()
				qtL := point.QuantileValues()
				qn0 := qtL.AppendEmpty()
				point.SetFlags(pmetric.DefaultDataPointFlags.WithNoRecordedValue(true))
				qn0.SetQuantile(0)
				qn0.SetValue(0)
				qn50 := qtL.AppendEmpty()
				qn50.SetQuantile(.5)
				qn50.SetValue(0)
				qn75 := qtL.AppendEmpty()
				qn75.SetQuantile(.75)
				qn75.SetValue(0)
				qn90 := qtL.AppendEmpty()
				qn90.SetQuantile(.9)
				qn90.SetValue(0)
				qn99 := qtL.AppendEmpty()
				qn99.SetQuantile(.99)
				qn99.SetValue(0)
				point.SetTimestamp(pcommon.Timestamp(14 * time.Millisecond))      // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(pcommon.Timestamp(14 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name: "summary with inconsistent timestamps",
			labelsScrapes: []*labelsScrapes{
				{
					labels: labels.FromMap(map[string]string{"a": "A", "b": "B"}),
					scrapes: []*scrape{
						{at: 11, value: 10, metric: "summary_count"},
						{at: 14, value: 15, metric: "summary_sum"},
					},
				},
			},
			wantErr: true,
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			mp := newMetricFamily(tt.name, mc, zap.NewNop())
			for _, lbs := range tt.labelsScrapes {
				for i, scrape := range lbs.scrapes {
					lb := lbs.labels.Copy()
					sRef, _ := getSeriesRef(nil, lb, mp.mtype)
					err := mp.addSeries(sRef, scrape.metric, lb, scrape.at, scrape.value)
					if tt.wantErr {
						// The first scrape won't have an error
						if i != 0 {
							require.Error(t, err)
						}
					} else {
						require.NoError(t, err)
					}
				}
			}
			if tt.wantErr {
				// Don't check the result if we got an error
				return
			}

			require.Len(t, mp.groups, 1)

			sl := pmetric.NewMetricSlice()
			mp.appendMetric(sl, false)

			require.Equal(t, 1, sl.Len(), "Exactly one metric expected")
			metric := sl.At(0)
			require.Equal(t, mc[tt.name].Help, metric.Description(), "Expected help metadata in metric description")
			require.Equal(t, mc[tt.name].Unit, metric.Unit(), "Expected unit metadata in metric")

			sdpL := metric.Summary().DataPoints()
			require.Equal(t, 1, sdpL.Len(), "Exactly one point expected")
			got := sdpL.At(0)
			want := tt.want()
			require.Equal(t, want, got, "Expected the points to be equal")
		})
	}
}
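
// TestMetricGroupData_toNumberDataUnitTest converts counter series into
// pmetric.NumberDataPoint values, checking that the start timestamp comes
// from a _created series when one is scraped and otherwise falls back to the
// sample's own timestamp.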
func TestMetricGroupData_toNumberDataUnitTest(t *testing.T) {
	type scrape struct {
		at     int64
		value  float64
		metric string
	}
	tests := []struct {
		name                     string
		metricKind               string
		labels                   labels.Labels
		scrapes                  []*scrape
		intervalStartTimestampMs int64
		want                     func() pmetric.NumberDataPoint
	}{
		{
			metricKind:               "counter",
			name:                     "counter:: startTimestampMs from _created",
			intervalStartTimestampMs: 11,
			labels:                   labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 13, value: 33.7, metric: "value"},
				{at: 13, value: 150, metric: "value_created"},
			},
			want: func() pmetric.NumberDataPoint {
				point := pmetric.NewNumberDataPoint()
				point.SetDoubleValue(33.7)
				// the time in milliseconds -> nanoseconds.
				point.SetTimestamp(pcommon.Timestamp(13 * time.Millisecond))
				point.SetStartTimestamp(timestampFromFloat64(150))
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			metricKind:               "counter",
			name:                     "counter:: startTimestampMs of 11",
			intervalStartTimestampMs: 11,
			labels:                   labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 13, value: 33.7, metric: "value"},
			},
			want: func() pmetric.NumberDataPoint {
				point := pmetric.NewNumberDataPoint()
				point.SetDoubleValue(33.7)
				point.SetTimestamp(pcommon.Timestamp(13 * time.Millisecond))      // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(pcommon.Timestamp(13 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
		{
			name:                     "counter:: startTimestampMs of 0",
			metricKind:               "counter",
			intervalStartTimestampMs: 0,
			labels:                   labels.FromMap(map[string]string{"a": "A", "b": "B"}),
			scrapes: []*scrape{
				{at: 28, value: 99.9, metric: "value"},
			},
			want: func() pmetric.NumberDataPoint {
				point := pmetric.NewNumberDataPoint()
				point.SetDoubleValue(99.9)
				point.SetTimestamp(pcommon.Timestamp(28 * time.Millisecond))      // the time in milliseconds -> nanoseconds.
				point.SetStartTimestamp(pcommon.Timestamp(28 * time.Millisecond)) // the time in milliseconds -> nanoseconds.
				attributes := point.Attributes()
				attributes.PutStr("a", "A")
				attributes.PutStr("b", "B")
				return point
			},
		},
	}

	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			mp := newMetricFamily(tt.metricKind, mc, zap.NewNop())
			for _, tv := range tt.scrapes {
				lb := tt.labels.Copy()
				sRef, _ := getSeriesRef(nil, lb, mp.mtype)
				require.NoError(t, mp.addSeries(sRef, tv.metric, lb, tv.at, tv.value))
			}

			require.Len(t, mp.groups, 1)

			sl := pmetric.NewMetricSlice()
			mp.appendMetric(sl, false)

			require.Equal(t, 1, sl.Len(), "Exactly one metric expected")
			metric := sl.At(0)
			require.Equal(t, mc[tt.metricKind].Help, metric.Description(), "Expected help metadata in metric description")
			require.Equal(t, mc[tt.metricKind].Unit, metric.Unit(), "Expected unit metadata in metric")

			ndpL := metric.Sum().DataPoints()
			require.Equal(t, 1, ndpL.Len(), "Exactly one point expected")
			got := ndpL.At(0)
			want := tt.want()
			require.Equal(t, want, got, "Expected the points to be equal")
		})
	}
}