transaction_test.go

// Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0

package internal

import (
	"context"
	"errors"
	"testing"
	"time"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/model/exemplar"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/metadata"
	"github.com/prometheus/prometheus/scrape"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"go.opentelemetry.io/collector/component"
	"go.opentelemetry.io/collector/consumer/consumertest"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/receiver/receiverhelper"
	"go.opentelemetry.io/collector/receiver/receivertest"
	"go.uber.org/zap"
	"go.uber.org/zap/zaptest/observer"
)
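
// Timestamps shared by the test cases below: ts is a scrape time in
// milliseconds, and tsNanos / tsPlusIntervalNanos are the corresponding
// pcommon timestamps in nanoseconds.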
const (
	startTimestamp      = pcommon.Timestamp(1555366608340000000)
	ts                  = int64(1555366610000)
	interval            = int64(15 * 1000)
	tsNanos             = pcommon.Timestamp(ts * 1e6)
	tsPlusIntervalNanos = pcommon.Timestamp((ts + interval) * 1e6)
)

var (
	target = scrape.NewTarget(
		// processedLabels contain label values after processing (e.g. relabeling)
		labels.FromMap(map[string]string{
			model.InstanceLabel: "localhost:8080",
		}),
		// discoveredLabels contain labels prior to any processing
		labels.FromMap(map[string]string{
			model.AddressLabel: "address:8080",
			model.SchemeLabel:  "http",
		}),
		nil)

	scrapeCtx = scrape.ContextWithMetricMetadataStore(
		scrape.ContextWithTarget(context.Background(), target),
		testMetadataStore(testMetadata))
)

func TestTransactionCommitWithoutAdding(t *testing.T) {
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	assert.NoError(t, tr.Commit())
}

func TestTransactionRollbackDoesNothing(t *testing.T) {
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	assert.NoError(t, tr.Rollback())
}

func TestTransactionUpdateMetadataDoesNothing(t *testing.T) {
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.UpdateMetadata(0, labels.New(), metadata.Metadata{})
	assert.NoError(t, err)
}

func TestTransactionAppendNoTarget(t *testing.T) {
	badLabels := labels.FromStrings(model.MetricNameLabel, "counter_test")
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, badLabels, time.Now().Unix()*1000, 1.0)
	assert.Error(t, err)
}

func TestTransactionAppendNoMetricName(t *testing.T) {
	jobNotFoundLb := labels.FromMap(map[string]string{
		model.InstanceLabel: "localhost:8080",
		model.JobLabel:      "test2",
	})
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, jobNotFoundLb, time.Now().Unix()*1000, 1.0)
	assert.ErrorIs(t, err, errMetricNameNotFound)
	assert.ErrorIs(t, tr.Commit(), errNoDataToBuild)
}

func TestTransactionAppendEmptyMetricName(t *testing.T) {
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, consumertest.NewNop(), nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test2",
		model.MetricNameLabel: "",
	}), time.Now().Unix()*1000, 1.0)
	assert.ErrorIs(t, err, errMetricNameNotFound)
}

func TestTransactionAppendResource(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test",
		model.MetricNameLabel: "counter_test",
	}), time.Now().Unix()*1000, 1.0)
	assert.NoError(t, err)
	_, err = tr.Append(0, labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test",
		model.MetricNameLabel: startTimeMetricName,
	}), time.Now().UnixMilli(), 1.0)
	assert.NoError(t, err)
	assert.NoError(t, tr.Commit())
	expectedResource := CreateResource("test", "localhost:8080", labels.FromStrings(model.SchemeLabel, "http"))
	mds := sink.AllMetrics()
	require.Len(t, mds, 1)
	gotResource := mds[0].ResourceMetrics().At(0).Resource()
	require.Equal(t, expectedResource, gotResource)
}

func TestReceiverVersionAndNameAreAttached(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test",
		model.MetricNameLabel: "counter_test",
	}), time.Now().Unix()*1000, 1.0)
	assert.NoError(t, err)
	assert.NoError(t, tr.Commit())
	expectedResource := CreateResource("test", "localhost:8080", labels.FromStrings(model.SchemeLabel, "http"))
	mds := sink.AllMetrics()
	require.Len(t, mds, 1)
	gotResource := mds[0].ResourceMetrics().At(0).Resource()
	require.Equal(t, expectedResource, gotResource)
	gotScope := mds[0].ResourceMetrics().At(0).ScopeMetrics().At(0).Scope()
	require.Equal(t, receiverName, gotScope.Name())
	require.Equal(t, component.NewDefaultBuildInfo().Version, gotScope.Version())
}

func TestTransactionCommitErrorWhenAdjusterError(t *testing.T) {
	goodLabels := labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test",
		model.MetricNameLabel: "counter_test",
	})
	sink := new(consumertest.MetricsSink)
	adjusterErr := errors.New("adjuster error")
	tr := newTransaction(scrapeCtx, &errorAdjuster{err: adjusterErr}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.Append(0, goodLabels, time.Now().Unix()*1000, 1.0)
	assert.NoError(t, err)
	assert.ErrorIs(t, tr.Commit(), adjusterErr)
}

// Ensure that we reject duplicate label keys. See https://github.com/open-telemetry/wg-prometheus/issues/44.
func TestTransactionAppendDuplicateLabels(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	dupLabels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "counter_test",
		"a", "1",
		"a", "6",
		"z", "9",
	)
	_, err := tr.Append(0, dupLabels, 1917, 1.0)
	require.Error(t, err)
	assert.Contains(t, err.Error(), `invalid sample: non-unique label names: "a"`)
}

func TestTransactionAppendHistogramNoLe(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	receiverSettings := receivertest.NewNopCreateSettings()
	core, observedLogs := observer.New(zap.InfoLevel)
	receiverSettings.Logger = zap.New(core)
	tr := newTransaction(
		scrapeCtx,
		&startTimeAdjuster{startTime: startTimestamp},
		sink,
		nil,
		receiverSettings,
		nopObsRecv(t),
		false,
	)
	goodLabels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "hist_test_bucket",
	)
	_, err := tr.Append(0, goodLabels, 1917, 1.0)
	require.NoError(t, err)
	assert.Equal(t, 1, observedLogs.Len())
	assert.Equal(t, 1, observedLogs.FilterMessage("failed to add datapoint").Len())
	assert.NoError(t, tr.Commit())
	assert.Len(t, sink.AllMetrics(), 0)
}

func TestTransactionAppendSummaryNoQuantile(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	receiverSettings := receivertest.NewNopCreateSettings()
	core, observedLogs := observer.New(zap.InfoLevel)
	receiverSettings.Logger = zap.New(core)
	tr := newTransaction(
		scrapeCtx,
		&startTimeAdjuster{startTime: startTimestamp},
		sink,
		nil,
		receiverSettings,
		nopObsRecv(t),
		false,
	)
	goodLabels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "summary_test",
	)
	_, err := tr.Append(0, goodLabels, 1917, 1.0)
	require.NoError(t, err)
	assert.Equal(t, 1, observedLogs.Len())
	assert.Equal(t, 1, observedLogs.FilterMessage("failed to add datapoint").Len())
	assert.NoError(t, tr.Commit())
	assert.Len(t, sink.AllMetrics(), 0)
}

func TestTransactionAppendValidAndInvalid(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	receiverSettings := receivertest.NewNopCreateSettings()
	core, observedLogs := observer.New(zap.InfoLevel)
	receiverSettings.Logger = zap.New(core)
	tr := newTransaction(
		scrapeCtx,
		&startTimeAdjuster{startTime: startTimestamp},
		sink,
		nil,
		receiverSettings,
		nopObsRecv(t),
		false,
	)
	// a valid counter
	_, err := tr.Append(0, labels.FromMap(map[string]string{
		model.InstanceLabel:   "localhost:8080",
		model.JobLabel:        "test",
		model.MetricNameLabel: "counter_test",
	}), time.Now().Unix()*1000, 1.0)
	assert.NoError(t, err)
	// summary without quantiles, should be ignored
	summarylabels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "summary_test",
	)
	_, err = tr.Append(0, summarylabels, 1917, 1.0)
	require.NoError(t, err)
	assert.Equal(t, 1, observedLogs.Len())
	assert.Equal(t, 1, observedLogs.FilterMessage("failed to add datapoint").Len())
	assert.NoError(t, tr.Commit())
	expectedResource := CreateResource("test", "localhost:8080", labels.FromStrings(model.SchemeLabel, "http"))
	mds := sink.AllMetrics()
	require.Len(t, mds, 1)
	gotResource := mds[0].ResourceMetrics().At(0).Resource()
	require.Equal(t, expectedResource, gotResource)
	require.Equal(t, 1, mds[0].MetricCount())
}

func TestAppendExemplarWithNoMetricName(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	labels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
	)
	_, err := tr.AppendExemplar(0, labels, exemplar.Exemplar{Value: 0})
	assert.Equal(t, errMetricNameNotFound, err)
}

func TestAppendExemplarWithEmptyMetricName(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	labels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "",
	)
	_, err := tr.AppendExemplar(0, labels, exemplar.Exemplar{Value: 0})
	assert.Equal(t, errMetricNameNotFound, err)
}

func TestAppendExemplarWithDuplicateLabels(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	labels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "",
		"a", "b",
		"a", "c",
	)
	_, err := tr.AppendExemplar(0, labels, exemplar.Exemplar{Value: 0})
	require.Error(t, err)
	assert.Contains(t, err.Error(), `invalid sample: non-unique label names: "a"`)
}

func TestAppendExemplarWithoutAddingMetric(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	labels := labels.FromStrings(
		model.InstanceLabel, "0.0.0.0:8855",
		model.JobLabel, "test",
		model.MetricNameLabel, "counter_test",
		"a", "b",
	)
	_, err := tr.AppendExemplar(0, labels, exemplar.Exemplar{Value: 0})
	assert.NoError(t, err)
}

func TestAppendExemplarWithNoLabels(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.AppendExemplar(0, nil, exemplar.Exemplar{Value: 0})
	assert.Equal(t, errNoJobInstance, err)
}

func TestAppendExemplarWithEmptyLabelArray(t *testing.T) {
	sink := new(consumertest.MetricsSink)
	tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
	_, err := tr.AppendExemplar(0, []labels.Label{}, exemplar.Exemplar{Value: 0})
	assert.Equal(t, errNoJobInstance, err)
}
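
// nopObsRecv builds a no-op ObsReport using nop create settings so the
// transaction under test can record receiver observability data without a
// real receiver pipeline.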
func nopObsRecv(t *testing.T) *receiverhelper.ObsReport {
	obsrecv, err := receiverhelper.NewObsReport(receiverhelper.ObsReportSettings{
		ReceiverID:             component.NewID("prometheus"),
		Transport:              transport,
		ReceiverCreateSettings: receivertest.NewNopCreateSettings(),
	})
	require.NoError(t, err)
	return obsrecv
}
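
// The metric-builder tests below are table-driven; see buildTestData.run at
// the bottom of the file for how each scraped page is replayed through a
// transaction and compared against the expected pmetric.Metrics.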
func TestMetricBuilderCounters(t *testing.T) {
	tests := []buildTestData{
		{
			name: "single-item",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("counter_test", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("counter_test")
				sum := m0.SetEmptySum()
				sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum.SetIsMonotonic(true)
				pt0 := sum.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "single-item-with-exemplars",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint(
							"counter_test",
							100,
							[]exemplar.Exemplar{
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: model.MetricNameLabel, Value: "counter_test"}, {Name: model.JobLabel, Value: "job"}, {Name: model.InstanceLabel, Value: "instance"}, {Name: "foo", Value: "bar"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: ""}, {Name: "span_id", Value: ""}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "10a47365b8aa04e08291fab9deca84db6170"}, {Name: "span_id", Value: "719cee4a669fd7d109ff"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "174137cab66dc880"}, {Name: "span_id", Value: "dfa4597a9d"}},
								},
							},
							"foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("counter_test")
				sum := m0.SetEmptySum()
				sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum.SetIsMonotonic(true)
				pt0 := sum.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				e0 := pt0.Exemplars().AppendEmpty()
				e0.SetTimestamp(timestampFromMs(1663113420863))
				e0.SetDoubleValue(1)
				e0.FilteredAttributes().PutStr(model.MetricNameLabel, "counter_test")
				e0.FilteredAttributes().PutStr(model.JobLabel, "job")
				e0.FilteredAttributes().PutStr(model.InstanceLabel, "instance")
				e0.FilteredAttributes().PutStr("foo", "bar")
				e1 := pt0.Exemplars().AppendEmpty()
				e1.SetTimestamp(timestampFromMs(1663113420863))
				e1.SetDoubleValue(1)
				e1.FilteredAttributes().PutStr("foo", "bar")
				e2 := pt0.Exemplars().AppendEmpty()
				e2.SetTimestamp(timestampFromMs(1663113420863))
				e2.SetDoubleValue(1)
				e2.FilteredAttributes().PutStr("foo", "bar")
				e2.SetTraceID([16]byte{0x10, 0xa4, 0x73, 0x65, 0xb8, 0xaa, 0x04, 0xe0, 0x82, 0x91, 0xfa, 0xb9, 0xde, 0xca, 0x84, 0xdb})
				e2.SetSpanID([8]byte{0x71, 0x9c, 0xee, 0x4a, 0x66, 0x9f, 0xd7, 0xd1})
				e3 := pt0.Exemplars().AppendEmpty()
				e3.SetTimestamp(timestampFromMs(1663113420863))
				e3.SetDoubleValue(1)
				e3.FilteredAttributes().PutStr("foo", "bar")
				e3.SetTraceID([16]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x41, 0x37, 0xca, 0xb6, 0x6d, 0xc8, 0x80})
				e3.SetSpanID([8]byte{0x00, 0x00, 0x00, 0xdf, 0xa4, 0x59, 0x7a, 0x9d})
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "two-items",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("counter_test", 150, nil, "foo", "bar"),
						createDataPoint("counter_test", 25, nil, "foo", "other"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("counter_test")
				sum := m0.SetEmptySum()
				sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum.SetIsMonotonic(true)
				pt0 := sum.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(150.0)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := sum.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(25.0)
				pt1.SetStartTimestamp(startTimestamp)
				pt1.SetTimestamp(tsNanos)
				pt1.Attributes().PutStr("foo", "other")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "two-metrics",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("counter_test", 150, nil, "foo", "bar"),
						createDataPoint("counter_test", 25, nil, "foo", "other"),
						createDataPoint("counter_test2", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("counter_test")
				sum0 := m0.SetEmptySum()
				sum0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum0.SetIsMonotonic(true)
				pt0 := sum0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(150.0)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := sum0.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(25.0)
				pt1.SetStartTimestamp(startTimestamp)
				pt1.SetTimestamp(tsNanos)
				pt1.Attributes().PutStr("foo", "other")
				m1 := mL0.AppendEmpty()
				m1.SetName("counter_test2")
				sum1 := m1.SetEmptySum()
				sum1.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum1.SetIsMonotonic(true)
				pt2 := sum1.DataPoints().AppendEmpty()
				pt2.SetDoubleValue(100.0)
				pt2.SetStartTimestamp(startTimestamp)
				pt2.SetTimestamp(tsNanos)
				pt2.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "metrics-with-poor-names",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("poor_name_count", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("poor_name_count")
				sum := m0.SetEmptySum()
				sum.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				sum.SetIsMonotonic(true)
				pt0 := sum.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.run(t)
		})
	}
}

func TestMetricBuilderGauges(t *testing.T) {
	tests := []buildTestData{
		{
			name: "one-gauge",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 100, nil, "foo", "bar"),
					},
				},
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 90, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("gauge_test")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				md1 := pmetric.NewMetrics()
				mL1 := md1.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m1 := mL1.AppendEmpty()
				m1.SetName("gauge_test")
				gauge1 := m1.SetEmptyGauge()
				pt1 := gauge1.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(90.0)
				pt1.SetStartTimestamp(0)
				pt1.SetTimestamp(tsPlusIntervalNanos)
				pt1.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0, md1}
			},
		},
		{
			name: "one-gauge-with-exemplars",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint(
							"gauge_test",
							100,
							[]exemplar.Exemplar{
								{
									Value:  2,
									Ts:     1663350815890,
									Labels: []labels.Label{{Name: model.MetricNameLabel, Value: "counter_test"}, {Name: model.JobLabel, Value: "job"}, {Name: model.InstanceLabel, Value: "instance"}, {Name: "foo", Value: "bar"}},
								},
								{
									Value:  2,
									Ts:     1663350815890,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: ""}, {Name: "span_id", Value: ""}},
								},
								{
									Value:  2,
									Ts:     1663350815890,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "10a47365b8aa04e08291fab9deca84db6170"}, {Name: "span_id", Value: "719cee4a669fd7d109ff"}},
								},
								{
									Value:  2,
									Ts:     1663350815890,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "174137cab66dc880"}, {Name: "span_id", Value: "dfa4597a9d"}},
								},
							},
							"foo", "bar"),
					},
				},
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 90, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("gauge_test")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				e0 := pt0.Exemplars().AppendEmpty()
				e0.SetTimestamp(timestampFromMs(1663350815890))
				e0.SetDoubleValue(2)
				e0.FilteredAttributes().PutStr(model.MetricNameLabel, "counter_test")
				e0.FilteredAttributes().PutStr(model.JobLabel, "job")
				e0.FilteredAttributes().PutStr(model.InstanceLabel, "instance")
				e0.FilteredAttributes().PutStr("foo", "bar")
				e1 := pt0.Exemplars().AppendEmpty()
				e1.SetTimestamp(timestampFromMs(1663350815890))
				e1.SetDoubleValue(2)
				e1.FilteredAttributes().PutStr("foo", "bar")
				e2 := pt0.Exemplars().AppendEmpty()
				e2.SetTimestamp(timestampFromMs(1663350815890))
				e2.SetDoubleValue(2)
				e2.FilteredAttributes().PutStr("foo", "bar")
				e2.SetTraceID([16]byte{0x10, 0xa4, 0x73, 0x65, 0xb8, 0xaa, 0x04, 0xe0, 0x82, 0x91, 0xfa, 0xb9, 0xde, 0xca, 0x84, 0xdb})
				e2.SetSpanID([8]byte{0x71, 0x9c, 0xee, 0x4a, 0x66, 0x9f, 0xd7, 0xd1})
				e3 := pt0.Exemplars().AppendEmpty()
				e3.SetTimestamp(timestampFromMs(1663350815890))
				e3.SetDoubleValue(2)
				e3.FilteredAttributes().PutStr("foo", "bar")
				e3.SetTraceID([16]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x41, 0x37, 0xca, 0xb6, 0x6d, 0xc8, 0x80})
				e3.SetSpanID([8]byte{0x00, 0x00, 0x00, 0xdf, 0xa4, 0x59, 0x7a, 0x9d})
				md1 := pmetric.NewMetrics()
				mL1 := md1.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m1 := mL1.AppendEmpty()
				m1.SetName("gauge_test")
				gauge1 := m1.SetEmptyGauge()
				pt1 := gauge1.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(90.0)
				pt1.SetStartTimestamp(0)
				pt1.SetTimestamp(tsPlusIntervalNanos)
				pt1.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0, md1}
			},
		},
		{
			name: "gauge-with-different-tags",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 100, nil, "foo", "bar"),
						createDataPoint("gauge_test", 200, nil, "bar", "foo"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("gauge_test")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := gauge0.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(200.0)
				pt1.SetStartTimestamp(0)
				pt1.SetTimestamp(tsNanos)
				pt1.Attributes().PutStr("bar", "foo")
				return []pmetric.Metrics{md0}
			},
		},
		{
			// TODO: A decision needs to be made on whether we want the behavior that
			// can generate different tag-key sets as metrics come and go.
			name: "gauge-comes-and-go-with-different-tagset",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 100, nil, "foo", "bar"),
						createDataPoint("gauge_test", 200, nil, "bar", "foo"),
					},
				},
				{
					pts: []*testDataPoint{
						createDataPoint("gauge_test", 20, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("gauge_test")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := gauge0.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(200.0)
				pt1.SetStartTimestamp(0)
				pt1.SetTimestamp(tsNanos)
				pt1.Attributes().PutStr("bar", "foo")
				md1 := pmetric.NewMetrics()
				mL1 := md1.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m1 := mL1.AppendEmpty()
				m1.SetName("gauge_test")
				gauge1 := m1.SetEmptyGauge()
				pt2 := gauge1.DataPoints().AppendEmpty()
				pt2.SetDoubleValue(20.0)
				pt2.SetStartTimestamp(0)
				pt2.SetTimestamp(tsPlusIntervalNanos)
				pt2.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0, md1}
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.run(t)
		})
	}
}

func TestMetricBuilderUntyped(t *testing.T) {
	tests := []buildTestData{
		{
			name: "one-unknown",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("unknown_test", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("unknown_test")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetStartTimestamp(0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "no-type-hint",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("something_not_exists", 100, nil, "foo", "bar"),
						createDataPoint("theother_not_exists", 200, nil, "foo", "bar"),
						createDataPoint("theother_not_exists", 300, nil, "bar", "foo"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("something_not_exists")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				m1 := mL0.AppendEmpty()
				m1.SetName("theother_not_exists")
				gauge1 := m1.SetEmptyGauge()
				pt1 := gauge1.DataPoints().AppendEmpty()
				pt1.SetDoubleValue(200.0)
				pt1.SetTimestamp(tsNanos)
				pt1.Attributes().PutStr("foo", "bar")
				pt2 := gauge1.DataPoints().AppendEmpty()
				pt2.SetDoubleValue(300.0)
				pt2.SetTimestamp(tsNanos)
				pt2.Attributes().PutStr("bar", "foo")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "untype-metric-poor-names",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("some_count", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("some_count")
				gauge0 := m0.SetEmptyGauge()
				pt0 := gauge0.DataPoints().AppendEmpty()
				pt0.SetDoubleValue(100.0)
				pt0.SetTimestamp(tsNanos)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.run(t)
		})
	}
}

func TestMetricBuilderHistogram(t *testing.T) {
	tests := []buildTestData{
		{
			name: "single item",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 10, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "single item with exemplars",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint(
							"hist_test_bucket",
							1,
							[]exemplar.Exemplar{
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: model.MetricNameLabel, Value: "counter_test"}, {Name: model.JobLabel, Value: "job"}, {Name: model.InstanceLabel, Value: "instance"}, {Name: "foo", Value: "bar"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: ""}, {Name: "span_id", Value: ""}, {Name: "le", Value: "20"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "10a47365b8aa04e08291fab9deca84db6170"}, {Name: "traceid", Value: "e3688e1aa2961786"}, {Name: "span_id", Value: "719cee4a669fd7d109ff"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "174137cab66dc880"}, {Name: "span_id", Value: "dfa4597a9d"}},
								},
								{
									Value:  1,
									Ts:     1663113420863,
									Labels: []labels.Label{{Name: "foo", Value: "bar"}, {Name: "trace_id", Value: "174137cab66dc88"}, {Name: "span_id", Value: "dfa4597a9"}},
								},
							},
							"foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 10, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				e0 := pt0.Exemplars().AppendEmpty()
				e0.SetTimestamp(timestampFromMs(1663113420863))
				e0.SetDoubleValue(1)
				e0.FilteredAttributes().PutStr(model.MetricNameLabel, "counter_test")
				e0.FilteredAttributes().PutStr(model.JobLabel, "job")
				e0.FilteredAttributes().PutStr(model.InstanceLabel, "instance")
				e0.FilteredAttributes().PutStr("foo", "bar")
				e1 := pt0.Exemplars().AppendEmpty()
				e1.SetTimestamp(timestampFromMs(1663113420863))
				e1.SetDoubleValue(1)
				e1.FilteredAttributes().PutStr("foo", "bar")
				e1.FilteredAttributes().PutStr("le", "20")
				e2 := pt0.Exemplars().AppendEmpty()
				e2.SetTimestamp(timestampFromMs(1663113420863))
				e2.SetDoubleValue(1)
				e2.FilteredAttributes().PutStr("foo", "bar")
				e2.FilteredAttributes().PutStr("traceid", "e3688e1aa2961786")
				e2.SetTraceID([16]byte{0x10, 0xa4, 0x73, 0x65, 0xb8, 0xaa, 0x04, 0xe0, 0x82, 0x91, 0xfa, 0xb9, 0xde, 0xca, 0x84, 0xdb})
				e2.SetSpanID([8]byte{0x71, 0x9c, 0xee, 0x4a, 0x66, 0x9f, 0xd7, 0xd1})
				e3 := pt0.Exemplars().AppendEmpty()
				e3.SetTimestamp(timestampFromMs(1663113420863))
				e3.SetDoubleValue(1)
				e3.FilteredAttributes().PutStr("foo", "bar")
				e3.SetTraceID([16]byte{0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0x41, 0x37, 0xca, 0xb6, 0x6d, 0xc8, 0x80})
				e3.SetSpanID([8]byte{0x00, 0x00, 0x00, 0xdf, 0xa4, 0x59, 0x7a, 0x9d})
				e4 := pt0.Exemplars().AppendEmpty()
				e4.SetTimestamp(timestampFromMs(1663113420863))
				e4.SetDoubleValue(1)
				e4.FilteredAttributes().PutStr("foo", "bar")
				e4.FilteredAttributes().PutStr("trace_id", "174137cab66dc88")
				e4.FilteredAttributes().PutStr("span_id", "dfa4597a9")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "multi-groups",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 10, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
						createDataPoint("hist_test_bucket", 1, nil, "key2", "v2", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "key2", "v2", "le", "20"),
						createDataPoint("hist_test_bucket", 3, nil, "key2", "v2", "le", "+inf"),
						createDataPoint("hist_test_sum", 50, nil, "key2", "v2"),
						createDataPoint("hist_test_count", 3, nil, "key2", "v2"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := hist0.DataPoints().AppendEmpty()
				pt1.SetCount(3)
				pt1.SetSum(50)
				pt1.ExplicitBounds().FromRaw([]float64{10, 20})
				pt1.BucketCounts().FromRaw([]uint64{1, 1, 1})
				pt1.SetTimestamp(tsNanos)
				pt1.SetStartTimestamp(startTimestamp)
				pt1.Attributes().PutStr("key2", "v2")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "multi-groups-and-families",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 10, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
						createDataPoint("hist_test_bucket", 1, nil, "key2", "v2", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "key2", "v2", "le", "20"),
						createDataPoint("hist_test_bucket", 3, nil, "key2", "v2", "le", "+inf"),
						createDataPoint("hist_test_sum", 50, nil, "key2", "v2"),
						createDataPoint("hist_test_count", 3, nil, "key2", "v2"),
						createDataPoint("hist_test2_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test2_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test2_bucket", 3, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test2_sum", 50, nil, "foo", "bar"),
						createDataPoint("hist_test2_count", 3, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				pt1 := hist0.DataPoints().AppendEmpty()
				pt1.SetCount(3)
				pt1.SetSum(50)
				pt1.ExplicitBounds().FromRaw([]float64{10, 20})
				pt1.BucketCounts().FromRaw([]uint64{1, 1, 1})
				pt1.SetTimestamp(tsNanos)
				pt1.SetStartTimestamp(startTimestamp)
				pt1.Attributes().PutStr("key2", "v2")
				m1 := mL0.AppendEmpty()
				m1.SetName("hist_test2")
				hist1 := m1.SetEmptyHistogram()
				hist1.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt2 := hist1.DataPoints().AppendEmpty()
				pt2.SetCount(3)
				pt2.SetSum(50)
				pt2.ExplicitBounds().FromRaw([]float64{10, 20})
				pt2.BucketCounts().FromRaw([]uint64{1, 1, 1})
				pt2.SetTimestamp(tsNanos)
				pt2.SetStartTimestamp(startTimestamp)
				pt2.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "unordered-buckets",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 10, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 8})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			// This is unlikely to happen in a real environment, since Prometheus won't generate a histogram with fewer than three buckets.
			name: "only-one-bucket",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 3, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_count", 3, nil, "foo", "bar"),
						createDataPoint("hist_test_sum", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(3)
				pt0.SetSum(100)
				pt0.BucketCounts().FromRaw([]uint64{3})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			// This is unlikely to happen in a real environment, since Prometheus won't generate a histogram with fewer than three buckets.
			name: "only-one-bucket-noninf",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 3, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_count", 3, nil, "foo", "bar"),
						createDataPoint("hist_test_sum", 100, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(3)
				pt0.SetSum(100)
				pt0.BucketCounts().FromRaw([]uint64{3, 0})
				pt0.ExplicitBounds().FromRaw([]float64{20})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "no-sum",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 3, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_count", 3, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(3)
				pt0.ExplicitBounds().FromRaw([]float64{10, 20})
				pt0.BucketCounts().FromRaw([]uint64{1, 1, 1})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "corrupted-no-buckets",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
						createDataPoint("hist_test_count", 10, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("hist_test")
				hist0 := m0.SetEmptyHistogram()
				hist0.SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
				pt0 := hist0.DataPoints().AppendEmpty()
				pt0.SetCount(10)
				pt0.SetSum(99)
				pt0.BucketCounts().FromRaw([]uint64{10})
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "corrupted-no-count",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("hist_test_bucket", 1, nil, "foo", "bar", "le", "10"),
						createDataPoint("hist_test_bucket", 2, nil, "foo", "bar", "le", "20"),
						createDataPoint("hist_test_bucket", 3, nil, "foo", "bar", "le", "+inf"),
						createDataPoint("hist_test_sum", 99, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				return []pmetric.Metrics{pmetric.NewMetrics()}
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.run(t)
		})
	}
}
func TestMetricBuilderSummary(t *testing.T) {
	tests := []buildTestData{
		{
			name: "no-sum-and-count",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("summary_test", 5, nil, "foo", "bar", "quantile", "1"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				return []pmetric.Metrics{pmetric.NewMetrics()}
			},
		},
		{
			name: "no-count",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("summary_test", 1, nil, "foo", "bar", "quantile", "0.5"),
						createDataPoint("summary_test", 2, nil, "foo", "bar", "quantile", "0.75"),
						createDataPoint("summary_test", 5, nil, "foo", "bar", "quantile", "1"),
						createDataPoint("summary_test_sum", 500, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				return []pmetric.Metrics{pmetric.NewMetrics()}
			},
		},
		{
			name: "no-sum",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("summary_test", 1, nil, "foo", "bar", "quantile", "0.5"),
						createDataPoint("summary_test", 2, nil, "foo", "bar", "quantile", "0.75"),
						createDataPoint("summary_test", 5, nil, "foo", "bar", "quantile", "1"),
						createDataPoint("summary_test_count", 500, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("summary_test")
				sum0 := m0.SetEmptySummary()
				pt0 := sum0.DataPoints().AppendEmpty()
				pt0.SetTimestamp(tsNanos)
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetCount(500)
				pt0.SetSum(0.0)
				pt0.Attributes().PutStr("foo", "bar")
				qvL := pt0.QuantileValues()
				q50 := qvL.AppendEmpty()
				q50.SetQuantile(.50)
				q50.SetValue(1.0)
				q75 := qvL.AppendEmpty()
				q75.SetQuantile(.75)
				q75.SetValue(2.0)
				q100 := qvL.AppendEmpty()
				q100.SetQuantile(1)
				q100.SetValue(5.0)
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "empty-quantiles",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("summary_test_sum", 100, nil, "foo", "bar"),
						createDataPoint("summary_test_count", 500, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("summary_test")
				sum0 := m0.SetEmptySummary()
				pt0 := sum0.DataPoints().AppendEmpty()
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.SetCount(500)
				pt0.SetSum(100.0)
				pt0.Attributes().PutStr("foo", "bar")
				return []pmetric.Metrics{md0}
			},
		},
		{
			name: "regular-summary",
			inputs: []*testScrapedPage{
				{
					pts: []*testDataPoint{
						createDataPoint("summary_test", 1, nil, "foo", "bar", "quantile", "0.5"),
						createDataPoint("summary_test", 2, nil, "foo", "bar", "quantile", "0.75"),
						createDataPoint("summary_test", 5, nil, "foo", "bar", "quantile", "1"),
						createDataPoint("summary_test_sum", 100, nil, "foo", "bar"),
						createDataPoint("summary_test_count", 500, nil, "foo", "bar"),
					},
				},
			},
			wants: func() []pmetric.Metrics {
				md0 := pmetric.NewMetrics()
				mL0 := md0.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics()
				m0 := mL0.AppendEmpty()
				m0.SetName("summary_test")
				sum0 := m0.SetEmptySummary()
				pt0 := sum0.DataPoints().AppendEmpty()
				pt0.SetStartTimestamp(startTimestamp)
				pt0.SetTimestamp(tsNanos)
				pt0.SetCount(500)
				pt0.SetSum(100.0)
				pt0.Attributes().PutStr("foo", "bar")
				qvL := pt0.QuantileValues()
				q50 := qvL.AppendEmpty()
				q50.SetQuantile(.50)
				q50.SetValue(1.0)
				q75 := qvL.AppendEmpty()
				q75.SetQuantile(.75)
				q75.SetValue(2.0)
				q100 := qvL.AppendEmpty()
				q100.SetQuantile(1)
				q100.SetValue(5.0)
				return []pmetric.Metrics{md0}
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt.run(t)
		})
	}
}
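
// buildTestData is a single metric-builder test case: a name, the scraped
// pages to replay through the transaction, and a function that produces the
// expected pmetric.Metrics for each page.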
type buildTestData struct {
	name   string
	inputs []*testScrapedPage
	wants  func() []pmetric.Metrics
}
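
// run replays each scraped page through a fresh transaction and compares the
// committed output with the corresponding expected metrics. An empty
// expectation means nothing should have reached the sink for that page.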
func (tt buildTestData) run(t *testing.T) {
	wants := tt.wants()
	assert.EqualValues(t, len(wants), len(tt.inputs))
	st := ts
	for i, page := range tt.inputs {
		sink := new(consumertest.MetricsSink)
		tr := newTransaction(scrapeCtx, &startTimeAdjuster{startTime: startTimestamp}, sink, nil, receivertest.NewNopCreateSettings(), nopObsRecv(t), false)
		for _, pt := range page.pts {
			// Override the sample timestamp so each scraped page gets its own scrape time.
			pt.t = st
			_, err := tr.Append(0, pt.lb, pt.t, pt.v)
			assert.NoError(t, err)
			for _, e := range pt.exemplars {
				_, err := tr.AppendExemplar(0, pt.lb, e)
				assert.NoError(t, err)
			}
		}
		assert.NoError(t, tr.Commit())
		mds := sink.AllMetrics()
		if wants[i].ResourceMetrics().Len() == 0 {
			// The receiver does not emit empty metrics, so nothing should have reached the sink.
			require.Len(t, mds, 0)
			st += interval
			continue
		}
		require.Len(t, mds, 1)
		assertEquivalentMetrics(t, wants[i], mds[0])
		st += interval
	}
}
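
// errorAdjuster is a metrics adjuster whose AdjustMetrics call always returns
// the configured error.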
type errorAdjuster struct {
	err error
}

func (ea *errorAdjuster) AdjustMetrics(pmetric.Metrics) error {
	return ea.err
}
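
// startTimeAdjuster rewrites the start timestamp of every sum, summary, and
// histogram data point to a fixed value so that expected metrics can be built
// with a known startTimestamp.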
type startTimeAdjuster struct {
	startTime pcommon.Timestamp
}

func (s *startTimeAdjuster) AdjustMetrics(metrics pmetric.Metrics) error {
	for i := 0; i < metrics.ResourceMetrics().Len(); i++ {
		rm := metrics.ResourceMetrics().At(i)
		for j := 0; j < rm.ScopeMetrics().Len(); j++ {
			ilm := rm.ScopeMetrics().At(j)
			for k := 0; k < ilm.Metrics().Len(); k++ {
				metric := ilm.Metrics().At(k)
				switch metric.Type() {
				case pmetric.MetricTypeSum:
					dps := metric.Sum().DataPoints()
					for l := 0; l < dps.Len(); l++ {
						dps.At(l).SetStartTimestamp(s.startTime)
					}
				case pmetric.MetricTypeSummary:
					dps := metric.Summary().DataPoints()
					for l := 0; l < dps.Len(); l++ {
						dps.At(l).SetStartTimestamp(s.startTime)
					}
				case pmetric.MetricTypeHistogram:
					dps := metric.Histogram().DataPoints()
					for l := 0; l < dps.Len(); l++ {
						dps.At(l).SetStartTimestamp(s.startTime)
					}
				case pmetric.MetricTypeEmpty, pmetric.MetricTypeGauge, pmetric.MetricTypeExponentialHistogram:
					// Nothing to adjust for these metric types.
				}
			}
		}
	}
	return nil
}
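
// testDataPoint is a single scraped sample: its label set, timestamp, value,
// and any exemplars attached to it.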
type testDataPoint struct {
	lb        labels.Labels
	t         int64
	v         float64
	exemplars []exemplar.Exemplar
}
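
// testScrapedPage groups the data points that are appended and committed as a
// single scrape (one transaction).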
type testScrapedPage struct {
	pts []*testDataPoint
}
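
// createDataPoint builds a testDataPoint for the given metric name and value,
// attaching the supplied label pairs plus fixed __name__, job="job", and
// instance="instance" labels.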
func createDataPoint(mname string, value float64, es []exemplar.Exemplar, tagPairs ...string) *testDataPoint {
	var lbls []string
	lbls = append(lbls, tagPairs...)
	lbls = append(lbls, model.MetricNameLabel, mname)
	lbls = append(lbls, model.JobLabel, "job")
	lbls = append(lbls, model.InstanceLabel, "instance")
	return &testDataPoint{
		lb:        labels.FromStrings(lbls...),
		t:         ts,
		v:         value,
		exemplars: es,
	}
}
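
// assertEquivalentMetrics compares want and got metric by metric, keyed by
// metric name, so the ordering of metrics within a scope does not matter.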
func assertEquivalentMetrics(t *testing.T, want, got pmetric.Metrics) {
	require.Equal(t, want.ResourceMetrics().Len(), got.ResourceMetrics().Len())
	if want.ResourceMetrics().Len() == 0 {
		return
	}
	for i := 0; i < want.ResourceMetrics().Len(); i++ {
		wantSm := want.ResourceMetrics().At(i).ScopeMetrics()
		gotSm := got.ResourceMetrics().At(i).ScopeMetrics()
		require.Equal(t, wantSm.Len(), gotSm.Len())
		if wantSm.Len() == 0 {
			return
		}
		for j := 0; j < wantSm.Len(); j++ {
			wantMs := wantSm.At(j).Metrics()
			gotMs := gotSm.At(j).Metrics()
			require.Equal(t, wantMs.Len(), gotMs.Len())
			wmap := map[string]pmetric.Metric{}
			gmap := map[string]pmetric.Metric{}
			for k := 0; k < wantMs.Len(); k++ {
				wi := wantMs.At(k)
				wmap[wi.Name()] = wi
				gi := gotMs.At(k)
				gmap[gi.Name()] = gi
			}
			assert.EqualValues(t, wmap, gmap)
		}
	}
}