generated_metrics_test.go

// Code generated by mdatagen. DO NOT EDIT.

package metadata

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
	"go.opentelemetry.io/collector/receiver/receivertest"
	"go.uber.org/zap"
	"go.uber.org/zap/zaptest/observer"
)

type testConfigCollection int

const (
	testSetDefault testConfigCollection = iota
	testSetAll
	testSetNone
)
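
// TestMetricsBuilder exercises the generated MetricsBuilder with the default,
// all_set and none_set configurations and verifies every emitted metric.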
func TestMetricsBuilder(t *testing.T) {
	tests := []struct {
		name      string
		configSet testConfigCollection
	}{
		{
			name:      "default",
			configSet: testSetDefault,
		},
		{
			name:      "all_set",
			configSet: testSetAll,
		},
		{
			name:      "none_set",
			configSet: testSetNone,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			start := pcommon.Timestamp(1_000_000_000)
			ts := pcommon.Timestamp(1_000_001_000)
			observedZapCore, observedLogs := observer.New(zap.WarnLevel)
			settings := receivertest.NewNopCreateSettings()
			settings.Logger = zap.New(observedZapCore)
			mb := NewMetricsBuilder(loadMetricsBuilderConfig(t, test.name), settings, WithStartTime(start))

			expectedWarnings := 0
			assert.Equal(t, expectedWarnings, observedLogs.Len())

			defaultMetricsCount := 0
			allMetricsCount := 0
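
			// Record one data point for each metric; metrics enabled by default
			// also increment defaultMetricsCount.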
			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyBytesInputDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyBytesOutputDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyClientsCanceledDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyCompressionBypassDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyCompressionCountDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyCompressionInputDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyCompressionOutputDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyConnectionsErrorsDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyConnectionsRateDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyConnectionsRetriesDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyConnectionsTotalDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyDowntimeDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxyFailedChecksDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsDeniedDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsErrorsDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsQueuedDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsRateDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsRedispatchedDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyRequestsTotalDataPoint(ts, "1", AttributeStatusCode1xx)

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyResponsesDeniedDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyResponsesErrorsDataPoint(ts, 1)

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxyServerSelectedTotalDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxySessionsAverageDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxySessionsCountDataPoint(ts, "1")

			defaultMetricsCount++
			allMetricsCount++
			mb.RecordHaproxySessionsRateDataPoint(ts, "1")

			allMetricsCount++
			mb.RecordHaproxySessionsTotalDataPoint(ts, "1")
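
			// Build the expected resource attributes and emit everything recorded above.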
			rb := mb.NewResourceBuilder()
			rb.SetHaproxyAddr("haproxy.addr-val")
			rb.SetHaproxyProxyName("haproxy.proxy_name-val")
			rb.SetHaproxyServiceName("haproxy.service_name-val")
			res := rb.Emit()
			metrics := mb.Emit(WithResource(res))

			if test.configSet == testSetNone {
				assert.Equal(t, 0, metrics.ResourceMetrics().Len())
				return
			}

			assert.Equal(t, 1, metrics.ResourceMetrics().Len())
			rm := metrics.ResourceMetrics().At(0)
			assert.Equal(t, res, rm.Resource())
			assert.Equal(t, 1, rm.ScopeMetrics().Len())
			ms := rm.ScopeMetrics().At(0).Metrics()
			if test.configSet == testSetDefault {
				assert.Equal(t, defaultMetricsCount, ms.Len())
			}
			if test.configSet == testSetAll {
				assert.Equal(t, allMetricsCount, ms.Len())
			}
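
			// Validate each emitted metric exactly once: type, description, unit,
			// temporality and data point values.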
			validatedMetrics := make(map[string]bool)
			for i := 0; i < ms.Len(); i++ {
				switch ms.At(i).Name() {
				case "haproxy.bytes.input":
					assert.False(t, validatedMetrics["haproxy.bytes.input"], "Found a duplicate in the metrics slice: haproxy.bytes.input")
					validatedMetrics["haproxy.bytes.input"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Bytes in. Corresponds to HAProxy's `bin` metric.", ms.At(i).Description())
					assert.Equal(t, "by", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.bytes.output":
					assert.False(t, validatedMetrics["haproxy.bytes.output"], "Found a duplicate in the metrics slice: haproxy.bytes.output")
					validatedMetrics["haproxy.bytes.output"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Bytes out. Corresponds to HAProxy's `bout` metric.", ms.At(i).Description())
					assert.Equal(t, "by", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.clients.canceled":
					assert.False(t, validatedMetrics["haproxy.clients.canceled"], "Found a duplicate in the metrics slice: haproxy.clients.canceled")
					validatedMetrics["haproxy.clients.canceled"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of data transfers aborted by the client. Corresponds to HAProxy's `cli_abrt` metric", ms.At(i).Description())
					assert.Equal(t, "{cancellations}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.compression.bypass":
					assert.False(t, validatedMetrics["haproxy.compression.bypass"], "Found a duplicate in the metrics slice: haproxy.compression.bypass")
					validatedMetrics["haproxy.compression.bypass"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of bytes that bypassed the HTTP compressor (CPU/BW limit). Corresponds to HAProxy's `comp_byp` metric.", ms.At(i).Description())
					assert.Equal(t, "by", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.compression.count":
					assert.False(t, validatedMetrics["haproxy.compression.count"], "Found a duplicate in the metrics slice: haproxy.compression.count")
					validatedMetrics["haproxy.compression.count"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of HTTP responses that were compressed. Corresponds to HAProxy's `comp_rsp` metric.", ms.At(i).Description())
					assert.Equal(t, "{responses}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.compression.input":
					assert.False(t, validatedMetrics["haproxy.compression.input"], "Found a duplicate in the metrics slice: haproxy.compression.input")
					validatedMetrics["haproxy.compression.input"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of HTTP response bytes fed to the compressor. Corresponds to HAProxy's `comp_in` metric.", ms.At(i).Description())
					assert.Equal(t, "by", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.compression.output":
					assert.False(t, validatedMetrics["haproxy.compression.output"], "Found a duplicate in the metrics slice: haproxy.compression.output")
					validatedMetrics["haproxy.compression.output"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of HTTP response bytes emitted by the compressor. Corresponds to HAProxy's `comp_out` metric.", ms.At(i).Description())
					assert.Equal(t, "by", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.connections.errors":
					assert.False(t, validatedMetrics["haproxy.connections.errors"], "Found a duplicate in the metrics slice: haproxy.connections.errors")
					validatedMetrics["haproxy.connections.errors"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of requests that encountered an error trying to connect to a backend server. The backend stat is the sum of the stat. Corresponds to HAProxy's `econ` metric", ms.At(i).Description())
					assert.Equal(t, "{errors}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.connections.rate":
					assert.False(t, validatedMetrics["haproxy.connections.rate"], "Found a duplicate in the metrics slice: haproxy.connections.rate")
					validatedMetrics["haproxy.connections.rate"] = true
					assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
					assert.Equal(t, "Number of connections over the last elapsed second (frontend). Corresponds to HAProxy's `conn_rate` metric.", ms.At(i).Description())
					assert.Equal(t, "{connections}", ms.At(i).Unit())
					dp := ms.At(i).Gauge().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.connections.retries":
					assert.False(t, validatedMetrics["haproxy.connections.retries"], "Found a duplicate in the metrics slice: haproxy.connections.retries")
					validatedMetrics["haproxy.connections.retries"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of times a connection to a server was retried. Corresponds to HAProxy's `wretr` metric.", ms.At(i).Description())
					assert.Equal(t, "{retries}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.connections.total":
					assert.False(t, validatedMetrics["haproxy.connections.total"], "Found a duplicate in the metrics slice: haproxy.connections.total")
					validatedMetrics["haproxy.connections.total"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Cumulative number of connections (frontend). Corresponds to HAProxy's `conn_tot` metric.", ms.At(i).Description())
					assert.Equal(t, "{connections}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.downtime":
					assert.False(t, validatedMetrics["haproxy.downtime"], "Found a duplicate in the metrics slice: haproxy.downtime")
					validatedMetrics["haproxy.downtime"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Total downtime (in seconds). The value for the backend is the downtime for the whole backend, not the sum of the server downtime. Corresponds to HAProxy's `downtime` metric", ms.At(i).Description())
					assert.Equal(t, "s", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.failed_checks":
					assert.False(t, validatedMetrics["haproxy.failed_checks"], "Found a duplicate in the metrics slice: haproxy.failed_checks")
					validatedMetrics["haproxy.failed_checks"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of failed checks. (Only counts checks failed when the server is up). Corresponds to HAProxy's `chkfail` metric.", ms.At(i).Description())
					assert.Equal(t, "{checks}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.requests.denied":
					assert.False(t, validatedMetrics["haproxy.requests.denied"], "Found a duplicate in the metrics slice: haproxy.requests.denied")
					validatedMetrics["haproxy.requests.denied"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Requests denied because of security concerns. Corresponds to HAProxy's `dreq` metric", ms.At(i).Description())
					assert.Equal(t, "{requests}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.requests.errors":
					assert.False(t, validatedMetrics["haproxy.requests.errors"], "Found a duplicate in the metrics slice: haproxy.requests.errors")
					validatedMetrics["haproxy.requests.errors"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Cumulative number of request errors. Corresponds to HAProxy's `ereq` metric.", ms.At(i).Description())
					assert.Equal(t, "{errors}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.requests.queued":
					assert.False(t, validatedMetrics["haproxy.requests.queued"], "Found a duplicate in the metrics slice: haproxy.requests.queued")
					validatedMetrics["haproxy.requests.queued"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Current queued requests. For the backend this reports the number queued without a server assigned. Corresponds to HAProxy's `qcur` metric.", ms.At(i).Description())
					assert.Equal(t, "{requests}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.requests.rate":
					assert.False(t, validatedMetrics["haproxy.requests.rate"], "Found a duplicate in the metrics slice: haproxy.requests.rate")
					validatedMetrics["haproxy.requests.rate"] = true
					assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
					assert.Equal(t, "HTTP requests per second over last elapsed second. Corresponds to HAProxy's `req_rate` metric.", ms.At(i).Description())
					assert.Equal(t, "{requests}", ms.At(i).Unit())
					dp := ms.At(i).Gauge().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeDouble, dp.ValueType())
					assert.Equal(t, float64(1), dp.DoubleValue())
				case "haproxy.requests.redispatched":
					assert.False(t, validatedMetrics["haproxy.requests.redispatched"], "Found a duplicate in the metrics slice: haproxy.requests.redispatched")
					validatedMetrics["haproxy.requests.redispatched"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of times a request was redispatched to another server. Corresponds to HAProxy's `wredis` metric.", ms.At(i).Description())
					assert.Equal(t, "{requests}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.requests.total":
					assert.False(t, validatedMetrics["haproxy.requests.total"], "Found a duplicate in the metrics slice: haproxy.requests.total")
					validatedMetrics["haproxy.requests.total"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Total number of HTTP requests received. Corresponds to HAProxy's `req_tot`, `hrsp_1xx`, `hrsp_2xx`, `hrsp_3xx`, `hrsp_4xx`, `hrsp_5xx` and `hrsp_other` metrics.", ms.At(i).Description())
					assert.Equal(t, "{requests}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
					attrVal, ok := dp.Attributes().Get("status_code")
					assert.True(t, ok)
					assert.EqualValues(t, "1xx", attrVal.Str())
				case "haproxy.responses.denied":
					assert.False(t, validatedMetrics["haproxy.responses.denied"], "Found a duplicate in the metrics slice: haproxy.responses.denied")
					validatedMetrics["haproxy.responses.denied"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Responses denied because of security concerns. Corresponds to HAProxy's `dresp` metric", ms.At(i).Description())
					assert.Equal(t, "{responses}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.responses.errors":
					assert.False(t, validatedMetrics["haproxy.responses.errors"], "Found a duplicate in the metrics slice: haproxy.responses.errors")
					validatedMetrics["haproxy.responses.errors"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Cumulative number of response errors. Corresponds to HAProxy's `eresp` metric, `srv_abrt` will be counted here also.", ms.At(i).Description())
					assert.Equal(t, "{errors}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.server_selected.total":
					assert.False(t, validatedMetrics["haproxy.server_selected.total"], "Found a duplicate in the metrics slice: haproxy.server_selected.total")
					validatedMetrics["haproxy.server_selected.total"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Number of times a server was selected, either for new sessions or when re-dispatching. Corresponds to HAProxy's `lbtot` metric.", ms.At(i).Description())
					assert.Equal(t, "{selections}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.sessions.average":
					assert.False(t, validatedMetrics["haproxy.sessions.average"], "Found a duplicate in the metrics slice: haproxy.sessions.average")
					validatedMetrics["haproxy.sessions.average"] = true
					assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
					assert.Equal(t, "Average total session time in ms over the last 1024 requests. Corresponds to HAProxy's `ttime` metric.", ms.At(i).Description())
					assert.Equal(t, "ms", ms.At(i).Unit())
					dp := ms.At(i).Gauge().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeDouble, dp.ValueType())
					assert.Equal(t, float64(1), dp.DoubleValue())
				case "haproxy.sessions.count":
					assert.False(t, validatedMetrics["haproxy.sessions.count"], "Found a duplicate in the metrics slice: haproxy.sessions.count")
					validatedMetrics["haproxy.sessions.count"] = true
					assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
					assert.Equal(t, "Current sessions. Corresponds to HAProxy's `scur` metric.", ms.At(i).Description())
					assert.Equal(t, "{sessions}", ms.At(i).Unit())
					dp := ms.At(i).Gauge().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				case "haproxy.sessions.rate":
					assert.False(t, validatedMetrics["haproxy.sessions.rate"], "Found a duplicate in the metrics slice: haproxy.sessions.rate")
					validatedMetrics["haproxy.sessions.rate"] = true
					assert.Equal(t, pmetric.MetricTypeGauge, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Gauge().DataPoints().Len())
					assert.Equal(t, "Number of sessions per second over last elapsed second. Corresponds to HAProxy's `rate` metric.", ms.At(i).Description())
					assert.Equal(t, "{sessions}", ms.At(i).Unit())
					dp := ms.At(i).Gauge().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeDouble, dp.ValueType())
					assert.Equal(t, float64(1), dp.DoubleValue())
				case "haproxy.sessions.total":
					assert.False(t, validatedMetrics["haproxy.sessions.total"], "Found a duplicate in the metrics slice: haproxy.sessions.total")
					validatedMetrics["haproxy.sessions.total"] = true
					assert.Equal(t, pmetric.MetricTypeSum, ms.At(i).Type())
					assert.Equal(t, 1, ms.At(i).Sum().DataPoints().Len())
					assert.Equal(t, "Cumulative number of sessions. Corresponds to HAProxy's `stot` metric.", ms.At(i).Description())
					assert.Equal(t, "{sessions}", ms.At(i).Unit())
					assert.Equal(t, true, ms.At(i).Sum().IsMonotonic())
					assert.Equal(t, pmetric.AggregationTemporalityCumulative, ms.At(i).Sum().AggregationTemporality())
					dp := ms.At(i).Sum().DataPoints().At(0)
					assert.Equal(t, start, dp.StartTimestamp())
					assert.Equal(t, ts, dp.Timestamp())
					assert.Equal(t, pmetric.NumberDataPointValueTypeInt, dp.ValueType())
					assert.Equal(t, int64(1), dp.IntValue())
				}
			}
		})
	}
}