scraper.go

// Copyright The OpenTelemetry Authors
// SPDX-License-Identifier: Apache-2.0

package haproxyreceiver // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/haproxyreceiver"

import (
    "bytes"
    "context"
    "encoding/csv"
    "fmt"
    "io"
    "net"
    "net/http"
    "net/url"
    "strconv"
    "strings"
    "time"

    "go.opentelemetry.io/collector/component"
    "go.opentelemetry.io/collector/pdata/pcommon"
    "go.opentelemetry.io/collector/pdata/pmetric"
    "go.opentelemetry.io/collector/receiver"
    "go.opentelemetry.io/collector/receiver/scrapererror"
    "go.uber.org/multierr"
    "go.uber.org/zap"

    "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/haproxyreceiver/internal/metadata"
)
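
// showStatsCommand is the command written to the HAProxy stats socket to
// request a CSV dump of proxy statistics.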
var (
    showStatsCommand = []byte("show stats\n")
)
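
// scraper collects HAProxy statistics, over HTTP(S) or a UNIX stats socket,
// and converts them into OpenTelemetry metrics.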
type scraper struct {
    cfg               *Config
    httpClient        *http.Client
    logger            *zap.Logger
    mb                *metadata.MetricsBuilder
    telemetrySettings component.TelemetrySettings
}
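
// scrape fetches one snapshot of statistics from the configured endpoint and
// records a data point per metric for every CSV record.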
func (s *scraper) scrape(ctx context.Context) (pmetric.Metrics, error) {
    var records []map[string]string
    if u, parseErr := url.Parse(s.cfg.Endpoint); parseErr == nil && strings.HasPrefix(u.Scheme, "http") {
        // HTTP(S) endpoint: appending ";csv" to the stats page URI selects
        // the CSV rendering of the statistics.
        resp, err := s.httpClient.Get(s.cfg.Endpoint + ";csv")
        if err != nil {
            return pmetric.NewMetrics(), err
        }
        defer resp.Body.Close()
        buf, err := io.ReadAll(resp.Body)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
        records, err = s.readStats(buf)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
    } else {
        // Any endpoint that does not parse as an HTTP(S) URL is treated as
        // the path of a UNIX stats socket.
        var d net.Dialer
        c, err := d.DialContext(ctx, "unix", s.cfg.Endpoint)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
        defer func(c net.Conn) {
            _ = c.Close()
        }(c)
        _, err = c.Write(showStatsCommand)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
        // HAProxy closes the connection after answering a non-interactive
        // command, so read until EOF; a single fixed-size read could
        // truncate the stats of larger deployments.
        buf, err := io.ReadAll(c)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
        records, err = s.readStats(buf)
        if err != nil {
            return pmetric.NewMetrics(), err
        }
    }
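    // One record per frontend, backend, or server. Columns that do not apply
    // to a given proxy type come back empty, so optional metrics are only
    // recorded when the field is non-empty.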
    var scrapeErrors []error
    now := pcommon.NewTimestampFromTime(time.Now())
    for _, record := range records {
        err := s.mb.RecordHaproxySessionsCountDataPoint(now, record["scur"])
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        if record["conn_rate"] != "" {
            err = s.mb.RecordHaproxyConnectionsRateDataPoint(now, record["conn_rate"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["conn_tot"] != "" {
            err = s.mb.RecordHaproxyConnectionsTotalDataPoint(now, record["conn_tot"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["lbtot"] != "" {
            err = s.mb.RecordHaproxyServerSelectedTotalDataPoint(now, record["lbtot"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        err = s.mb.RecordHaproxyBytesInputDataPoint(now, record["bin"])
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyBytesOutputDataPoint(now, record["bout"])
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        if record["cli_abrt"] != "" {
            err = s.mb.RecordHaproxyClientsCanceledDataPoint(now, record["cli_abrt"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["comp_byp"] != "" {
            err = s.mb.RecordHaproxyCompressionBypassDataPoint(now, record["comp_byp"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["comp_in"] != "" {
            err = s.mb.RecordHaproxyCompressionInputDataPoint(now, record["comp_in"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["comp_out"] != "" {
            err = s.mb.RecordHaproxyCompressionOutputDataPoint(now, record["comp_out"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["comp_rsp"] != "" {
            err = s.mb.RecordHaproxyCompressionCountDataPoint(now, record["comp_rsp"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["dreq"] != "" {
            err = s.mb.RecordHaproxyRequestsDeniedDataPoint(now, record["dreq"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["dresp"] != "" {
            err = s.mb.RecordHaproxyResponsesDeniedDataPoint(now, record["dresp"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["downtime"] != "" {
            err = s.mb.RecordHaproxyDowntimeDataPoint(now, record["downtime"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["econ"] != "" {
            err = s.mb.RecordHaproxyConnectionsErrorsDataPoint(now, record["econ"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["ereq"] != "" {
            err = s.mb.RecordHaproxyRequestsErrorsDataPoint(now, record["ereq"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        // Response errors are reported as the sum of eresp and srv_abrt; a
        // value that fails to parse is surfaced as a scrape error and
        // contributes zero to the sum.
        if record["eresp"] != "" && record["srv_abrt"] != "" {
            aborts := record["srv_abrt"]
            eresp := record["eresp"]
            abortsVal, err2 := strconv.ParseInt(aborts, 10, 64)
            if err2 != nil {
                scrapeErrors = append(scrapeErrors, fmt.Errorf("failed to parse int64 for HaproxyResponsesErrors, value was %s: %w", aborts, err2))
            }
            erespVal, err2 := strconv.ParseInt(eresp, 10, 64)
            if err2 != nil {
                scrapeErrors = append(scrapeErrors, fmt.Errorf("failed to parse int64 for HaproxyResponsesErrors, value was %s: %w", eresp, err2))
            }
            s.mb.RecordHaproxyResponsesErrorsDataPoint(now, abortsVal+erespVal)
        }
        if record["chkfail"] != "" {
            err = s.mb.RecordHaproxyFailedChecksDataPoint(now, record["chkfail"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["wredis"] != "" {
            err = s.mb.RecordHaproxyRequestsRedispatchedDataPoint(now, record["wredis"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        // Request totals are recorded once per status code class, drawn from
        // the hrsp_* columns.
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_1xx"], metadata.AttributeStatusCode1xx)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_2xx"], metadata.AttributeStatusCode2xx)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_3xx"], metadata.AttributeStatusCode3xx)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_4xx"], metadata.AttributeStatusCode4xx)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_5xx"], metadata.AttributeStatusCode5xx)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        err = s.mb.RecordHaproxyRequestsTotalDataPoint(now, record["hrsp_other"], metadata.AttributeStatusCodeOther)
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        if record["wretr"] != "" {
            err = s.mb.RecordHaproxyConnectionsRetriesDataPoint(now, record["wretr"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        err = s.mb.RecordHaproxySessionsTotalDataPoint(now, record["stot"])
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        if record["qcur"] != "" {
            err = s.mb.RecordHaproxyRequestsQueuedDataPoint(now, record["qcur"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["req_rate"] != "" {
            err = s.mb.RecordHaproxyRequestsRateDataPoint(now, record["req_rate"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        if record["ttime"] != "" {
            err = s.mb.RecordHaproxySessionsAverageDataPoint(now, record["ttime"])
            if err != nil {
                scrapeErrors = append(scrapeErrors, err)
            }
        }
        err = s.mb.RecordHaproxySessionsRateDataPoint(now, record["rate"])
        if err != nil {
            scrapeErrors = append(scrapeErrors, err)
        }
        // Emit the accumulated data points under a resource identifying the
        // proxy name, service name, and endpoint address.
        rb := s.mb.NewResourceBuilder()
        rb.SetHaproxyProxyName(record["pxname"])
        rb.SetHaproxyServiceName(record["svname"])
        rb.SetHaproxyAddr(s.cfg.Endpoint)
        s.mb.EmitForResource(metadata.WithResource(rb.Emit()))
    }
    if len(scrapeErrors) > 0 {
        return s.mb.Emit(), scrapererror.NewPartialScrapeError(multierr.Combine(scrapeErrors...), len(scrapeErrors))
    }
    return s.mb.Emit(), nil
}
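
// readStats parses HAProxy's CSV statistics output into one map per data row,
// keyed by column name.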
func (s *scraper) readStats(buf []byte) ([]map[string]string, error) {
    reader := csv.NewReader(bytes.NewReader(buf))
    headers, err := reader.Read()
    if err != nil {
        return nil, err
    }
    // The header line starts with "# "; trim the prefix so the first column
    // name can be used as a map key.
    headers[0] = strings.TrimPrefix(headers[0], "# ")
    rows, err := reader.ReadAll()
    if err != nil {
        return nil, err
    }
    results := make([]map[string]string, len(rows))
    for i, record := range rows {
        result := make(map[string]string)
        results[i] = result
        for j, header := range headers {
            result[header] = record[j]
        }
    }
    return results, nil
}
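
// start builds the HTTP client from the receiver configuration; the client is
// only used for HTTP(S) endpoints, while UNIX sockets are dialed per scrape.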
func (s *scraper) start(_ context.Context, host component.Host) error {
    var err error
    s.httpClient, err = s.cfg.HTTPClientSettings.ToClient(host, s.telemetrySettings)
    return err
}
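
// newScraper wires the receiver configuration and telemetry settings into a
// scraper instance.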
func newScraper(cfg *Config, settings receiver.CreateSettings) *scraper {
    return &scraper{
        logger:            settings.TelemetrySettings.Logger,
        mb:                metadata.NewMetricsBuilder(cfg.MetricsBuilderConfig, settings),
        cfg:               cfg,
        telemetrySettings: settings.TelemetrySettings,
    }
}