scraper.go 2.6 KB

  1. // Copyright The OpenTelemetry Authors
  2. // SPDX-License-Identifier: Apache-2.0
  3. package internal // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/purefareceiver/internal"
  4. import (
  5. "context"
  6. "fmt"
  7. "net/url"
  8. "time"
  9. configutil "github.com/prometheus/common/config"
  10. "github.com/prometheus/common/model"
  11. "github.com/prometheus/prometheus/config"
  12. "github.com/prometheus/prometheus/discovery"
  13. "go.opentelemetry.io/collector/component"
  14. "go.opentelemetry.io/collector/receiver"
  15. )
// Scraper converts one FlashArray scraper definition into the Prometheus
// scrape configurations that the wrapped Prometheus receiver will execute.
type Scraper interface {
	ToPrometheusReceiverConfig(host component.Host, fact receiver.Factory) ([]*config.ScrapeConfig, error)
}
// ScraperType identifies which FlashArray metrics family a scraper collects.
// The value is embedded in the scrape job name and selects the metrics path
// ("/metrics/<type>") on the exporter.
type ScraperType string

const (
	ScraperTypeArray       ScraperType = "array"
	ScraperTypeHosts       ScraperType = "hosts"
	ScraperTypeDirectories ScraperType = "directories"
	ScraperTypePods        ScraperType = "pods"
	ScraperTypeVolumes     ScraperType = "volumes"
)
// scraper is the default Scraper implementation; it carries everything needed
// to build one Prometheus scrape config per configured array.
type scraper struct {
	scraperType ScraperType // metrics family to scrape; also part of job name and metrics path
	endpoint    string      // URL whose scheme and host become the scrape target
	configs     []ScraperConfig // one entry per array; supplies Address and Auth extension ID
	scrapeInterval time.Duration // used for both scrape interval and scrape timeout
	labels         model.LabelSet // extra labels attached to every static target
}
  34. func NewScraper(_ context.Context,
  35. scraperType ScraperType,
  36. endpoint string,
  37. configs []ScraperConfig,
  38. scrapeInterval time.Duration,
  39. labels model.LabelSet,
  40. ) Scraper {
  41. return &scraper{
  42. scraperType: scraperType,
  43. endpoint: endpoint,
  44. configs: configs,
  45. scrapeInterval: scrapeInterval,
  46. labels: labels,
  47. }
  48. }
  49. func (h *scraper) ToPrometheusReceiverConfig(host component.Host, _ receiver.Factory) ([]*config.ScrapeConfig, error) {
  50. scrapeCfgs := []*config.ScrapeConfig{}
  51. for _, arr := range h.configs {
  52. u, err := url.Parse(h.endpoint)
  53. if err != nil {
  54. return nil, err
  55. }
  56. bearerToken, err := RetrieveBearerToken(arr.Auth, host.GetExtensions())
  57. if err != nil {
  58. return nil, err
  59. }
  60. httpConfig := configutil.HTTPClientConfig{}
  61. httpConfig.BearerToken = configutil.Secret(bearerToken)
  62. scrapeConfig := &config.ScrapeConfig{
  63. HTTPClientConfig: httpConfig,
  64. ScrapeInterval: model.Duration(h.scrapeInterval),
  65. ScrapeTimeout: model.Duration(h.scrapeInterval),
  66. JobName: fmt.Sprintf("%s/%s/%s", "purefa", h.scraperType, arr.Address),
  67. HonorTimestamps: true,
  68. Scheme: u.Scheme,
  69. MetricsPath: fmt.Sprintf("/metrics/%s", h.scraperType),
  70. Params: url.Values{
  71. "endpoint": {arr.Address},
  72. },
  73. ServiceDiscoveryConfigs: discovery.Configs{
  74. &discovery.StaticConfig{
  75. {
  76. Targets: []model.LabelSet{
  77. {model.AddressLabel: model.LabelValue(u.Host)},
  78. },
  79. Labels: h.labels,
  80. },
  81. },
  82. },
  83. }
  84. scrapeCfgs = append(scrapeCfgs, scrapeConfig)
  85. }
  86. return scrapeCfgs, nil
  87. }