client.go 4.0 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173
  1. // Copyright The OpenTelemetry Authors
  2. // SPDX-License-Identifier: Apache-2.0
  3. package apachesparkreceiver // import "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/apachesparkreceiver"
  4. import (
  5. "encoding/json"
  6. "fmt"
  7. "io"
  8. "net/http"
  9. "strconv"
  10. "go.opentelemetry.io/collector/component"
  11. "go.uber.org/zap"
  12. "github.com/open-telemetry/opentelemetry-collector-contrib/receiver/apachesparkreceiver/internal/models"
  13. )
const (
	// metricsPath is the Spark REST endpoint serving cluster-level metrics as JSON.
	metricsPath = "/metrics/json"
	// applicationsPath is the Spark REST endpoint listing applications; per-application
	// stage/executor/job paths are built by appending the application ID to it.
	applicationsPath = "/api/v1/applications"
)
// client is the interface for retrieving metrics data from the Apache Spark
// monitoring REST API.
type client interface {
	// Get issues a GET request for the given path (relative to the configured
	// endpoint) and returns the raw response body.
	Get(path string) ([]byte, error)
	// ClusterStats returns cluster-level metric properties.
	ClusterStats() (*models.ClusterProperties, error)
	// Applications returns the applications known to the Spark API.
	Applications() ([]models.Application, error)
	// StageStats returns stage stats for the application with the given ID.
	StageStats(appID string) ([]models.Stage, error)
	// ExecutorStats returns executor stats for the application with the given ID.
	ExecutorStats(appID string) ([]models.Executor, error)
	// JobStats returns job stats for the application with the given ID.
	JobStats(appID string) ([]models.Job, error)
}
// Compile-time check that apacheSparkClient implements the client interface.
var _ client = (*apacheSparkClient)(nil)

// apacheSparkClient is the HTTP-backed implementation of client.
type apacheSparkClient struct {
	client *http.Client // HTTP client built from the receiver config
	cfg    *Config      // receiver config; cfg.Endpoint is the base URL for all requests
	logger *zap.Logger
}
  32. // newApacheSparkClient creates a new client to make requests for the Apache Spark receiver.
  33. func newApacheSparkClient(cfg *Config, host component.Host, settings component.TelemetrySettings) (client, error) {
  34. client, err := cfg.ToClient(host, settings)
  35. if err != nil {
  36. return nil, fmt.Errorf("failed to create HTTP Client: %w", err)
  37. }
  38. return &apacheSparkClient{
  39. client: client,
  40. cfg: cfg,
  41. logger: settings.Logger,
  42. }, nil
  43. }
  44. // Get issues an authorized Get requests to the specified URL.
  45. func (c *apacheSparkClient) Get(path string) ([]byte, error) {
  46. req, err := c.buildReq(path)
  47. if err != nil {
  48. return nil, err
  49. }
  50. resp, err := c.client.Do(req)
  51. if err != nil {
  52. return nil, err
  53. }
  54. defer func() {
  55. if err = resp.Body.Close(); err != nil {
  56. c.logger.Warn("failed to close response body", zap.Error(err))
  57. }
  58. }()
  59. if resp.StatusCode != http.StatusOK {
  60. if resp.StatusCode >= 400 {
  61. c.logger.Error("apachespark", zap.Error(err), zap.String("status_code", strconv.Itoa(resp.StatusCode)))
  62. }
  63. return nil, fmt.Errorf("request GET %s failed - %q", req.URL.String(), resp.Status)
  64. }
  65. body, err := io.ReadAll(resp.Body)
  66. if err != nil {
  67. return nil, fmt.Errorf("failed to read response body %w", err)
  68. }
  69. return body, nil
  70. }
  71. func (c *apacheSparkClient) ClusterStats() (*models.ClusterProperties, error) {
  72. body, err := c.Get(metricsPath)
  73. if err != nil {
  74. return nil, err
  75. }
  76. var clusterStats *models.ClusterProperties
  77. err = json.Unmarshal(body, &clusterStats)
  78. if err != nil {
  79. return nil, err
  80. }
  81. return clusterStats, nil
  82. }
  83. func (c *apacheSparkClient) Applications() ([]models.Application, error) {
  84. body, err := c.Get(applicationsPath)
  85. if err != nil {
  86. return nil, err
  87. }
  88. var apps []models.Application
  89. err = json.Unmarshal(body, &apps)
  90. if err != nil {
  91. return nil, err
  92. }
  93. return apps, nil
  94. }
  95. func (c *apacheSparkClient) StageStats(appID string) ([]models.Stage, error) {
  96. stagePath := fmt.Sprintf("%s/%s/stages", applicationsPath, appID)
  97. body, err := c.Get(stagePath)
  98. if err != nil {
  99. return nil, err
  100. }
  101. var stageStats []models.Stage
  102. err = json.Unmarshal(body, &stageStats)
  103. if err != nil {
  104. return nil, err
  105. }
  106. return stageStats, nil
  107. }
  108. func (c *apacheSparkClient) ExecutorStats(appID string) ([]models.Executor, error) {
  109. executorPath := fmt.Sprintf("%s/%s/executors", applicationsPath, appID)
  110. body, err := c.Get(executorPath)
  111. if err != nil {
  112. return nil, err
  113. }
  114. var executorStats []models.Executor
  115. err = json.Unmarshal(body, &executorStats)
  116. if err != nil {
  117. return nil, err
  118. }
  119. return executorStats, nil
  120. }
  121. func (c *apacheSparkClient) JobStats(appID string) ([]models.Job, error) {
  122. jobPath := fmt.Sprintf("%s/%s/jobs", applicationsPath, appID)
  123. body, err := c.Get(jobPath)
  124. if err != nil {
  125. return nil, err
  126. }
  127. var jobStats []models.Job
  128. err = json.Unmarshal(body, &jobStats)
  129. if err != nil {
  130. return nil, err
  131. }
  132. return jobStats, nil
  133. }
  134. func (c *apacheSparkClient) buildReq(path string) (*http.Request, error) {
  135. url := c.cfg.Endpoint + path
  136. req, err := http.NewRequest(http.MethodGet, url, nil)
  137. if err != nil {
  138. return nil, err
  139. }
  140. return req, nil
  141. }