adapt to the latest mdatagen changes
dmitryax committed Jul 31, 2023
1 parent a08e236 commit 3245e9c
Showing 9 changed files with 141 additions and 14 deletions.
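This commit tracks an upstream mdatagen API change: resource attributes are no longer passed to MetricsBuilder.Emit as ResourceMetricsOption values (WithDatabricksInstanceName, WithSparkClusterID, and so on) but are set on a generated ResourceBuilder whose output is attached with metadata.WithResource. Below is a minimal before/after sketch of the call pattern, using only identifiers that appear in this diff; the helper function, its parameters, and the Config type are hypothetical stand-ins for the receiver's own config.

package databricksreceiver

import (
	"go.opentelemetry.io/collector/pdata/pmetric"

	"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

// emitDatabricksResource is a hypothetical helper illustrating the migration;
// Config is assumed to be the receiver config that embeds MetricsBuilderConfig.
func emitDatabricksResource(mb *metadata.MetricsBuilder, cfg Config, instanceName, clusterID string) pmetric.Metrics {
	// Old mdatagen API (removed by this commit): resource attributes were
	// supplied to Emit as ResourceMetricsOption values, e.g.
	//   return mb.Emit(
	//       metadata.WithDatabricksInstanceName(instanceName),
	//       metadata.WithSparkClusterID(clusterID),
	//   )

	// New mdatagen API: set attributes on a ResourceBuilder, then attach the
	// built resource with metadata.WithResource.
	rb := metadata.NewResourceBuilder(cfg.MetricsBuilderConfig.ResourceAttributes)
	rb.SetDatabricksInstanceName(instanceName)
	rb.SetSparkClusterID(clusterID)
	return mb.Emit(metadata.WithResource(rb.Emit()))
}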
2 changes: 1 addition & 1 deletion internal/receiver/databricksreceiver/README.md
@@ -5,7 +5,7 @@
| ------------- |-----------|
| Stability | [development]: metrics |
| Distributions | [] |
-| Issues | [![Open issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aopen%20label%3Areceiver%2Fdatabricks%20&label=open&color=orange&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aopen+is%3Aissue+label%3Areceiver%2Fdatabricks) [![Closed issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aclosed%20label%3Areceiver%2Fdatabricks%20&label=closed&color=blue&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aclosed+is%3Aissue+label%3Areceiver%2Fdatabricks) |
+| Issues | ![Open issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aopen%20label%3Areceiver%2Fdatabricks%20&label=open&color=orange&logo=opentelemetry) ![Closed issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aclosed%20label%3Areceiver%2Fdatabricks%20&label=closed&color=blue&logo=opentelemetry) |

[development]: https://github.com/open-telemetry/opentelemetry-collector#development
<!-- end autogenerated section -->
1 change: 1 addition & 0 deletions internal/receiver/databricksreceiver/factory.go
@@ -58,6 +58,7 @@ func newReceiverFactory() receiver.CreateMetricsFunc {
rmp: databricks.NewRunMetricsProvider(dbrsvc),
dbrmp: databricks.MetricsProvider{Svc: dbrsvc},
metricsBuilder: metadata.NewMetricsBuilder(dbrcfg.MetricsBuilderConfig, settings),
+resourceBuilder: metadata.NewResourceBuilder(dbrcfg.MetricsBuilderConfig.ResourceAttributes),
scmb: spark.ClusterMetricsBuilder{Ssvc: ssvc},
semb: spark.ExtraMetricsBuilder{
Ssvc: ssvc,

Some generated files are not rendered by default.

Some generated files are not rendered by default.
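The hidden files are the mdatagen-generated sources, which include the new ResourceBuilder used throughout this commit. Purely as a sketch, based only on the calls visible in this diff, the generated type looks roughly like the following; the struct fields, the ResourceAttributesConfig shape, the Enabled gating, and the attribute key string are assumptions, not code from this repository.

package metadata

import "go.opentelemetry.io/collector/pdata/pcommon"

// ResourceAttributesConfig (assumed shape): per-attribute enable flags.
type ResourceAttributesConfig struct {
	DatabricksInstanceName struct{ Enabled bool }
	// spark.cluster.id, spark.cluster.name, etc. would follow the same shape.
}

// ResourceBuilder (sketch): accumulates resource attributes and emits a
// pcommon.Resource. Only NewResourceBuilder, the Set* methods, and Emit are
// confirmed by this diff; everything else here is assumed.
type ResourceBuilder struct {
	config ResourceAttributesConfig
	res    pcommon.Resource
}

func NewResourceBuilder(rac ResourceAttributesConfig) *ResourceBuilder {
	return &ResourceBuilder{config: rac, res: pcommon.NewResource()}
}

func (rb *ResourceBuilder) SetDatabricksInstanceName(val string) {
	if rb.config.DatabricksInstanceName.Enabled { // assumed enable flag
		rb.res.Attributes().PutStr("databricks.instance.name", val) // assumed key
	}
}

// SetSparkClusterID and SetSparkClusterName would follow the same pattern.

// Emit returns the accumulated resource and resets the builder, so it can be
// reused per Spark cluster as in the Build method later in this diff.
func (rb *ResourceBuilder) Emit() pcommon.Resource {
	r := rb.res
	rb.res = pcommon.NewResource()
	return r
}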

@@ -15,6 +15,7 @@
package spark

import (
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
"testing"
"time"

@@ -47,7 +48,8 @@ func TestClusterMetricsBuilder_GeneratedMetrics(t *testing.T) {
const expectedCount = 112

testBuilder := commontest.NewTestMetricsBuilder()
-built := coreMetrics.Build(testBuilder, pcommon.NewTimestampFromTime(time.Now()))
+rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+built := coreMetrics.Build(testBuilder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metric := range built {
metric.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -15,6 +15,7 @@
package spark

import (
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
"testing"
"time"

@@ -32,7 +33,8 @@ func TestSparkExtraMetricsBuilder_Executors(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
-built := execMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+built := execMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -53,7 +55,8 @@ func TestSparkExtraMetricsBuilder_Jobs(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
-built := jobMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+built := jobMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -77,7 +80,8 @@ func TestSparkExtraMetricsBuilder_Stages(t *testing.T) {
require.NoError(t, err)

builder := commontest.NewTestMetricsBuilder()
-built := stageMetrics.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+built := stageMetrics.Build(builder, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
pm := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().MoveAndAppendTo(pm.ResourceMetrics())
@@ -124,15 +124,16 @@ func (m *ResourceMetrics) addStageInfo(clstr Cluster, appID string, info StageIn
})
}

-func (m *ResourceMetrics) Build(builder *metadata.MetricsBuilder, now pcommon.Timestamp, rmo ...metadata.ResourceMetricsOption) []pmetric.Metrics {
+func (m *ResourceMetrics) Build(mb *metadata.MetricsBuilder, rb *metadata.ResourceBuilder, now pcommon.Timestamp, instanceName string) []pmetric.Metrics {
var out []pmetric.Metrics
for rs, metricInfos := range m.m {
for _, mi := range metricInfos {
-mi.build(builder, rs, now)
+mi.build(mb, rs, now)
}
-rmo = append(rmo, metadata.WithSparkClusterID(rs.cluster.ClusterID))
-rmo = append(rmo, metadata.WithSparkClusterName(rs.cluster.ClusterName))
-out = append(out, builder.Emit(rmo...))
+rb.SetDatabricksInstanceName(instanceName)
+rb.SetSparkClusterID(rs.cluster.ClusterID)
+rb.SetSparkClusterName(rs.cluster.ClusterName)
+out = append(out, mb.Emit(metadata.WithResource(rb.Emit())))
}
return out
}
@@ -23,6 +23,7 @@ import (
"go.opentelemetry.io/collector/pdata/pmetric"

"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/commontest"
"github.com/signalfx/splunk-otel-collector/internal/receiver/databricksreceiver/internal/metadata"
)

func TestSparkDbrMetrics_Append(t *testing.T) {
@@ -45,8 +46,9 @@ func TestSparkDbrMetrics_Append(t *testing.T) {
})
outerRM.Append(rmSub2)

-builder := commontest.NewTestMetricsBuilder()
-built := outerRM.Build(builder, pcommon.NewTimestampFromTime(time.Now()))
+mb := commontest.NewTestMetricsBuilder()
+rb := metadata.NewResourceBuilder(metadata.DefaultResourceAttributesConfig())
+built := outerRM.Build(mb, rb, pcommon.NewTimestampFromTime(time.Now()), "my-app-id")
allMetrics := pmetric.NewMetrics()
for _, metrics := range built {
metrics.ResourceMetrics().CopyTo(allMetrics.ResourceMetrics())
6 changes: 4 additions & 2 deletions internal/receiver/databricksreceiver/scraper.go
@@ -39,6 +39,7 @@ type scraper struct {
dbrsvc databricks.Service
logger *zap.Logger
metricsBuilder *metadata.MetricsBuilder
+resourceBuilder *metadata.ResourceBuilder
dbrInstanceName string
}

@@ -60,7 +61,8 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
return pmetric.Metrics{}, fmt.Errorf("scrape failed to add multi job run metrics: %w", err)
}

-dbrMetrics := s.metricsBuilder.Emit(metadata.WithDatabricksInstanceName(s.dbrInstanceName))
+s.resourceBuilder.SetDatabricksInstanceName(s.dbrInstanceName)
+dbrMetrics := s.metricsBuilder.Emit(metadata.WithResource(s.resourceBuilder.Emit()))

// spark metrics
clusters, err := s.dbrsvc.RunningClusters()
@@ -103,7 +105,7 @@ func (s scraper) scrape(_ context.Context) (pmetric.Metrics, error) {
out := pmetric.NewMetrics()
dbrMetrics.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())

-sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, now, metadata.WithDatabricksInstanceName(s.dbrInstanceName))
+sparkMetrics := allSparkDbrMetrics.Build(s.metricsBuilder, s.resourceBuilder, now, s.dbrInstanceName)
for _, metric := range sparkMetrics {
metric.ResourceMetrics().MoveAndAppendTo(out.ResourceMetrics())
}
