This repository has been archived by the owner on May 23, 2024. It is now read-only.

Change metrics prefix to jaeger_tracer and add descriptions #346

Merged
merged 5 commits on Jan 13, 2019
6 changes: 3 additions & 3 deletions baggage_setter_test.go
@@ -52,11 +52,11 @@ func TestTruncateBaggage(t *testing.T) {

factory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.baggage_truncations",
Name: "jaeger_tracer.baggage_truncations",
Value: 1,
},
metricstest.ExpectedMetric{
Name: "jaeger.baggage_updates",
Name: "jaeger_tracer.baggage_updates",
Tags: map[string]string{"result": "ok"},
Value: 1,
},
@@ -84,7 +84,7 @@ func TestInvalidBaggage(t *testing.T) {

factory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.baggage_updates",
Name: "jaeger_tracer.baggage_updates",
Tags: map[string]string{"result": "err"},
Value: 1,
},
2 changes: 1 addition & 1 deletion config/config_test.go
@@ -436,7 +436,7 @@ func TestBaggageRestrictionsConfig(t *testing.T) {
require.NoError(t, err)
defer closer.Close()

metricName := "jaeger.baggage_restrictions_updates"
metricName := "jaeger_tracer.baggage_restrictions_updates"
metricTags := map[string]string{"result": "err"}
key := metrics.GetKey(metricName, metricTags, "|", "=")
for i := 0; i < 100; i++ {
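For reference, a minimal sketch (not part of this PR) of the lookup key that loop composes under the new prefix. It assumes jaeger-lib's metrics.GetKey joins the metric name with sorted tag pairs using the given separators, which matches the snapshot keys used in throttler_test.go below.

```go
package main

import (
	"fmt"

	"github.com/uber/jaeger-lib/metrics"
)

func main() {
	// Illustrative only: the counter key the test builds and then polls for
	// in the factory snapshot.
	key := metrics.GetKey(
		"jaeger_tracer.baggage_restrictions_updates",
		map[string]string{"result": "err"},
		"|", "=",
	)
	fmt.Println(key) // expected: jaeger_tracer.baggage_restrictions_updates|result=err
}
```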
4 changes: 2 additions & 2 deletions internal/baggage/remote/restriction_manager_test.go
@@ -121,7 +121,7 @@ func TestNewRemoteRestrictionManager(t *testing.T) {

factory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.baggage_restrictions_updates",
Name: "jaeger_tracer.baggage_restrictions_updates",
Tags: map[string]string{"result": "ok"},
Value: 1,
},
@@ -147,7 +147,7 @@ func TestDenyBaggageOnInitializationFailure(t *testing.T) {
)
require.False(t, mgr.isReady())

metricName := "jaeger.baggage_restrictions_updates"
metricName := "jaeger_tracer.baggage_restrictions_updates"
metricTags := map[string]string{"result": "err"}
key := metrics.GetKey(metricName, metricTags, "|", "=")
for i := 0; i < 100; i++ {
4 changes: 2 additions & 2 deletions internal/throttler/remote/throttler_test.go
@@ -185,7 +185,7 @@ func TestRemoteThrottler_fetchCreditsErrors(t *testing.T) {

factory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.throttler_updates",
Name: "jaeger_tracer.throttler_updates",
Tags: map[string]string{"result": "err"},
Value: 1,
})
@@ -218,7 +218,7 @@ func TestRemotelyControlledThrottler_pollManager(t *testing.T) {

throttler.refreshCredits()
counters, _ := factory.Snapshot()
counter, ok := counters["jaeger.throttler_updates|result=ok"]
counter, ok := counters["jaeger_tracer.throttler_updates|result=ok"]
assert.True(t, ok)
assert.True(t, counter >= 1)
})
52 changes: 26 additions & 26 deletions metrics.go
@@ -21,83 +21,83 @@ import (
// Metrics is a container of all stats emitted by Jaeger tracer.
type Metrics struct {
// Number of traces started by this tracer as sampled
TracesStartedSampled metrics.Counter `metric:"traces" tags:"state=started,sampled=y"`
TracesStartedSampled metrics.Counter `metric:"traces" tags:"state=started,sampled=y" help:"Number of traces started by this tracer as sampled"`

// Number of traces started by this tracer as not sampled
TracesStartedNotSampled metrics.Counter `metric:"traces" tags:"state=started,sampled=n"`
TracesStartedNotSampled metrics.Counter `metric:"traces" tags:"state=started,sampled=n" help:"Number of traces started by this tracer as not sampled"`

// Number of externally started sampled traces this tracer joined
TracesJoinedSampled metrics.Counter `metric:"traces" tags:"state=joined,sampled=y"`
TracesJoinedSampled metrics.Counter `metric:"traces" tags:"state=joined,sampled=y" help:"Number of externally started sampled traces this tracer joined"`

// Number of externally started not-sampled traces this tracer joined
TracesJoinedNotSampled metrics.Counter `metric:"traces" tags:"state=joined,sampled=n"`
TracesJoinedNotSampled metrics.Counter `metric:"traces" tags:"state=joined,sampled=n" help:"Number of externally started not-sampled traces this tracer joined"`

// Number of sampled spans started by this tracer
SpansStartedSampled metrics.Counter `metric:"started_spans" tags:"sampled=y"`
SpansStartedSampled metrics.Counter `metric:"started_spans" tags:"sampled=y" help:"Number of sampled spans started by this tracer"`

// Number of unsampled spans started by this tracer
SpansStartedNotSampled metrics.Counter `metric:"started_spans" tags:"sampled=n"`
SpansStartedNotSampled metrics.Counter `metric:"started_spans" tags:"sampled=n" help:"Number of unsampled spans started by this tracer"`

// Number of spans finished by this tracer
SpansFinished metrics.Counter `metric:"finished_spans"`
SpansFinished metrics.Counter `metric:"finished_spans" help:"Number of spans finished by this tracer"`

// Number of errors decoding tracing context
DecodingErrors metrics.Counter `metric:"span_context_decoding_errors"`
DecodingErrors metrics.Counter `metric:"span_context_decoding_errors" help:"Number of errors decoding tracing context"`

// Number of spans successfully reported
ReporterSuccess metrics.Counter `metric:"reporter_spans" tags:"result=ok"`
ReporterSuccess metrics.Counter `metric:"reporter_spans" tags:"result=ok" help:"Number of spans successfully reported"`

// Number of spans not reported due to a Sender failure
ReporterFailure metrics.Counter `metric:"reporter_spans" tags:"result=err"`
ReporterFailure metrics.Counter `metric:"reporter_spans" tags:"result=err" help:"Number of spans not reported due to a Sender failure"`

// Number of spans dropped due to internal queue overflow
ReporterDropped metrics.Counter `metric:"reporter_spans" tags:"result=dropped"`
ReporterDropped metrics.Counter `metric:"reporter_spans" tags:"result=dropped" help:"Number of spans dropped due to internal queue overflow"`

// Current number of spans in the reporter queue
ReporterQueueLength metrics.Gauge `metric:"reporter_queue_length"`
ReporterQueueLength metrics.Gauge `metric:"reporter_queue_length" help:"Current number of spans in the reporter queue"`

// Number of times the Sampler succeeded to retrieve sampling strategy
SamplerRetrieved metrics.Counter `metric:"sampler_queries" tags:"result=ok"`
SamplerRetrieved metrics.Counter `metric:"sampler_queries" tags:"result=ok" help:"Number of times the Sampler succeeded to retrieve sampling strategy"`

// Number of times the Sampler failed to retrieve sampling strategy
SamplerQueryFailure metrics.Counter `metric:"sampler_queries" tags:"result=err"`
SamplerQueryFailure metrics.Counter `metric:"sampler_queries" tags:"result=err" help:"Number of times the Sampler failed to retrieve sampling strategy"`

// Number of times the Sampler succeeded to retrieve and update sampling strategy
SamplerUpdated metrics.Counter `metric:"sampler_updates" tags:"result=ok"`
SamplerUpdated metrics.Counter `metric:"sampler_updates" tags:"result=ok" help:"Number of times the Sampler succeeded to retrieve and update sampling strategy"`

// Number of times the Sampler failed to update sampling strategy
SamplerUpdateFailure metrics.Counter `metric:"sampler_updates" tags:"result=err"`
SamplerUpdateFailure metrics.Counter `metric:"sampler_updates" tags:"result=err" help:"Number of times the Sampler failed to update sampling strategy"`

// Number of times baggage was successfully written or updated on spans.
BaggageUpdateSuccess metrics.Counter `metric:"baggage_updates" tags:"result=ok"`
BaggageUpdateSuccess metrics.Counter `metric:"baggage_updates" tags:"result=ok" help:"Number of times baggage was successfully written or updated on spans"`

// Number of times baggage failed to write or update on spans.
BaggageUpdateFailure metrics.Counter `metric:"baggage_updates" tags:"result=err"`
BaggageUpdateFailure metrics.Counter `metric:"baggage_updates" tags:"result=err" help:"Number of times baggage failed to write or update on spans"`

// Number of times baggage was truncated as per baggage restrictions.
BaggageTruncate metrics.Counter `metric:"baggage_truncations"`
BaggageTruncate metrics.Counter `metric:"baggage_truncations" help:"Number of times baggage was truncated as per baggage restrictions"`

// Number of times baggage restrictions were successfully updated.
BaggageRestrictionsUpdateSuccess metrics.Counter `metric:"baggage_restrictions_updates" tags:"result=ok"`
BaggageRestrictionsUpdateSuccess metrics.Counter `metric:"baggage_restrictions_updates" tags:"result=ok" help:"Number of times baggage restrictions were successfully updated"`

// Number of times baggage restrictions failed to update.
BaggageRestrictionsUpdateFailure metrics.Counter `metric:"baggage_restrictions_updates" tags:"result=err"`
BaggageRestrictionsUpdateFailure metrics.Counter `metric:"baggage_restrictions_updates" tags:"result=err" help:"Number of times baggage restrictions failed to update"`

// Number of times debug spans were throttled.
ThrottledDebugSpans metrics.Counter `metric:"throttled_debug_spans"`
ThrottledDebugSpans metrics.Counter `metric:"throttled_debug_spans" help:"Number of times debug spans were throttled"`

// Number of times throttler successfully updated.
ThrottlerUpdateSuccess metrics.Counter `metric:"throttler_updates" tags:"result=ok"`
ThrottlerUpdateSuccess metrics.Counter `metric:"throttler_updates" tags:"result=ok" help:"Number of times throttler successfully updated"`

// Number of times throttler failed to update.
ThrottlerUpdateFailure metrics.Counter `metric:"throttler_updates" tags:"result=err"`
ThrottlerUpdateFailure metrics.Counter `metric:"throttler_updates" tags:"result=err" help:"Number of times throttler failed to update"`
}

// NewMetrics creates a new Metrics struct and initializes it.
func NewMetrics(factory metrics.Factory, globalTags map[string]string) *Metrics {
m := &Metrics{}
// TODO the namespace "jaeger" should be configurable (e.g. in all-in-one "jaeger-client" would make more sense)
metrics.MustInit(m, factory.Namespace(metrics.NSOptions{Name: "jaeger"}), globalTags)
// TODO the namespace "jaeger" should be configurable
metrics.MustInit(m, factory.Namespace(metrics.NSOptions{Name: "jaeger_tracer"}), globalTags)
return m
}

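Reviewer-style note, not part of the diff: a minimal sketch of how the renamed namespace surfaces in emitted metric names, reusing the metricstest factory and the NewMetrics signature shown above. The test name and the choice of counter are illustrative; the new help tags add descriptions and are not expected to affect the names themselves.

```go
package jaeger_test

import (
	"testing"

	"github.com/uber/jaeger-lib/metrics/metricstest"

	jaeger "github.com/uber/jaeger-client-go"
)

// Sketch only: with the namespace changed from "jaeger" to "jaeger_tracer",
// every tracer metric gains the new prefix, e.g. "jaeger_tracer.finished_spans".
func TestTracerMetricsPrefixSketch(t *testing.T) {
	factory := metricstest.NewFactory(0)
	m := jaeger.NewMetrics(factory, nil)

	m.SpansFinished.Inc(1)

	factory.AssertCounterMetrics(t, metricstest.ExpectedMetric{
		Name:  "jaeger_tracer.finished_spans",
		Value: 1,
	})
}
```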
4 changes: 2 additions & 2 deletions metrics_test.go
@@ -32,14 +32,14 @@ func TestNewMetrics(t *testing.T) {
m.ReporterQueueLength.Update(11)
factory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.started_spans",
Name: "jaeger_tracer.started_spans",
Tags: map[string]string{"lib": "jaeger", "sampled": "y"},
Value: 1,
},
)
factory.AssertGaugeMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.reporter_queue_length",
Name: "jaeger_tracer.reporter_queue_length",
Tags: map[string]string{"lib": "jaeger"},
Value: 11,
},
14 changes: 7 additions & 7 deletions propagation_test.go
@@ -118,10 +118,10 @@ func TestSpanPropagator(t *testing.T) {
}

metricsFactory.AssertCounterMetrics(t, []metricstest.ExpectedMetric{
{Name: "jaeger.started_spans", Tags: map[string]string{"sampled": "y"}, Value: 1 + 2*len(tests)},
{Name: "jaeger.finished_spans", Value: 1 + len(tests)},
{Name: "jaeger.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1},
{Name: "jaeger.traces", Tags: map[string]string{"state": "joined", "sampled": "y"}, Value: len(tests)},
{Name: "jaeger_tracer.started_spans", Tags: map[string]string{"sampled": "y"}, Value: 1 + 2*len(tests)},
{Name: "jaeger_tracer.finished_spans", Value: 1 + len(tests)},
{Name: "jaeger_tracer.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1},
{Name: "jaeger_tracer.traces", Tags: map[string]string{"state": "joined", "sampled": "y"}, Value: len(tests)},
}...)
}

@@ -150,7 +150,7 @@ func TestDecodingError(t *testing.T) {
_, err := tracer.Extract(opentracing.HTTPHeaders, tmc)
assert.Error(t, err)

metricsFactory.AssertCounterMetrics(t, metricstest.ExpectedMetric{Name: "jaeger.span_context_decoding_errors", Value: 1})
metricsFactory.AssertCounterMetrics(t, metricstest.ExpectedMetric{Name: "jaeger_tracer.span_context_decoding_errors", Value: 1})
}

func TestBaggagePropagationHTTP(t *testing.T) {
@@ -213,7 +213,7 @@ func TestJaegerBaggageHeader(t *testing.T) {
// ensure that traces.started counter is incremented, not traces.joined
metricsFactory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1,
Name: "jaeger_tracer.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1,
},
)
})
@@ -286,7 +286,7 @@ func TestDebugCorrelationID(t *testing.T) {
// ensure that traces.started counter is incremented, not traces.joined
metricsFactory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1,
Name: "jaeger_tracer.traces", Tags: map[string]string{"state": "started", "sampled": "y"}, Value: 1,
},
)
})
18 changes: 9 additions & 9 deletions reporter_test.go
@@ -116,7 +116,7 @@ func TestRemoteReporterAppendAndPeriodicFlush(t *testing.T) {
s.sender.assertBufferedSpans(t, 1)
// here we wait for periodic flush to occur
s.sender.assertFlushedSpans(t, 1)
s.assertCounter(t, "jaeger.reporter_spans", map[string]string{"result": "ok"}, 1)
s.assertCounter(t, "jaeger_tracer.reporter_spans", map[string]string{"result": "ok"}, 1)
}

func TestRemoteReporterFlushViaAppend(t *testing.T) {
@@ -127,8 +127,8 @@ func TestRemoteReporterFlushViaAppend(t *testing.T) {
s.sender.assertFlushedSpans(t, 2)
s.tracer.StartSpan("sp3").Finish()
s.sender.assertBufferedSpans(t, 1)
s.assertCounter(t, "jaeger.reporter_spans", map[string]string{"result": "ok"}, 2)
s.assertCounter(t, "jaeger.reporter_spans", map[string]string{"result": "err"}, 0)
s.assertCounter(t, "jaeger_tracer.reporter_spans", map[string]string{"result": "ok"}, 2)
s.assertCounter(t, "jaeger_tracer.reporter_spans", map[string]string{"result": "err"}, 0)
}

func TestRemoteReporterFailedFlushViaAppend(t *testing.T) {
@@ -137,8 +137,8 @@ func TestRemoteReporterFailedFlushViaAppend(t *testing.T) {
s.tracer.StartSpan("sp2").Finish()
s.sender.assertFlushedSpans(t, 2)
s.assertLogs(t, "ERROR: error reporting span \"sp2\": flush error\n")
s.assertCounter(t, "jaeger.reporter_spans", map[string]string{"result": "err"}, 2)
s.assertCounter(t, "jaeger.reporter_spans", map[string]string{"result": "ok"}, 0)
s.assertCounter(t, "jaeger_tracer.reporter_spans", map[string]string{"result": "err"}, 2)
s.assertCounter(t, "jaeger_tracer.reporter_spans", map[string]string{"result": "ok"}, 0)
s.close() // causes explicit flush that also fails with the same error
s.assertLogs(t, "ERROR: error reporting span \"sp2\": flush error\nERROR: error when flushing the buffer: flush error\n")
}
@@ -153,12 +153,12 @@ func TestRemoteReporterDroppedSpans(t *testing.T) {

s.metricsFactory.AssertCounterMetrics(t,
metricstest.ExpectedMetric{
Name: "jaeger.reporter_spans",
Name: "jaeger_tracer.reporter_spans",
Tags: map[string]string{"result": "ok"},
Value: 0,
},
metricstest.ExpectedMetric{
Name: "jaeger.reporter_spans",
Name: "jaeger_tracer.reporter_spans",
Tags: map[string]string{"result": "dropped"},
Value: 1,
},
@@ -250,8 +250,8 @@ func testRemoteReporterWithSender(
assert.Equal(t, "downstream", *tag.VStr)

metricsFactory.AssertCounterMetrics(t, []metricstest.ExpectedMetric{
{Name: "jaeger.reporter_spans", Tags: map[string]string{"result": "ok"}, Value: 1},
{Name: "jaeger.reporter_spans", Tags: map[string]string{"result": "err"}, Value: 0},
{Name: "jaeger_tracer.reporter_spans", Tags: map[string]string{"result": "ok"}, Value: 1},
{Name: "jaeger_tracer.reporter_spans", Tags: map[string]string{"result": "err"}, Value: 0},
}...)
}

28 changes: 14 additions & 14 deletions rpcmetrics/observer_test.go
@@ -21,14 +21,14 @@ import (

opentracing "github.com/opentracing/opentracing-go"
"github.com/stretchr/testify/assert"
"github.com/uber/jaeger-lib/metrics/metricstest"
u "github.com/uber/jaeger-lib/metrics/metricstest"

"github.com/opentracing/opentracing-go/ext"
jaeger "github.com/uber/jaeger-client-go"
)

func ExampleObserver() {
metricsFactory := metricstest.NewFactory(0)
metricsFactory := u.NewFactory(0)
metricsObserver := NewObserver(
metricsFactory,
DefaultNameNormalizer,
@@ -53,14 +53,14 @@ }
}

type testTracer struct {
metrics *metricstest.Factory
metrics *u.Factory
tracer opentracing.Tracer
}

func withTestTracer(runTest func(tt *testTracer)) {
sampler := jaeger.NewConstSampler(true)
reporter := jaeger.NewInMemoryReporter()
metrics := metricstest.NewFactory(time.Minute)
metrics := u.NewFactory(time.Minute)
observer := NewObserver(metrics, DefaultNameNormalizer)
tracer, closer := jaeger.NewTracer(
"test",
@@ -110,11 +110,11 @@ func TestObserver(t *testing.T) {
}

testTracer.metrics.AssertCounterMetrics(t,
metricstest.ExpectedMetric{Name: "requests", Tags: endpointTags("local-span", "error", "false"), Value: 0},
metricstest.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user", "error", "false"), Value: 1},
metricstest.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user", "error", "true"), Value: 1},
metricstest.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user-override", "error", "false"), Value: 1},
metricstest.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user-client", "error", "false"), Value: 0},
u.ExpectedMetric{Name: "requests", Tags: endpointTags("local-span", "error", "false"), Value: 0},
u.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user", "error", "false"), Value: 1},
u.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user", "error", "true"), Value: 1},
u.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user-override", "error", "false"), Value: 1},
u.ExpectedMetric{Name: "requests", Tags: endpointTags("get-user-client", "error", "false"), Value: 0},
)
// TODO something wrong with string generation, .P99 should not be appended to the tag
// as a result we cannot use u.AssertGaugeMetrics
@@ -128,17 +128,17 @@ func TestTags(t *testing.T) {
type tagTestCase struct {
key string
value interface{}
metrics []metricstest.ExpectedMetric
metrics []u.ExpectedMetric
}

testCases := []tagTestCase{
{key: "something", value: 42, metrics: []metricstest.ExpectedMetric{
{key: "something", value: 42, metrics: []u.ExpectedMetric{
{Name: "requests", Value: 1, Tags: tags("error", "false")},
}},
{key: "error", value: true, metrics: []metricstest.ExpectedMetric{
{key: "error", value: true, metrics: []u.ExpectedMetric{
{Name: "requests", Value: 1, Tags: tags("error", "true")},
}},
{key: "error", value: "true", metrics: []metricstest.ExpectedMetric{
{key: "error", value: "true", metrics: []u.ExpectedMetric{
{Name: "requests", Value: 1, Tags: tags("error", "true")},
}},
}
@@ -151,7 +151,7 @@ func TestTags(t *testing.T) {
}
for _, v := range values {
testCases = append(testCases, tagTestCase{
key: "http.status_code", value: v, metrics: []metricstest.ExpectedMetric{
key: "http.status_code", value: v, metrics: []u.ExpectedMetric{
{Name: "http_requests", Value: 1, Tags: tags("status_code", fmt.Sprintf("%dxx", i))},
},
})