From e1a6a01a615117d0a175b8b2d6ada870a83c3a18 Mon Sep 17 00:00:00 2001
From: Pierre Massat
Date: Thu, 21 Nov 2024 11:09:22 -0500
Subject: [PATCH] chore(metrics-summaries): Remove metrics summaries from the codebase

---
 cmd/vroom/config.go |  9 ++++-----
 cmd/vroom/kafka.go  | 20 --------------------
 cmd/vroom/main.go   |  9 ---------
 3 files changed, 4 insertions(+), 34 deletions(-)

diff --git a/cmd/vroom/config.go b/cmd/vroom/config.go
index f11f7646..f6bb2a8f 100644
--- a/cmd/vroom/config.go
+++ b/cmd/vroom/config.go
@@ -12,11 +12,10 @@ type (
 		ProfilingKafkaBrokers []string `env:"SENTRY_KAFKA_BROKERS_PROFILING" env-default:"localhost:9092"`
 		SpansKafkaBrokers     []string `env:"SENTRY_KAFKA_BROKERS_SPANS"     env-default:"localhost:9092"`
 
-		CallTreesKafkaTopic      string `env:"SENTRY_KAFKA_TOPIC_CALL_TREES"      env-default:"profiles-call-tree"`
-		MetricsSummaryKafkaTopic string `env:"SENTRY_KAFKA_TOPIC_METRICS_SUMMARY" env-default:"snuba-metrics-summaries"`
-		OccurrencesKafkaTopic    string `env:"SENTRY_KAFKA_TOPIC_OCCURRENCES"     env-default:"ingest-occurrences"`
-		ProfileChunksKafkaTopic  string `env:"SENTRY_KAFKA_TOPIC_PROFILE_CHUNKS"  env-default:"snuba-profile-chunks"`
-		ProfilesKafkaTopic       string `env:"SENTRY_KAKFA_TOPIC_PROFILES"        env-default:"processed-profiles"`
+		CallTreesKafkaTopic     string `env:"SENTRY_KAFKA_TOPIC_CALL_TREES"     env-default:"profiles-call-tree"`
+		OccurrencesKafkaTopic   string `env:"SENTRY_KAFKA_TOPIC_OCCURRENCES"    env-default:"ingest-occurrences"`
+		ProfileChunksKafkaTopic string `env:"SENTRY_KAFKA_TOPIC_PROFILE_CHUNKS" env-default:"snuba-profile-chunks"`
+		ProfilesKafkaTopic      string `env:"SENTRY_KAKFA_TOPIC_PROFILES"       env-default:"processed-profiles"`
 
 		SnubaHost string `env:"SENTRY_SNUBA_HOST" env-default:"http://localhost:1218"`
 
diff --git a/cmd/vroom/kafka.go b/cmd/vroom/kafka.go
index 1c10d07f..5e6625e2 100644
--- a/cmd/vroom/kafka.go
+++ b/cmd/vroom/kafka.go
@@ -56,26 +56,6 @@ type (
 		VersionCode string `json:"version_code"`
 		VersionName string `json:"version_name"`
 	}
-
-	// MetricsSummaryKafkaMessage is representing the struct we send to Kafka to insert Metrics Summary in ClickHouse.
-	MetricsSummaryKafkaMessage struct {
-		Count         uint64            `json:"count"`
-		DurationMs    uint32            `json:"duration_ms"`
-		EndTimestamp  float64           `json:"end_timestamp"`
-		Group         string            `json:"group"`
-		IsSegment     bool              `json:"is_segment"`
-		Max           float64           `json:"max"`
-		Min           float64           `json:"min"`
-		Sum           float64           `json:"sum"`
-		Mri           string            `json:"mri"`
-		ProjectID     uint64            `json:"project_id"`
-		Received      int64             `json:"received"`
-		RetentionDays int               `json:"retention_days"`
-		SegmentID     string            `json:"segment_id"`
-		SpanID        string            `json:"span_id"`
-		Tags          map[string]string `json:"tags"`
-		TraceID       string            `json:"trace_id"`
-	}
 )
 
 func buildFunctionsKafkaMessage(p profile.Profile, functions []nodetree.CallTreeFunction) FunctionsKafkaMessage {
diff --git a/cmd/vroom/main.go b/cmd/vroom/main.go
index 3cd3a7d0..c7c70838 100644
--- a/cmd/vroom/main.go
+++ b/cmd/vroom/main.go
@@ -83,15 +83,6 @@ func newEnvironment() (*environment, error) {
 		ReadTimeout:  3 * time.Second,
 		WriteTimeout: 3 * time.Second,
 	}
-	e.metricSummaryWriter = &kafka.Writer{
-		Addr:         kafka.TCP(e.config.SpansKafkaBrokers...),
-		Async:        true,
-		Balancer:     kafka.CRC32Balancer{},
-		BatchSize:    100,
-		ReadTimeout:  3 * time.Second,
-		Topic:        e.config.MetricsSummaryKafkaTopic,
-		WriteTimeout: 3 * time.Second,
-	}
 	e.metricsClient = &http.Client{
 		Timeout: time.Second * 5,
 		Transport: &http.Transport{