Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updates for burrow v3 version of api schema. #9

Open
wants to merge 1 commit into
base: develop
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 34 additions & 28 deletions checks.d/burrow.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,13 @@
# project
from checks import AgentCheck

__version__ = "0.1.1"

SERVICE_CHECK_NAME = 'burrow.can_connect'

# Default address of the local Burrow HTTP endpoint.
DEFAULT_BURROW_URI = 'http://localhost:8000'

# Base path of Burrow's v3 HTTP API for Kafka cluster resources.
# (The stale duplicate v2 assignment has been removed.)
CLUSTER_ENDPOINT = '/v3/kafka'

# Seconds before an HTTP request to Burrow is abandoned.
CHECK_TIMEOUT = 10

Expand Down Expand Up @@ -54,7 +56,7 @@ def _consumer_groups_lags(self, clusters, burrow_address, extra_tags):
status = lag_json["status"]
consumer_tags = ["cluster:%s" % cluster, "consumer:%s" % consumer] + extra_tags

self.gauge("kafka.consumer.maxlag", status["maxlag"], tags=consumer_tags)
self.gauge("kafka.consumer.maxlag", status["maxlag"]["end"]["lag"], tags=consumer_tags)
self.gauge("kafka.consumer.totallag", status["totallag"], tags=consumer_tags)
self._submit_lag_status("kafka.consumer.lag_status", status["status"], tags=consumer_tags)

Expand Down Expand Up @@ -84,9 +86,12 @@ def _submit_lag_status(self, metric_namespace, status, tags):
self.gauge("%s.%s" % (metric_namespace, metric_name.lower()), value, tags=tags)

def _submit_partition_lags(self, partition, tags):
    """
    Submit the lag of a single partition as a gauge.

    partition: one partition entry from Burrow's consumer status payload;
        expected to carry an "end" offset dict with a "lag" field
        (v3 schema) -- TODO confirm against the running Burrow version.
    tags: tag list already carrying cluster/consumer/topic context.
    """
    end = partition.get("end")
    # Burrow can return partitions without an "end" offset (e.g. no
    # commits observed yet); nothing to report in that case.  This guard
    # also removes the unguarded .get("end").get(...) chain that crashed
    # with AttributeError when "end" was missing.
    if not end:
        return
    self.gauge("kafka.consumer.partition_lag", end.get("lag"), tags=tags)

def _check_burrow(self, burrow_address, extra_tags):
"""
def _topic_offsets(self, clusters, burrow_address, extra_tags):
    """
    Retrieve the offsets for all topics in the clusters and submit them.

    clusters: iterable of Kafka cluster names known to Burrow.
    burrow_address: base URI of the Burrow HTTP endpoint.
    extra_tags: tags appended to every metric submitted.
    """
    for cluster in clusters:
        topics_path = "%s/%s/topic" % (CLUSTER_ENDPOINT, cluster)
        topics_list = self._rest_request_to_json(burrow_address, topics_path).get("topics", [])
        for topic in topics_list:
            topic_path = "%s/%s" % (topics_path, topic)
            response = self._rest_request_to_json(burrow_address, topic_path)
            tags = ["topic:%s" % topic, "cluster:%s" % cluster] + extra_tags
            # The v3 topic endpoint returns a flat list of integer offsets;
            # wrap each into the per-partition shape that
            # _submit_offsets_from_json expects (a dict with an "offsets"
            # list of {"offset", "timestamp"} entries).
            offsets_list = [
                {"offsets": [{"offset": offset, "timestamp": 1}]}
                for offset in response.get("offsets", [])
            ]
            self._submit_offsets_from_json(offsets_type="topic", offsets_list=offsets_list, tags=tags)

def _consumer_groups_offsets(self, clusters, burrow_address, extra_tags):
    """
    Retrieve the offsets for all consumer groups and submit them.

    Uses the Burrow v3 consumer-detail endpoint, which returns every
    topic's per-partition offsets in a single response, so no per-topic
    requests are needed (unlike the old v2 flow).
    """
    for cluster in clusters:
        consumers_path = "%s/%s/consumer" % (CLUSTER_ENDPOINT, cluster)
        consumers_list = self._rest_request_to_json(burrow_address, consumers_path).get("consumers", [])
        for consumer in consumers_list:
            detail_path = "%s/%s" % (consumers_path, consumer)
            # "topics" maps topic name -> list of partition offset dicts
            # (see the v3 sample payload in the PR discussion).
            topics = self._rest_request_to_json(burrow_address, detail_path).get("topics", [])
            for topic in topics:
                tags = ["topic:%s" % topic, "cluster:%s" % cluster,
                        "consumer:%s" % consumer] + extra_tags
                self._submit_offsets_from_json(offsets_type="consumer", offsets_list=topics[topic], tags=tags)

def _submit_offsets_from_json(self, offsets_type, offsets_list, tags):
    """
    Find the latest offset of each partition and push the metrics.

    offsets_type: "topic" or "consumer"; namespaces the metric name.
    offsets_list: list (one entry per partition) of dicts, each carrying
        an "offsets" list of {"offset", "timestamp", ...} samples
        (Burrow v3 consumer-detail shape; entries may be null).
    tags: tags applied to every submitted gauge.
    """
    offsets = []
    for partition_number, offset_info in enumerate(offsets_list):
        new_tags = tags + ["partition:%s" % partition_number]
        latest_offset = None
        offset_timestamp = 0
        for offset in offset_info["offsets"]:
            # Burrow pads the ring buffer with nulls for partitions that
            # have not yet accumulated a full window of samples.
            if not offset:
                continue
            if offset["timestamp"] > offset_timestamp:
                latest_offset = offset["offset"]
                # BUG FIX: track the winning timestamp, otherwise every
                # non-null sample overwrote latest_offset and we merely
                # kept the last list entry instead of the newest one.
                offset_timestamp = offset["timestamp"]
        if latest_offset is not None:
            self.gauge("kafka.%s.offsets" % offsets_type, latest_offset, tags=new_tags)
        # BUG FIX (per review comment): accumulate per-partition offsets so
        # the total below is not always 0.
        offsets.append(latest_offset)
    # For unconsumed or empty partitions, coerce a -1 (or missing/None)
    # offset to 0 so the sum isn't skewed by the number of empty partitions.
    offsets = [max(offset or 0, 0) for offset in offsets]
    self.gauge("kafka.%s.offsets.total" % offsets_type, sum(offsets), tags=tags)

def _find_clusters(self, address, target):
"""
Expand Down