Skip to content

Commit

Permalink
Switch http libraries used in checks to requests
Browse files Browse the repository at this point in the history
Fixes

* Fix #1196
* Fix #1398

Along with the self-contained agent (pyopenssl, etc.), it will support SNI.

See https://github.com/kennethreitz/requests/issues/749 for more
information
  • Loading branch information
remh committed Feb 26, 2015
1 parent 243bf8b commit 9ac5989
Show file tree
Hide file tree
Showing 14 changed files with 135 additions and 140 deletions.
18 changes: 11 additions & 7 deletions checks.d/apache.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
# stdlib
import urllib2
import urlparse

# project
from util import headers
from checks import AgentCheck
from checks.utils import add_basic_auth

# 3rd party
import requests

class Apache(AgentCheck):
"""Tracks basic connection/requests/workers metrics
Expand Down Expand Up @@ -37,9 +38,10 @@ def check(self, instance):
url = self.assumed_url.get(instance['apache_status_url'], instance['apache_status_url'])

tags = instance.get('tags', [])
req = urllib2.Request(url, None, headers(self.agentConfig))

auth = None
if 'apache_user' in instance and 'apache_password' in instance:
add_basic_auth(req, instance['apache_user'], instance['apache_password'])
auth = (instance['apache_user'], instance['apache_password'])

# Submit a service check for status page availability.
parsed_url = urlparse.urlparse(url)
Expand All @@ -48,7 +50,9 @@ def check(self, instance):
service_check_name = 'apache.can_connect'
service_check_tags = ['host:%s' % apache_host, 'port:%s' % apache_port]
try:
request = urllib2.urlopen(req)
r = requests.get(url, auth=auth, headers=headers(self.agentConfig))
r.raise_for_status()

except Exception:
self.service_check(service_check_name, AgentCheck.CRITICAL,
tags=service_check_tags)
Expand All @@ -57,10 +61,10 @@ def check(self, instance):
self.service_check(service_check_name, AgentCheck.OK,
tags=service_check_tags)

response = request.read()
response = r.content
metric_count = 0
# Loop through and extract the numerical values
for line in response.split('\n'):
for line in response.splitlines():
values = line.split(': ')
if len(values) == 2: # match
metric, value = values
Expand Down
21 changes: 9 additions & 12 deletions checks.d/couch.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
# stdlib
import urllib2

# project
from util import headers
from checks.utils import add_basic_auth
from checks import AgentCheck

# 3rd party
import simplejson as json
import requests

class CouchDb(AgentCheck):
"""Extracts stats from CouchDB via its REST API
Expand Down Expand Up @@ -37,15 +34,15 @@ def _create_metric(self, data, tags=None):
def _get_stats(self, url, instance):
"Hit a given URL and return the parsed json"
self.log.debug('Fetching Couchdb stats at url: %s' % url)
req = urllib2.Request(url, None, headers(self.agentConfig))

auth = None
if 'user' in instance and 'password' in instance:
add_basic_auth(req, instance['user'], instance['password'])
auth = (instance['user'], instance['password'])

# Do the request, log any errors
request = urllib2.urlopen(req)
response = request.read()
return json.loads(response)
r = requests.get(url, auth=auth, headers=headers(self.agentConfig),
timeout=10)
r.raise_for_status()
return r.json()

def check(self, instance):
server = instance.get('server', None)
Expand All @@ -67,9 +64,9 @@ def get_data(self, server, instance):
service_check_tags = ['instance:%s' % server]
try:
overall_stats = self._get_stats(url, instance)
except urllib2.URLError as e:
except requests.exceptions.HTTPError as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=str(e.reason))
tags=service_check_tags, message=str(e.message))
raise
except Exception as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
Expand Down
27 changes: 13 additions & 14 deletions checks.d/couchbase.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,14 @@
# stdlib
import urllib2
import re
import sys

# exceptions
from urllib2 import HTTPError

# project
from util import headers
from checks import AgentCheck
from checks.utils import add_basic_auth

# 3rd party
import simplejson as json
import requests

#Constants
COUCHBASE_STATS_PATH = '/pools/default'
Expand Down Expand Up @@ -51,14 +47,17 @@ def _create_metrics(self, data, tags=None):
def _get_stats(self, url, instance):
""" Hit a given URL and return the parsed json. """
self.log.debug('Fetching Couchbase stats at url: %s' % url)
req = urllib2.Request(url, None, headers(self.agentConfig))
if 'user' in instance and 'password' in instance:
add_basic_auth(req, instance['user'], instance['password'])

timeout = float(instance.get('timeout', DEFAULT_TIMEOUT))
request = urllib2.urlopen(req, timeout=timeout)
response = request.read()
return json.loads(response)

auth = None
if 'user' in instance and 'password' in instance:
auth = (instance['user'], instance['password'])

r = requests.get(url, auth=auth, headers=headers(self.agentConfig),
timeout=timeout)
r.raise_for_status()
return r.json()

def check(self, instance):
server = instance.get('server', None)
Expand Down Expand Up @@ -92,9 +91,9 @@ def get_data(self, server, instance):
# No overall stats? bail out now
if overall_stats is None:
raise Exception("No data returned from couchbase endpoint: %s" % url)
except urllib2.URLError as e:
except requests.exceptions.HTTPError as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=str(e.reason))
tags=service_check_tags, message=str(e.message))
raise
except Exception as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
Expand Down Expand Up @@ -128,7 +127,7 @@ def get_data(self, server, instance):

try:
bucket_stats = self._get_stats(url, instance)
except HTTPError:
except requests.exceptions.HTTPError:
url_backup = '%s/pools/nodes/buckets/%s/stats' % (server, bucket_name)
bucket_stats = self._get_stats(url_backup, instance)

Expand Down
8 changes: 4 additions & 4 deletions checks.d/fluentd.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
# stdlib
from collections import defaultdict
import urllib2
import urlparse

# project
Expand All @@ -9,6 +8,7 @@

# 3rd party
import simplejson as json
import requests

class Fluentd(AgentCheck):
SERVICE_CHECK_NAME = 'fluentd.is_ok'
Expand All @@ -35,9 +35,9 @@ def check(self, instance):
monitor_agent_port = parsed_url.port or 24220
service_check_tags = ['fluentd_host:%s' % monitor_agent_host, 'fluentd_port:%s' % monitor_agent_port]

req = urllib2.Request(url, None, headers(self.agentConfig))
res = urllib2.urlopen(req).read()
status = json.loads(res)
r = requests.get(url, headers=headers(self.agentConfig))
r.raise_for_status()
status = r.json()

for p in status['plugins']:
for m in self.GAUGES:
Expand Down
20 changes: 9 additions & 11 deletions checks.d/haproxy.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
# stdlib
import urllib2
import time
from collections import defaultdict

# project
from checks import AgentCheck
from util import headers

# 3rd party
import requests

STATS_URL = "/;csv;norefresh"
EVENT_TYPE = SOURCE_TYPE_NAME = 'haproxy'

Expand Down Expand Up @@ -81,20 +83,16 @@ def _fetch_data(self, url, username, password):
''' Hit a given URL and return the parsed json '''
# Try to fetch data from the stats URL

passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, url, username, password)
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
urllib2.install_opener(opener)
auth = (username, password)
url = "%s%s" % (url, STATS_URL)

self.log.debug("HAProxy Fetching haproxy search data from: %s" % url)

req = urllib2.Request(url, None, headers(self.agentConfig))
request = urllib2.urlopen(req)
response = request.read()
# Split the data by line
return response.split('\n')
r = requests.get(url, auth=auth, headers=headers(self.agentConfig))
r.raise_for_status()

return r.content.splitlines()


def _process_data(
self, data, collect_aggregates_only, process_events, url=None,
Expand Down
41 changes: 24 additions & 17 deletions checks.d/http_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
from urlparse import urlparse

# 3rd party
from httplib2 import Http, HttpLib2Error
import tornado
import requests

# project
from checks.network_checks import NetworkCheck, Status, EventType
Expand Down Expand Up @@ -72,10 +72,15 @@ def _check(self, instance):
self.log.debug("Connecting to %s" % addr)
if disable_ssl_validation and urlparse(addr)[0] == "https":
self.warning("Skipping SSL certificate validation for %s based on configuration" % addr)
h = Http(timeout=timeout, disable_ssl_certificate_validation=disable_ssl_validation)

auth = None
if username is not None and password is not None:
h.add_credentials(username, password)
resp, content = h.request(addr, "GET", headers=headers)
auth = (username, password)

r = requests.get(addr, auth=auth,timeout=timeout, headers=headers,
verify=not disable_ssl_validation)
r.raise_for_status()


except socket.timeout, e:
length = int((time.time() - start) * 1000)
Expand All @@ -86,7 +91,17 @@ def _check(self, instance):
"%s. Connection failed after %s ms" % (str(e), length)
))

except HttpLib2Error, e:
except requests.exceptions.HTTPError, r:
length = int((time.time() - start) * 1000)
self.log.info("%s is DOWN, error code: %s" % (addr, str(r.status_code)))

content = r.content if include_content else ''

service_checks.append((
self.SC_STATUS, Status.DOWN, (r.status_code, r.reason, content or '')
))

except requests.exceptions.ConnectionError, e:
length = int((time.time() - start) * 1000)
self.log.info("%s is DOWN, error: %s. Connection failed after %s ms" % (addr, str(e), length))
service_checks.append((
Expand Down Expand Up @@ -119,18 +134,10 @@ def _check(self, instance):
self.gauge('network.http.response_time', running_time, tags=tags_list)

if not service_checks:
if resp is not None and int(resp.status) >= 400:
self.log.info("%s is DOWN, error code: %s" % (addr, str(resp.status)))
if not include_content:
content = ''
service_checks.append((
self.SC_STATUS, Status.DOWN, (resp.status, resp.reason, content or '')
))
else:
self.log.debug("%s is UP" % addr)
service_checks.append((
self.SC_STATUS, Status.UP, "UP"
))
self.log.debug("%s is UP" % addr)
service_checks.append((
self.SC_STATUS, Status.UP, "UP"
))

if ssl_expire and urlparse(addr)[0] == "https":
status, msg = self.check_cert_expiration(instance)
Expand Down
18 changes: 11 additions & 7 deletions checks.d/kyototycoon.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
# stdlib
import re
import urllib2
from collections import defaultdict

# project
from checks import AgentCheck

# 3rd party
import requests

db_stats = re.compile(r'^db_(\d)+$')
whitespace = re.compile(r'\s')

Expand Down Expand Up @@ -61,11 +63,13 @@ def check(self, instance):
if name is not None:
service_check_tags.append('instance:%s' % name)


try:
response = urllib2.urlopen(url)
except urllib2.URLError as e:
r = requests.get(url)
r.raise_for_status()
except requests.exceptions.HTTPError as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
tags=service_check_tags, message=str(e.reason))
tags=service_check_tags, message=str(e.message))
raise
except Exception as e:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
Expand All @@ -74,10 +78,10 @@ def check(self, instance):
else:
self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK)

body = response.read()
body = r.content

totals = defaultdict(lambda: 0)
for line in body.split('\n'):
totals = defaultdict(int)
for line in body.splitlines():
if '\t' not in line:
continue

Expand Down
Loading

0 comments on commit 9ac5989

Please sign in to comment.