diff --git a/agent.py b/agent.py index 531185585c..40c4433b05 100755 --- a/agent.py +++ b/agent.py @@ -21,17 +21,29 @@ import signal import sys import time -import glob # Custom modules from checks.collector import Collector from checks.check_status import CollectorStatus -from config import get_config, get_system_stats, get_parsed_args, load_check_directory, get_confd_path, check_yaml, get_logging_config -from daemon import Daemon, AgentSupervisor +from config import ( + get_confd_path, + get_config, + get_logging_config, + get_parsed_args, + get_system_stats, + load_check_directory, +) +from daemon import AgentSupervisor, Daemon from emitter import http_emitter -from util import Watchdog, PidFile, EC2, get_os, get_hostname from jmxfetch import JMXFetch - +from util import ( + EC2, + get_hostname, + get_os, + PidFile, + Watchdog, +) +from utils.flare import configcheck, Flare # Constants PID_NAME = "dd-agent" @@ -212,6 +224,7 @@ def main(): 'check', 'configcheck', 'jmx', + 'flare', ] if len(args) < 1: @@ -296,25 +309,7 @@ def parent_func(): agent.start_event = False check.stop() elif 'configcheck' == command or 'configtest' == command: - osname = get_os() - all_valid = True - for conf_path in glob.glob(os.path.join(get_confd_path(osname), "*.yaml")): - basename = os.path.basename(conf_path) - try: - check_yaml(conf_path) - except Exception, e: - all_valid = False - print "%s contains errors:\n %s" % (basename, e) - else: - print "%s is valid" % basename - if all_valid: - print "All yaml files passed. You can now run the Datadog agent." - return 0 - else: - print("Fix the invalid yaml files above in order to start the Datadog agent. 
" - "A useful external tool for yaml parsing can be found at " - "http://yaml-online-parser.appspot.com/") - return 1 + configcheck() elif 'jmx' == command: from jmxfetch import JMX_LIST_COMMANDS, JMXFetch @@ -342,6 +337,16 @@ def parent_func(): agent.start_event = False print "Couldn't find any valid JMX configuration in your conf.d directory: %s" % confd_directory print "Have you enabled any JMX check ?" print "If you think it's not normal please get in touch with Datadog Support" + + elif 'flare' == command: + case_id = int(args[1]) if len(args) > 1 else None + f = Flare(True, case_id) + f.collect() + try: + f.upload() + except Exception, e: + print 'The upload failed:\n{0}'.format(str(e)) + return 0 diff --git a/config.py b/config.py index 9db8d92294..15d2abef84 100644 --- a/config.py +++ b/config.py @@ -17,6 +17,7 @@ from socket import gaierror from optparse import OptionParser, Values from cStringIO import StringIO +from urlparse import urlparse # project @@ -57,6 +58,10 @@ ] DEFAULT_CHECKS = ("network", "ntp") +LEGACY_DATADOG_URLS = [ + "app.datadoghq.com", + "app.datad0g.com", +] class PathNotFound(Exception): pass @@ -93,6 +98,21 @@ def get_parsed_args(): def get_version(): return AGENT_VERSION + +# Return url endpoint, here because needs access to version number +def get_url_endpoint(default_url, endpoint_type='app'): + parsed_url = urlparse(default_url) + if parsed_url.netloc not in LEGACY_DATADOG_URLS: + return default_url + + subdomain = parsed_url.netloc.split(".")[0] + + # Replace https://app.datadoghq.com in https://5-2-0-app.agent.datadoghq.com + return default_url.replace(subdomain, + "{0}-{1}.agent".format( + get_version().replace(".", "-"), + endpoint_type)) + def skip_leading_wsp(f): "Works on a file, returns a file-like object" return StringIO("\n".join(map(string.strip, f.readlines()))) @@ -627,7 +647,9 @@ def get_proxy(agentConfig, use_system_settings=False): return None -def get_confd_path(osname): +def get_confd_path(osname=None): + if not 
osname: + osname = get_os() bad_path = '' if osname == 'windows': try: @@ -651,7 +673,9 @@ def get_confd_path(osname): raise PathNotFound(bad_path) -def get_checksd_path(osname): +def get_checksd_path(osname=None): + if not osname: + osname = get_os() if osname == 'windows': return _windows_checksd_path() else: diff --git a/ddagent.py b/ddagent.py index 81abba6ccd..7b2d1d10cb 100755 --- a/ddagent.py +++ b/ddagent.py @@ -28,7 +28,6 @@ from hashlib import md5 from datetime import datetime, timedelta from socket import gaierror, error as socket_error -from urlparse import urlparse # Tornado import tornado.httpserver @@ -40,7 +39,7 @@ # agent import from util import Watchdog, get_uuid, get_hostname, json, get_tornado_ioloop from emitter import http_emitter -from config import get_config, get_version +from config import get_config, get_url_endpoint, get_version from checks.check_status import ForwarderStatus from transaction import Transaction, TransactionManager import modules @@ -73,11 +72,6 @@ THROTTLING_DELAY = timedelta(microseconds=1000000/2) # 2 msg/second -LEGACY_DATADOG_URLS = [ - "app.datadoghq.com", - "app.datad0g.com", -] - class EmitterThread(threading.Thread): def __init__(self, *args, **kwargs): @@ -193,23 +187,8 @@ def __init__(self, data, headers): def __sizeof__(self): return sys.getsizeof(self._data) - @classmethod - def get_url_endpoint(cls, endpoint): - default_url = cls._application._agentConfig[endpoint] - parsed_url = urlparse(default_url) - if parsed_url.netloc not in LEGACY_DATADOG_URLS: - return default_url - - subdomain = parsed_url.netloc.split(".")[0] - - # Replace https://app.datadoghq.com in https://5-2-0-app.agent.datadoghq.com - return default_url.replace(subdomain, - "{0}-{1}.agent".format( - get_version().replace(".", "-"), - subdomain)) - def get_url(self, endpoint): - endpoint_base_url = self.get_url_endpoint(endpoint) + endpoint_base_url = get_url_endpoint(self._application._agentConfig[endpoint]) api_key = 
self._application._agentConfig.get('api_key') if api_key: return endpoint_base_url + '/intake?api_key=%s' % api_key @@ -285,7 +264,7 @@ def on_response(self, response): class APIMetricTransaction(MetricTransaction): def get_url(self, endpoint): - endpoint_base_url = self.get_url_endpoint(endpoint) + endpoint_base_url = get_url_endpoint(self._application._agentConfig[endpoint]) config = self._application._agentConfig api_key = config['api_key'] url = endpoint_base_url + '/api/v1/series/?api_key=' + api_key diff --git a/packaging/centos/datadog-agent.init b/packaging/centos/datadog-agent.init index 3f7b489edd..b55e1d0265 100644 --- a/packaging/centos/datadog-agent.init +++ b/packaging/centos/datadog-agent.init @@ -186,6 +186,13 @@ case "$1" in su $AGENTUSER -c "$AGENTPATH jmx $@" exit $? ;; + + flare) + shift + su $AGENTUSER -c "$AGENTPATH flare $@" + exit $? + ;; + *) echo "Usage: $0 {start|stop|restart|info|status|configcheck|configtest|jmx}" exit 2 diff --git a/packaging/datadog-agent/source/agent b/packaging/datadog-agent/source/agent index 14032836e4..ba7bee479a 100755 --- a/packaging/datadog-agent/source/agent +++ b/packaging/datadog-agent/source/agent @@ -135,6 +135,14 @@ case $action in exit $? ;; + + flare) + shift + python agent/agent.py flare $@ + exit $? + ;; + + *) echo "Usage: $0 {start|stop|restart|info|status|configcheck|check|jmx}" exit 2 diff --git a/packaging/debian/datadog-agent.init b/packaging/debian/datadog-agent.init index 5bde22a25b..3387926ec3 100644 --- a/packaging/debian/datadog-agent.init +++ b/packaging/debian/datadog-agent.init @@ -82,13 +82,13 @@ case "$1" in log_daemon_msg "Resuming starting process." fi - + log_daemon_msg "Starting $DESC (using supervisord)" "$NAME" PATH=$SYSTEM_PATH start-stop-daemon --start --quiet --oknodo --exec $SUPERVISORD_PATH -- -c $SUPERVISOR_FILE --pidfile $SUPERVISOR_PIDFILE if [ $? 
-ne 0 ]; then log_end_msg 1 fi - + # check if the agent is running once per second for 10 seconds retries=10 while [ $retries -gt 1 ]; do @@ -108,10 +108,10 @@ case "$1" in exit 1 ;; stop) - + log_daemon_msg "Stopping $DESC (stopping supervisord)" "$NAME" start-stop-daemon --stop --retry 30 --quiet --oknodo --pidfile $SUPERVISOR_PIDFILE - + log_end_msg $? ;; @@ -154,9 +154,15 @@ case "$1" in exit $? ;; + flare) + shift + su $AGENTUSER -c "$AGENTPATH flare $@" + exit $? + ;; + *) N=/etc/init.d/$NAME - echo "Usage: $N {start|stop|restart|info|status|configcheck|configtest|jmx}" + echo "Usage: $N {start|stop|restart|info|status|configcheck|configtest|jmx|flare}" exit 1 ;; esac diff --git a/tests/flare/datadog-agent-1.tar.bz2 b/tests/flare/datadog-agent-1.tar.bz2 new file mode 100644 index 0000000000..b56f3b974d Binary files /dev/null and b/tests/flare/datadog-agent-1.tar.bz2 differ diff --git a/tests/test_flare.py b/tests/test_flare.py new file mode 100644 index 0000000000..595c90879c --- /dev/null +++ b/tests/test_flare.py @@ -0,0 +1,108 @@ +import unittest +import os.path +import mock +from utils.flare import Flare + +def get_mocked_config(): + return { + 'api_key': 'APIKEY', + 'dd_url': 'https://app.datadoghq.com', + } + +def get_mocked_version(): + return '6.6.6' + +def get_mocked_temp(): + return os.path.join( + os.path.dirname(os.path.realpath(__file__)), + 'flare' + ) + +def mocked_strftime(t): + return '1' + +class FakeResponse(object): + def __init__(self, status_code=200): + self.status_code = status_code + self.text = '{"case_id":1337}' + + def json(self): + return {'case_id': 1337} + + def raise_for_status(self): + return None + +class FlareTest(unittest.TestCase): + + @mock.patch('utils.flare.strftime', side_effect=mocked_strftime) + @mock.patch('tempfile.gettempdir', side_effect=get_mocked_temp) + @mock.patch('config.get_version', side_effect=get_mocked_version) + @mock.patch('utils.flare.get_config', side_effect=get_mocked_config) + def test_init(self, 
mock_config, mock_version, mock_tempdir, mock_strftime): + f = Flare(case_id=1337) + conf = mock_config() + self.assertEqual(f._case_id, 1337) + self.assertEqual(f._api_key, conf['api_key']) + self.assertEqual(f._url, 'https://6-6-6-flare.agent.datadoghq.com/support/flare') + self.assertEqual(f._tar_path, os.path.join(get_mocked_temp(), "datadog-agent-1.tar.bz2")) + + @mock.patch('utils.flare.requests.post', return_value=FakeResponse()) + @mock.patch('config.get_version', side_effect=get_mocked_version) + @mock.patch('utils.flare.strftime', side_effect=mocked_strftime) + @mock.patch('tempfile.gettempdir', side_effect=get_mocked_temp) + @mock.patch('utils.flare.get_config', side_effect=get_mocked_config) + def test_upload_with_case(self, mock_config, mock_tempdir, mock_stfrtime, mock_version, mock_requests): + f = Flare(case_id=1337) + + assert not mock_requests.called + f.upload(confirmation=False) + assert mock_requests.called + args, kwargs = mock_requests.call_args_list[0] + self.assertEqual( + args, + ('https://6-6-6-flare.agent.datadoghq.com/support/flare/1337?api_key=APIKEY',) + ) + self.assertEqual( + kwargs['files']['flare_file'].name, + os.path.join(get_mocked_temp(), "datadog-agent-1.tar.bz2") + ) + self.assertEqual(kwargs['data']['case_id'], 1337) + self.assertEqual(kwargs['data']['email'], '') + assert kwargs['data']['hostname'] + + @mock.patch('utils.flare.requests.post', return_value=FakeResponse()) + @mock.patch('config.get_version', side_effect=get_mocked_version) + @mock.patch('utils.flare.strftime', side_effect=mocked_strftime) + @mock.patch('tempfile.gettempdir', side_effect=get_mocked_temp) + @mock.patch('utils.flare.get_config', side_effect=get_mocked_config) + def test_upload_no_case(self, mock_config, mock_tempdir, mock_stfrtime, mock_version, mock_requests): + f = Flare() + f._ask_for_email = lambda: 'test@example.com' + + assert not mock_requests.called + f.upload(confirmation=False) + assert mock_requests.called + args, kwargs = 
mock_requests.call_args_list[0] + self.assertEqual( + args, + ('https://6-6-6-flare.agent.datadoghq.com/support/flare?api_key=APIKEY',) + ) + self.assertEqual( + kwargs['files']['flare_file'].name, + os.path.join(get_mocked_temp(), "datadog-agent-1.tar.bz2") + ) + self.assertEqual(kwargs['data']['case_id'], None) + self.assertEqual(kwargs['data']['email'], 'test@example.com') + assert kwargs['data']['hostname'] + + @mock.patch('utils.flare.strftime', side_effect=mocked_strftime) + @mock.patch('tempfile.gettempdir', side_effect=get_mocked_temp) + @mock.patch('utils.flare.get_config', side_effect=get_mocked_config) + def test_endpoint(self, mock_config, mock_temp, mock_stfrtime): + f = Flare() + f._ask_for_email = lambda: None + try: + f.upload(confirmation=False) + raise Exception('Should fail before') + except Exception, e: + self.assertEqual(str(e), "Your request is incorrect: Invalid inputs: {'email': None}") diff --git a/util.py b/util.py index 4d210f04cf..f765234cba 100644 --- a/util.py +++ b/util.py @@ -566,6 +566,10 @@ def is_darwin(name=None): name = name or sys.platform return 'darwin' in name + @staticmethod + def is_mac(name=None): + Platform.is_darwin(name) + @staticmethod def is_freebsd(name=None): name = name or sys.platform @@ -601,6 +605,10 @@ def is_win32(name=None): name = name or sys.platform return name == "win32" + @staticmethod + def is_windows(name=None): + return Platform.is_win32(name) + """ Iterable Recipes """ diff --git a/utils/__init__.py b/utils/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/utils/flare.py b/utils/flare.py new file mode 100644 index 0000000000..90824e40ad --- /dev/null +++ b/utils/flare.py @@ -0,0 +1,347 @@ +import atexit +import glob +import logging +import os.path +import re +import subprocess +import sys +import tarfile +import tempfile +from time import strftime + +# DD imports +from checks.check_status import CollectorStatus, DogstatsdStatus, ForwarderStatus +from config import ( + 
check_yaml, + get_confd_path, + get_config, + get_config_path, + get_logging_config, + get_url_endpoint, +) +from util import ( + get_hostname, + Platform, +) + +# 3p +import requests + +# Globals +log = logging.getLogger('flare') + +def configcheck(): + all_valid = True + for conf_path in glob.glob(os.path.join(get_confd_path(), "*.yaml")): + basename = os.path.basename(conf_path) + try: + check_yaml(conf_path) + except Exception, e: + all_valid = False + print "%s contains errors:\n %s" % (basename, e) + else: + print "%s is valid" % basename + if all_valid: + print "All yaml files passed. You can now run the Datadog agent." + return 0 + else: + print("Fix the invalid yaml files above in order to start the Datadog agent. " + "A useful external tool for yaml parsing can be found at " + "http://yaml-online-parser.appspot.com/") + return 1 + +class Flare(object): + """ + Compress all important logs and configuration files for debug, + and then send them to Datadog (which transfers them to Support) + """ + + DATADOG_SUPPORT_URL = '/support/flare' + PASSWORD_REGEX = re.compile('( *(\w|_)*pass(word)?:).+') + COMMENT_REGEX = re.compile('^ *#.*') + APIKEY_REGEX = re.compile('^api_key: *\w+(\w{5})$') + REPLACE_APIKEY = r'api_key: *************************\1' + COMPRESSED_FILE = 'datadog-agent-{0}.tar.bz2' + # We limit to 10MB arbitrary + MAX_UPLOAD_SIZE = 10485000 + TIMEOUT = 30 + + + def __init__(self, cmdline=False, case_id=None): + self._case_id = case_id + self._cmdline = cmdline + self._init_tarfile() + self._save_logs_path() + config = get_config() + self._api_key = config.get('api_key') + self._url = "{0}{1}".format( + get_url_endpoint(config.get('dd_url'), endpoint_type='flare'), + self.DATADOG_SUPPORT_URL + ) + self._hostname = get_hostname(config) + self._prefix = "datadog-{0}".format(self._hostname) + + # Collect all conf and logs files and compress them + def collect(self): + if not self._api_key: + raise Exception('No api_key found') + log.info("Collecting 
logs and configuration files:") + + self._add_logs_tar() + self._add_conf_tar() + log.info(" * datadog-agent configcheck output") + self._add_command_output_tar('configcheck.log', configcheck) + log.info(" * datadog-agent status output") + self._add_command_output_tar('status.log', self._supervisor_status) + log.info(" * datadog-agent info output") + self._add_command_output_tar('info.log', self._info_all) + log.info(" * pip freeze") + self._add_command_output_tar('freeze.log', self._pip_freeze) + + log.info("Saving all files to {0}".format(self._tar_path)) + self._tar.close() + + # Upload the tar file + def upload(self, confirmation=True): + self._check_size() + + if confirmation: + self._ask_for_confirmation() + + email = self._ask_for_email() + + log.info("Uploading {0} to Datadog Support".format(self._tar_path)) + url = self._url + if self._case_id: + url = '{0}/{1}'.format(self._url, str(self._case_id)) + url = "{0}?api_key={1}".format(url, self._api_key) + files = {'flare_file': open(self._tar_path, 'rb')} + data = { + 'case_id': self._case_id, + 'hostname': self._hostname, + 'email': email + } + self._resp = requests.post(url, files=files, data=data, timeout=self.TIMEOUT) + self._analyse_result() + + # Start by creating the tar file which will contain everything + def _init_tarfile(self): + # Default temp path + self._tar_path = os.path.join( + tempfile.gettempdir(), + self.COMPRESSED_FILE.format(strftime("%Y-%m-%d-%H-%M-%S")) + ) + + if os.path.exists(self._tar_path): + os.remove(self._tar_path) + self._tar = tarfile.open(self._tar_path, 'w:bz2') + + # Save logs file paths + def _save_logs_path(self): + prefix = '' + if Platform.is_windows(): + prefix = 'windows_' + config = get_logging_config() + self._collector_log = config.get('{0}collector_log_file'.format(prefix)) + self._forwarder_log = config.get('{0}forwarder_log_file'.format(prefix)) + self._dogstatsd_log = config.get('{0}dogstatsd_log_file'.format(prefix)) + self._jmxfetch_log = 
config.get('jmxfetch_log_file') + + # Add logs to the tarfile + def _add_logs_tar(self): + self._add_log_file_tar(self._collector_log) + self._add_log_file_tar(self._forwarder_log) + self._add_log_file_tar(self._dogstatsd_log) + self._add_log_file_tar(self._jmxfetch_log) + self._add_log_file_tar( + "{0}/*supervisord.log*".format(os.path.dirname(self._collector_log)) + ) + + def _add_log_file_tar(self, file_path): + for f in glob.glob('{0}*'.format(file_path)): + log.info(" * {0}".format(f)) + self._tar.add( + f, + os.path.join(self._prefix, 'log', os.path.basename(f)) + ) + + # Collect all conf + def _add_conf_tar(self): + conf_path = get_config_path() + log.info(" * {0}".format(conf_path)) + self._tar.add( + self._strip_comment(conf_path), + os.path.join(self._prefix, 'etc', 'datadog.conf') + ) + + if not Platform.is_windows(): + supervisor_path = os.path.join( + os.path.dirname(get_config_path()), + 'supervisor.conf' + ) + log.info(" * {0}".format(supervisor_path)) + self._tar.add( + self._strip_comment(supervisor_path), + os.path.join(self._prefix, 'etc', 'supervisor.conf') + ) + + for file_path in glob.glob(os.path.join(get_confd_path(), '*.yaml')): + self._add_clean_confd(file_path) + + # Return path to a temp file without comment + def _strip_comment(self, file_path): + _, temp_path = tempfile.mkstemp(prefix='dd') + atexit.register(os.remove, temp_path) + with open(temp_path, 'w') as temp_file: + with open(file_path, 'r') as orig_file: + for line in orig_file.readlines(): + if not self.COMMENT_REGEX.match(line): + temp_file.write(re.sub(self.APIKEY_REGEX, self.REPLACE_APIKEY, line)) + + return temp_path + + # Remove password before collecting the file + def _add_clean_confd(self, file_path): + basename = os.path.basename(file_path) + + temp_path, password_found = self._strip_password(file_path) + log.info(" * {0}{1}".format(file_path, password_found)) + self._tar.add( + temp_path, + os.path.join(self._prefix, 'etc', 'conf.d', basename) + ) + + # Return path 
to a temp file without password and comment + def _strip_password(self, file_path): + _, temp_path = tempfile.mkstemp(prefix='dd') + atexit.register(os.remove, temp_path) + with open(temp_path, 'w') as temp_file: + with open(file_path, 'r') as orig_file: + password_found = '' + for line in orig_file.readlines(): + if self.PASSWORD_REGEX.match(line): + line = re.sub(self.PASSWORD_REGEX, r'\1 ********', line) + password_found = ' - this file contains a password which '\ + 'has been removed in the version collected' + if not self.COMMENT_REGEX.match(line): + temp_file.write(line) + + return temp_path, password_found + + # Add output of the command to the tarfile + def _add_command_output_tar(self, name, command): + temp_file = os.path.join(tempfile.gettempdir(), name) + if os.path.exists(temp_file): + os.remove(temp_file) + backup = sys.stdout + sys.stdout = open(temp_file, 'w') + command() + sys.stdout.close() + sys.stdout = backup + self._tar.add(temp_file, os.path.join(self._prefix, name)) + os.remove(temp_file) + + # Print supervisor status (and nothing on windows) + def _supervisor_status(self): + if Platform.is_windows(): + print 'Windows - status not implemented' + else: + agent_exec = self._get_path_agent_exec() + print '{0} status'.format(agent_exec) + self._print_output_command([agent_exec, 'status']) + supervisor_exec = self._get_path_supervisor_exec() + print '{0} status'.format(supervisor_exec) + self._print_output_command([supervisor_exec, + '-c', self._get_path_supervisor_conf(), + 'status']) + + # Find the agent exec (package or source) + def _get_path_agent_exec(self): + agent_exec = '/etc/init.d/datadog-agent' + if not os.path.isfile(agent_exec): + agent_exec = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + '../../bin/agent' + ) + return agent_exec + + # Find the supervisor exec (package or source) + def _get_path_supervisor_exec(self): + supervisor_exec = '/opt/datadog-agent/bin/supervisorctl' + if not 
os.path.isfile(supervisor_exec): + supervisor_exec = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + '../../venv/bin/supervisorctl' + ) + return supervisor_exec + + # Find the supervisor conf (package or source) + def _get_path_supervisor_conf(self): + supervisor_conf = '/etc/init.d/datadog-agent' + if not os.path.isfile(supervisor_conf): + supervisor_conf = os.path.join( + os.path.dirname(os.path.realpath(__file__)), + '../../supervisord/supervisord.conf' + ) + return supervisor_conf + + # Print output of command + def _print_output_command(self, command): + try: + status = subprocess.check_output(command, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError, e: + status = 'Not able to get output, exit number {0}, exit output:\n'\ + '{1}'.format(str(e.returncode), e.output) + print status + + # Print info of all agent components + def _info_all(self): + CollectorStatus.print_latest_status(verbose=True) + DogstatsdStatus.print_latest_status(verbose=True) + ForwarderStatus.print_latest_status(verbose=True) + + # Run a pip freeze + def _pip_freeze(self): + try: + import pip + pip.main(['freeze']) + except ImportError: + print 'Unable to import pip' + + # Check if the file is not too big before upload + def _check_size(self): + if os.path.getsize(self._tar_path) > self.MAX_UPLOAD_SIZE: + log.info('{0} won\'t be uploaded, its size is too large.\n'\ + 'You can send it directly to support by mail.') + sys.exit(1) + + # Function to ask for confirmation before upload + def _ask_for_confirmation(self): + print '{0} is going to be uploaded to Datadog.'.format(self._tar_path) + choice = raw_input('Do you want to continue [Y/n]? 
').lower() + if choice not in ['yes', 'y', '']: + print 'Aborting (you can still use {0})'.format(self._tar_path) + sys.exit(1) + + # Ask for email if needed + def _ask_for_email(self): + if self._case_id: + return '' + return raw_input('Please enter your email: ').lower() + + # Print output (success/error) of the request + def _analyse_result(self): + # First catch our custom explicit 400 + if self._resp.status_code == 400: + raise Exception('Your request is incorrect: {0}'.format(self._resp.json()['error'])) + # Then raise potential 500 and 404 + self._resp.raise_for_status() + try: + json_resp = self._resp.json() + # Failed parsing + except ValueError, e: + raise Exception('An unknown error has occurred - '\ + 'Please contact support by email') + # Finally, correct + log.info("Your logs were successfully uploaded. For future reference,"\ + " your internal case id is {0}".format(json_resp['case_id']))