From 014e09a5a559ddaa6bfea68f1004e958f0ff9d1b Mon Sep 17 00:00:00 2001
From: opacam
Date: Thu, 13 Sep 2018 14:58:42 +0200
Subject: [PATCH] Fix pylint E1101 for youtube modules

Despite the fixed pylint errors, this backend will not work for
now... but at least it will allow the pylint checks to pass.

The problems of these modules are beyond the actual source code,
because Google released a new API which is not implemented here and
the old one has been shut down.

References: #11
---
 coherence/backends/youtube_storage.py   |  3 ++-
 coherence/extern/youtubedl/youtubedl.py | 12 ++++++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/coherence/backends/youtube_storage.py b/coherence/backends/youtube_storage.py
index 9c93268a..ef0b384f 100644
--- a/coherence/backends/youtube_storage.py
+++ b/coherence/backends/youtube_storage.py
@@ -48,6 +48,7 @@ def __init__(self, uri, id,
         self.cache_maxsize = int(cache_maxsize)
         self.buffer_size = int(buffer_size)
         self.downloader = None
+        self.connection = None
 
         self.video_url = None  # the url we get from the youtube page
         self.stream_url = None  # the real video stream, cached somewhere
@@ -62,7 +63,7 @@ def __init__(self, uri, id,
     def requestFinished(self, result):
         """ self.connection is set in utils.ReverseProxyResource.render """
         self.info("ProxyStream requestFinished: %s", result)
-        if hasattr(self, 'connection'):
+        if self.connection is not None:
             self.connection.transport.loseConnection()
 
     def render(self, request):
diff --git a/coherence/extern/youtubedl/youtubedl.py b/coherence/extern/youtubedl/youtubedl.py
index 9faefd57..33141bc2 100644
--- a/coherence/extern/youtubedl/youtubedl.py
+++ b/coherence/extern/youtubedl/youtubedl.py
@@ -17,6 +17,7 @@
 import sys
 import time
 import urllib
+import urllib.error
 
 from coherence.upnp.core.utils import getPage
 
@@ -336,6 +337,11 @@ def report_finish(self):
         """Report download finished."""
         self.to_stdout('')
 
+    def _do_download(self, *args):
+        raise NotImplementedError('Error: the _do_download method was removed '
+                                  'at some point of the project, operation '
+                                  'cancelled')
+
     def process_info(self, info_dict):
         """Process a single dictionary returned by an InfoExtractor."""
         # Do nothing else if in simulate mode
@@ -373,6 +379,8 @@ def process_info(self, info_dict):
             return
 
         try:
+            # Fixme: This should be reimplemented; probably the best shot
+            # would be to use an external python package to deal with that
             success = self._do_download(filename, info_dict['url'])
         except (urllib.error.URLError,
                 http.client.HTTPException,
@@ -422,7 +430,7 @@ def download(self, url_list):
     def get_real_urls(self, url_list):
         """Download a given list of URLs."""
         if len(url_list) > 1 and self.fixed_template():
-            raise SameFileError(self._params['outtmpl'])
+            raise SameFileError(self.params['outtmpl'])
 
         for url in url_list:
             suitable_found = False
@@ -438,7 +446,7 @@ def got_all_results(all_results):
                 ret_code = self.trouble()
 
             if len(results) > 1 and self.fixed_template():
-                raise SameFileError(self._params['outtmpl'])
+                raise SameFileError(self.params['outtmpl'])
 
             real_urls = []
             for result in results: