diff --git a/CHANGELOG.md b/CHANGELOG.md
index a4233b72e2..31d8786448 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,12 +1,16 @@
 ## Unreleased
 
-#### Fixes
-
 #### New Features
 
 #### Improvements
 - Converted /config/postProcessing to a Vue component ([#4259](https://github.com/pymedusa/Medusa/pull/4259))
 
+#### Fixes
+- Fixed error when changing episode status from episode status management ([#4783](https://github.com/pymedusa/Medusa/pull/4783))
+- Fixed multi-episode snatches not being marked as snatched in history ([#229](https://github.com/pymedusa/Medusa/issues/229))
+- Fixed whole seasons being downloaded as multi-episode replacement ([#4750](https://github.com/pymedusa/Medusa/issues/4750))
+
+
 -----
 
 ## 0.2.8 (2018-07-28)
diff --git a/medusa/classes.py b/medusa/classes.py
index c4407c7b93..8637084838 100644
--- a/medusa/classes.py
+++ b/medusa/classes.py
@@ -211,8 +211,11 @@ def add_result_to_cache(self, cache):
 
     def create_episode_object(self):
         """Use this result to create an episode segment out of it."""
-        if self.actual_season and self.actual_episodes and self.series:
-            self.episodes = [self.series.get_episode(self.actual_season, ep) for ep in self.actual_episodes]
+        if self.actual_season and self.series:
+            if self.actual_episodes:
+                self.episodes = [self.series.get_episode(self.actual_season, ep) for ep in self.actual_episodes]
+            else:
+                self.episodes = self.series.get_all_episodes(self.actual_season)
         return self.episodes
 
     def finish_search_result(self, provider):
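The `classes.py` hunk above is the heart of the season-pack fix: a result that knows its season but carries no explicit episode numbers is now expanded to every episode of that season instead of producing no episode objects. A minimal sketch of that dispatch, using a stand-in `FakeSeries` class rather than Medusa's real `Series` object:

```python
# Illustrative sketch only; FakeSeries stands in for Medusa's Series object.
class FakeSeries(object):
    def get_episode(self, season, episode):
        return (season, episode)

    def get_all_episodes(self, season):
        # Pretend the season has three episodes.
        return [(season, ep) for ep in (1, 2, 3)]


def create_episode_object(series, actual_season, actual_episodes):
    if actual_season and series:
        if actual_episodes:
            # Explicit episode numbers: a single or multi-episode result.
            return [series.get_episode(actual_season, ep) for ep in actual_episodes]
        # No episode numbers at all: treat the result as a season pack.
        return series.get_all_episodes(actual_season)


print(create_episode_object(FakeSeries(), 2, [5]))   # [(2, 5)]
print(create_episode_object(FakeSeries(), 2, None))  # [(2, 1), (2, 2), (2, 3)]
```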
diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py
index dc2431f55e..5c3cf21431 100644
--- a/medusa/providers/generic_provider.py
+++ b/medusa/providers/generic_provider.py
@@ -246,21 +246,22 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
         results = {}
         items_list = []
 
+        season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
         for episode in episodes:
             if not manual_search:
-                cache_result = self.cache.search_cache(episode, forced_search=forced_search,
-                                                       down_cur_quality=download_current_quality)
-                if cache_result:
-                    if episode.episode not in results:
-                        results[episode.episode] = cache_result
-                    else:
-                        results[episode.episode].extend(cache_result)
-
+                cache_results = self.cache.find_needed_episodes(
+                    episode, forced_search=forced_search, down_cur_quality=download_current_quality
+                )
+                if cache_results:
+                    for episode_no in cache_results:
+                        if episode_no not in results:
+                            results[episode_no] = cache_results[episode_no]
+                        else:
+                            results[episode_no] += cache_results[episode_no]
                 continue
 
             search_strings = []
-            season_search = (len(episodes) > 1 or manual_search_type == 'season') and search_mode == 'sponly'
             if season_search:
                 search_strings = self._get_season_search_strings(episode)
             elif search_mode == 'eponly':
@@ -272,13 +273,11 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
                     search_string, ep_obj=episode, manual_search=manual_search
                 )
 
-            # In season search, we can't loop in episodes lists as we only need one episode to get the season string
+            # In a season search, we can't loop over the episodes list as we
+            # only need one episode to get the season search string
            if search_mode == 'sponly':
                 break
 
-        if len(results) == len(episodes):
-            return results
-
         # Remove duplicate items
         unique_items = self.remove_duplicate_mappings(items_list)
         log.debug('Found {0} unique items', len(unique_items))
@@ -302,8 +301,6 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
         # unpack all of the quality lists into a single sorted list
         items_list = list(sorted_items)
 
-        cl = []
-
         # Move through each item and parse it into a quality
         search_results = []
         for item in items_list:
@@ -442,6 +439,7 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
                 search_result.actual_season = int(sql_results[0][b'season'])
                 search_result.actual_episodes = [int(sql_results[0][b'episode'])]
 
+        cl = []
         # Iterate again over the search results, and see if there is anything we want.
         for search_result in search_results:
@@ -457,15 +455,15 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False
             log.debug('Found result {0} at {1}', search_result.name, search_result.url)
 
-            episode_object = search_result.create_episode_object()
+            search_result.create_episode_object()
             # result = self.get_result(episode_object, search_result)
             search_result.finish_search_result(self)
 
-            if not episode_object:
+            if not search_result.actual_episodes:
                 episode_number = SEASON_RESULT
                 log.debug('Found season pack result {0} at {1}', search_result.name, search_result.url)
-            elif len(episode_object) == 1:
-                episode_number = episode_object[0].episode
+            elif len(search_result.actual_episodes) == 1:
+                episode_number = search_result.actual_episode
                 log.debug('Found single episode result {0} at {1}', search_result.name, search_result.url)
             else:
                 episode_number = MULTI_EP_RESULT
@@ -520,10 +518,6 @@ def make_id(name):
         return re.sub(r'[^\w\d_]', '_', str(name).strip().lower())
 
-    def search_rss(self, episodes):
-        """Find cached needed episodes."""
-        return self.cache.find_needed_episodes(episodes)
-
     def seed_ratio(self):
         """Return ratio."""
         return ''
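With `search_rss()` and `search_cache()` removed, `find_needed_episodes()` becomes the single cache entry point, and it returns a mapping keyed by episode number (or by a season/multi-episode sentinel) rather than by episode object. A hedged sketch of the merge loop shown in the hunk above, with placeholder sentinel values standing in for the constants in `medusa.common`:

```python
# Rough sketch of the merge step; SEASON_RESULT and MULTI_EP_RESULT are
# placeholder values here -- the real sentinels live in medusa.common.
SEASON_RESULT = -2
MULTI_EP_RESULT = -1


def merge_cache_results(results, cache_results):
    """Extend `results` in place with per-key lists from one provider cache."""
    for episode_no, candidates in cache_results.items():
        if episode_no not in results:
            results[episode_no] = list(candidates)
        else:
            results[episode_no] += candidates
    return results


results = {1: ['ep1-from-provider-a']}
cache_hits = {1: ['ep1-from-cache'], SEASON_RESULT: ['season-pack']}
print(merge_cache_results(results, cache_hits))
# {1: ['ep1-from-provider-a', 'ep1-from-cache'], -2: ['season-pack']}
```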
diff --git a/medusa/search/core.py b/medusa/search/core.py
index 5f7919ee6d..a2280f733d 100644
--- a/medusa/search/core.py
+++ b/medusa/search/core.py
@@ -11,7 +11,6 @@
 import os
 import threading
 import time
-from builtins import str
 
 from medusa import (
     app,
@@ -476,28 +475,36 @@ def wanted_episodes(series_obj, from_date):
                 u'reason': should_search_reason,
             }
         )
+
         ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode'])
-        ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality]
+        ep_obj.wanted_quality = [
+            quality
+            for quality in all_qualities
+            if Quality.is_higher_quality(
+                cur_quality, quality, allowed_qualities, preferred_qualities
+            )
+        ]
         wanted.append(ep_obj)
 
     return wanted
 
 
 def search_for_needed_episodes(force=False):
-    """
-    Check providers for details on wanted episodes.
+    """Search providers for needed episodes.
 
-    :return: episodes we have a search hit for
+    :param force: run the search even if no episodes are needed
+    :return: list of found episodes
     """
-    found_results = {}
-
     show_list = app.showList
     from_date = datetime.date.fromordinal(1)
     episodes = []
 
     for cur_show in show_list:
         if cur_show.paused:
-            log.debug(u'Not checking for needed episodes of {0} because the show is paused', cur_show.name)
+            log.debug(
+                u'Not checking for needed episodes of {0} because the show is paused',
+                cur_show.name,
+            )
             continue
         episodes.extend(wanted_episodes(cur_show, from_date))
@@ -505,58 +512,71 @@ def search_for_needed_episodes(force=False):
         # nothing wanted so early out, ie: avoid whatever arbitrarily
         # complex thing a provider cache update entails, for example,
         # reading rss feeds
-        return list(itervalues(found_results))
-
-    original_thread_name = threading.currentThread().name
+        return []
 
     providers = enabled_providers(u'daily')
-
     if not providers:
-        log.warning(u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
-                    u' Please check your settings')
-        return list(itervalues(found_results))
+        log.warning(
+            u'No NZB/Torrent providers found or enabled in the application config for daily searches.'
+            u' Please check your settings'
+        )
+        return []
 
+    original_thread_name = threading.currentThread().name
     log.info(u'Using daily search providers')
+
     for cur_provider in providers:
-        threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name,
-                                                                            provider=cur_provider.name)
+        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
+            thread=original_thread_name, provider=cur_provider.name
+        )
         cur_provider.cache.update_cache()
 
+    single_results = {}
+    multi_results = []
     for cur_provider in providers:
-        threading.currentThread().name = u'{thread} :: [{provider}]'.format(thread=original_thread_name,
-                                                                            provider=cur_provider.name)
+        threading.currentThread().name = u'{thread} :: [{provider}]'.format(
+            thread=original_thread_name, provider=cur_provider.name
+        )
         try:
-            cur_found_results = cur_provider.search_rss(episodes)
+            found_results = cur_provider.cache.find_needed_episodes(episodes)
         except AuthException as error:
             log.error(u'Authentication error: {0}', ex(error))
             continue
 
         # pick a single result for each episode, respecting existing results
-        for cur_ep in cur_found_results:
-            if not cur_ep.series or cur_ep.series.paused:
-                log.debug(u'Skipping {0} because the show is paused ', cur_ep.pretty_name())
+        for episode_no, results in iteritems(found_results):
+            if results[0].series.paused:
+                log.debug(u'Skipping {0} because the show is paused.', results[0].series.name)
                 continue
 
             # if all results were rejected move on to the next episode
-            wanted_results = filter_results(cur_found_results[cur_ep])
+            wanted_results = filter_results(results)
             if not wanted_results:
-                log.debug(u'All found results for {0} were rejected.', cur_ep.pretty_name())
+                log.debug(u'All found results for {0} were rejected.', results[0].series.name)
                 continue
 
             best_result = pick_result(wanted_results)
-            # if it's already in the list (from another provider) and the newly found quality is no better then skip it
-            if cur_ep in found_results and best_result.quality <= found_results[cur_ep].quality:
-                continue
-
             # Skip the result if search delay is enabled for the provider.
             if delay_search(best_result):
                 continue
 
-            found_results[cur_ep] = best_result
+            if episode_no in (SEASON_RESULT, MULTI_EP_RESULT):
+                multi_results.append(best_result)
+            else:
+                # if it's already in the list (from another provider) and
+                # the newly found quality is no better, then skip it
+                if episode_no in single_results:
+                    allowed_qualities, preferred_qualities = results[0].series.current_qualities
+                    if not Quality.is_higher_quality(single_results[episode_no].quality,
+                                                     best_result.quality, allowed_qualities,
+                                                     preferred_qualities):
+                        continue
+
+                single_results[episode_no] = best_result
 
     threading.currentThread().name = original_thread_name
 
-    return list(itervalues(found_results))
+    return combine_results(multi_results, list(itervalues(single_results)))
 
 
 def delay_search(best_result):
@@ -805,47 +825,22 @@ def collect_multi_candidates(candidates, series_obj, episodes, down_cur_quality)
     if not wanted_candidates:
         return multi_candidates, single_candidates
 
-    searched_seasons = {str(x.season) for x in episodes}
-    main_db_con = db.DBConnection()
-    selection = main_db_con.select(
-        'SELECT episode '
-        'FROM tv_episodes '
-        'WHERE indexer = ?'
-        ' AND showid = ?'
-        ' AND ( season IN ( {0} ) )'.format(','.join(searched_seasons)),
-        [series_obj.indexer, series_obj.series_id]
-    )
-    all_eps = [int(x[b'episode']) for x in selection]
-    log.debug(u'Episodes list: {0}', all_eps)
-
     for candidate in wanted_candidates:
-        season_quality = candidate.quality
-
-        all_wanted = True
-        any_wanted = False
-        for cur_ep_num in all_eps:
-            for season in {x.season for x in episodes}:
-                if not series_obj.want_episode(season, cur_ep_num, season_quality,
-                                               down_cur_quality):
-                    all_wanted = False
-                else:
-                    any_wanted = True
+        wanted_episodes = [
+            series_obj.want_episode(ep_obj.season, ep_obj.episode, candidate.quality, down_cur_quality)
+            for ep_obj in candidate.episodes
+        ]
 
-        if all_wanted:
+        if all(wanted_episodes):
             log.info(u'All episodes in this season are needed, adding {0} {1}',
                      candidate.provider.provider_type,
                      candidate.name)
-            ep_objs = []
-            for cur_ep_num in all_eps:
-                for season in {x.season for x in episodes}:
-                    ep_objs.append(series_obj.get_episode(season, cur_ep_num))
-            candidate.episodes = ep_objs
 
             # Skip the result if search delay is enabled for the provider
             if not delay_search(candidate):
                 multi_candidates.append(candidate)
 
-        elif not any_wanted:
+        elif not any(wanted_episodes):
             log.debug(u'No episodes in this season are needed at this quality, ignoring {0} {1}',
                       candidate.provider.provider_type,
                       candidate.name)
@@ -866,18 +861,6 @@ def collect_multi_candidates(candidates, series_obj, episodes, down_cur_quality)
         elif len(cur_result.episodes) > 1:
             multi_candidates.append(cur_result)
 
-        # If this is a torrent all we can do is get the entire torrent,
-        # user will have to select which eps not do download in his torrent client
-        else:
-            log.info(u'Adding multi-episode result for full-season torrent.'
-                     u' Undesired episodes can be skipped in the torrent client if desired!')
-            ep_objs = []
-            for cur_ep_num in all_eps:
-                for season in {x.season for x in episodes}:
-                    ep_objs.append(series_obj.get_episode(season, cur_ep_num))
-            candidate.episodes = ep_objs
-            multi_candidates.append(candidate)
-
     return multi_candidates, single_candidates
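The daily-search rewrite above splits results into `single_results` (keyed by episode number) and `multi_results` (season packs and multi-episode results), and it replaces the old `best_result.quality <= existing.quality` comparison with `Quality.is_higher_quality()`, which respects the show's allowed and preferred quality lists. A sketch of that replacement rule, with a simplified ranking standing in for Medusa's real `Quality` class:

```python
# Illustrative only: a simplified stand-in for medusa.common.Quality's ranking.
HDTV, WEBDL, BLURAY = 1, 2, 4


def is_higher_quality(current, new, allowed, preferred):
    """Simplified rule: prefer anything on the preferred list, else higher allowed."""
    if new in preferred:
        return current not in preferred or new > current
    if current in preferred:
        return False
    return new in allowed and new > current


single_results = {}


def keep_best(episode_no, result, allowed, preferred):
    """Keep `result` only if it beats what another provider already found."""
    existing = single_results.get(episode_no)
    if existing and not is_higher_quality(existing['quality'], result['quality'],
                                          allowed, preferred):
        return  # the existing result is at least as good; skip the new one
    single_results[episode_no] = result


keep_best(3, {'quality': HDTV}, allowed=[HDTV, WEBDL], preferred=[BLURAY])
keep_best(3, {'quality': WEBDL}, allowed=[HDTV, WEBDL], preferred=[BLURAY])
print(single_results[3]['quality'])  # 2 -- the WEBDL result replaced HDTV
```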
diff --git a/medusa/tv/cache.py b/medusa/tv/cache.py
index 1ba7fb576c..c63e56121c 100644
--- a/medusa/tv/cache.py
+++ b/medusa/tv/cache.py
@@ -15,6 +15,10 @@
     app,
     db,
 )
+from medusa.common import (
+    MULTI_EP_RESULT,
+    SEASON_RESULT,
+)
 from medusa.helper.common import episode_num
 from medusa.helper.exceptions import AuthException
 from medusa.logger.adapters.style import BraceAdapter
@@ -413,7 +417,7 @@ def add_cache_entry(self, name, url, seeders, leechers, size, pubdate, parsed_re
         proper_tags = '|'.join(parse_result.proper_tags)
 
         if not self.item_in_cache(url):
-            log.debug('Added RSS item: {0} to cache: {1} with url {2}', name, self.provider_id, url)
+            log.debug('Added item: {0} to cache: {1} with url {2}', name, self.provider_id, url)
             return [
                 b'INSERT INTO [{name}] '
                 b'   (name, season, episodes, indexerid, url, time, quality, '
@@ -427,7 +431,7 @@ def add_cache_entry(self, name, url, seeders, leechers, size, pubdate, parsed_re
                 seeders, leechers, size, pubdate, proper_tags, cur_timestamp, parse_result.series.indexer]
             ]
         else:
-            log.debug('Updating RSS item: {0} to cache: {1}', name, self.provider_id)
+            log.debug('Updating item: {0} to cache: {1}', name, self.provider_id)
             return [
                 b'UPDATE [{name}] '
                 b'SET name=?, season=?, episodes=?, indexer=?, indexerid=?, '
@@ -441,13 +445,6 @@ def add_cache_entry(self, name, url, seeders, leechers, size, pubdate, parsed_re
                 seeders, leechers, size, pubdate, proper_tags, url]
             ]
 
-    def search_cache(self, episode, forced_search=False,
-                     down_cur_quality=False):
-        """Search cache for needed episodes."""
-        needed_eps = self.find_needed_episodes(episode, forced_search,
-                                               down_cur_quality)
-        return needed_eps[episode] if episode in needed_eps else []
-
     def item_in_cache(self, url):
         """Check if the url is already available for the specific provider."""
         cache_db_con = self._get_db()
@@ -475,7 +472,7 @@ def find_needed_episodes(self, episode, forced_search=False,
                 b'    season = ? AND'
                 b'    episodes LIKE ?'.format(name=self.provider_id),
                 [episode.series.indexer, episode.series.series_id, episode.season,
-                 b'%|{0}|%'.format(episode.episode)]
+                 '%|{0}|%'.format(episode.episode)]
             )
         else:
             for ep_obj in episode:
@@ -491,7 +488,7 @@ def find_needed_episodes(self, episode, forced_search=False,
                                  for x in ep_obj.wanted_quality))
                     ),
                     [ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season,
-                     b'%|{0}|%'.format(ep_obj.episode)]]
+                     '%|{0}|%'.format(ep_obj.episode)]]
                 )
 
         if results:
@@ -532,35 +529,45 @@ def find_needed_episodes(self, episode, forced_search=False,
                 log.debug('{0} is not an anime, skipping', series_obj.name)
                 continue
 
-            # get season and ep data (ignoring multi-eps for now)
-            search_result.season = int(cur_result[b'season'])
-            if search_result.season == -1:
-                continue
-
-            cur_ep = cur_result[b'episodes'].split('|')[1]
-            if not cur_ep:
-                continue
-
-            cur_ep = int(cur_ep)
-
+            # build a result object
             search_result.quality = int(cur_result[b'quality'])
             search_result.release_group = cur_result[b'release_group']
             search_result.version = cur_result[b'version']
+            search_result.name = cur_result[b'name']
+            search_result.url = cur_result[b'url']
+            search_result.season = int(cur_result[b'season'])
+            search_result.actual_season = search_result.season
 
-            # if the show says we want that episode then add it to the list
-            if not series_obj.want_episode(search_result.season, cur_ep, search_result.quality,
-                                           forced_search, down_cur_quality):
-                log.debug('Ignoring {0}', cur_result[b'name'])
+            sql_episodes = cur_result[b'episodes'].strip('|')
+            # TODO: Add support for season results
+            # Season result
+            if not sql_episodes:
+                ep_objs = series_obj.get_all_episodes(search_result.season)
+                actual_episodes = [ep.episode for ep in ep_objs]
+                episode_number = SEASON_RESULT
+            # Multi or single episode result
+            else:
+                actual_episodes = [int(ep) for ep in sql_episodes.split('|')]
+                ep_objs = [series_obj.get_episode(search_result.season, ep) for ep in actual_episodes]
+                if len(actual_episodes) == 1:
+                    episode_number = actual_episodes[0]
+                else:
+                    episode_number = MULTI_EP_RESULT
+
+            all_wanted = True
+            for cur_ep in actual_episodes:
+                # if the show says we want that episode then add it to the list
+                if not series_obj.want_episode(search_result.season, cur_ep, search_result.quality,
+                                               forced_search, down_cur_quality):
+                    log.debug('Ignoring {0} because one or more episodes are unwanted', cur_result[b'name'])
+                    all_wanted = False
+                    break
+
+            if not all_wanted:
                 continue
 
-            search_result.episodes = [series_obj.get_episode(search_result.season, cur_ep)]
-
-            search_result.actual_episodes = [search_result.episodes[0].episode]
-            search_result.actual_season = search_result.season
-
-            # build a result object
-            search_result.name = cur_result[b'name']
-            search_result.url = cur_result[b'url']
+            search_result.episodes = ep_objs
+            search_result.actual_episodes = actual_episodes
 
             log.debug(
                 '{id}: Using cached results from {provider} for series {show_name!r} episode {ep}', {
@@ -582,15 +589,13 @@ def find_needed_episodes(self, episode, forced_search=False,
             # FIXME: Should be changed to search_result.search_type
             search_result.forced_search = forced_search
-            search_result.download_current_quality = down_cur_quality
-            episode_object = search_result.episodes[0]
 
             # add it to the list
-            if episode_object not in needed_eps:
-                needed_eps[episode_object] = [search_result]
+            if episode_number not in needed_eps:
+                needed_eps[episode_number] = [search_result]
             else:
-                needed_eps[episode_object].append(search_result)
+                needed_eps[episode_number].append(search_result)
 
         # datetime stamp this search so cache gets cleared
         self.searched = time()
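The reworked `find_needed_episodes()` keys its return mapping off the cached `episodes` column, which stores episode numbers as a pipe-delimited string such as `'|3|4|'`; an empty value after stripping the pipes marks a season pack. A small sketch of that classification, with placeholder sentinel values standing in for the `medusa.common` constants:

```python
# Placeholder values for illustration only; Medusa imports the real
# MULTI_EP_RESULT and SEASON_RESULT sentinels from medusa.common.
MULTI_EP_RESULT = -1
SEASON_RESULT = -2


def classify_cached_row(episodes_field):
    """Map a cached `episodes` column to (episode_number_key, actual_episodes)."""
    sql_episodes = episodes_field.strip('|')
    if not sql_episodes:
        # Season result: no explicit episode numbers were stored.
        return SEASON_RESULT, []
    actual_episodes = [int(ep) for ep in sql_episodes.split('|')]
    if len(actual_episodes) == 1:
        # A single-episode result is keyed by its own episode number.
        return actual_episodes[0], actual_episodes
    return MULTI_EP_RESULT, actual_episodes


print(classify_cached_row('|7|'))    # (7, [7])
print(classify_cached_row('|3|4|'))  # (-1, [3, 4])
print(classify_cached_row('||'))     # (-2, [])
```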