diff --git a/medusa/__main__.py b/medusa/__main__.py
index 1a43ccb285..e3ba7ad89e 100755
--- a/medusa/__main__.py
+++ b/medusa/__main__.py
@@ -1201,6 +1201,7 @@ def initialize(self, console_logging=True):
# initialize the recommended shows database
recommended_db_con = db.DBConnection('recommended.db')
db.upgradeDatabase(recommended_db_con, recommended_db.InitialSchema)
+ db.sanityCheckDatabase(recommended_db_con, recommended_db.RecommendedSanityCheck)
# Performs a vacuum on cache.db
logger.debug(u'Performing a vacuum on the CACHE database')
diff --git a/medusa/classes.py b/medusa/classes.py
index 1044a155ec..d179749701 100644
--- a/medusa/classes.py
+++ b/medusa/classes.py
@@ -23,17 +23,21 @@
from dateutil import parser
+
from medusa import app, ws
from medusa.common import (
MULTI_EP_RESULT,
Quality,
SEASON_RESULT,
)
+from medusa.helper.common import sanitize_filename
from medusa.logger.adapters.style import BraceAdapter
from medusa.search import SearchType
from six import itervalues
+from trans import trans
+
log = BraceAdapter(logging.getLogger(__name__))
log.logger.addHandler(logging.NullHandler())
@@ -365,6 +369,22 @@ def select_series(self, all_series):
search_results = []
series_names = []
+ def searchterm_in_result(search_term, search_result):
+ norm_search_term = sanitize_filename(search_term.lower())
+ norm_result = sanitize_filename(search_result.lower())
+
+ if norm_search_term in norm_result:
+ return True
+
+ # translates national characters into similar sounding latin characters
+ # For ex. Физрук -> Fizruk
+            search_term_alpha = sanitize_filename(trans(search_term.lower()))
+
+            if search_term_alpha != norm_search_term and search_term_alpha in norm_result:
+ return True
+
+ return False
+
# get all available shows
if all_series:
if 'searchterm' in self.config:
@@ -382,8 +402,11 @@ def select_series(self, all_series):
if search_term.isdigit():
series_names.append(search_term)
+ if search_term.startswith('tt'):
+ series_names.append(search_term)
+
for name in series_names:
- if search_term.lower() in name.lower():
+ if searchterm_in_result(search_term, name):
if 'firstaired' not in cur_show:
default_date = parser.parse('1900-01-01').date()
cur_show['firstaired'] = default_date.strftime(dateFormat)
diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py
index 37660834ca..00972be837 100644
--- a/medusa/databases/cache_db.py
+++ b/medusa/databases/cache_db.py
@@ -16,10 +16,14 @@
# Add new migrations at the bottom of the list
# and subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
+ """Cache.db initial schema class."""
+
def test(self):
+ """Test db version."""
return self.hasTable('db_version')
def execute(self):
+ """Execute."""
queries = [
('CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);',),
('CREATE TABLE lastSearch (provider TEXT, time NUMERIC);',),
@@ -229,3 +233,19 @@ def test(self):
def execute(self):
self.connection.action('DROP TABLE IF EXISTS scene_exceptions;')
self.inc_major_version()
+
+
+class AddSeasonUpdatesTable(RemoveSceneExceptionsTable): # pylint:disable=too-many-ancestors
+ def test(self):
+ return self.hasTable('season_updates')
+
+ def execute(self):
+ self.connection.action(
+ """CREATE TABLE "season_updates" (
+ `season_updates_id` INTEGER,
+ `indexer` INTEGER NOT NULL,
+ `series_id` INTEGER NOT NULL,
+ `season` INTEGER,
+ `time` INTEGER,
+ PRIMARY KEY(season_updates_id))"""
+ )
diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py
index 84342d1fb9..be3354fda9 100644
--- a/medusa/databases/main_db.py
+++ b/medusa/databases/main_db.py
@@ -36,6 +36,7 @@ def check(self):
self.fix_show_nfo_lang()
self.fix_subtitle_reference()
self.clean_null_indexer_mappings()
+ self.clean_imdb_tt_ids()
def clean_null_indexer_mappings(self):
log.debug(u'Checking for null indexer mappings')
@@ -219,6 +220,13 @@ def fix_subtitles_codes(self):
def fix_show_nfo_lang(self):
self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 OR lang = '0';")
+ def clean_imdb_tt_ids(self):
+ # Get all records with 'tt'
+ log.debug(u'Cleaning indexer_mapping table, removing references to same indexer')
+ self.connection.action('DELETE from indexer_mapping WHERE indexer = mindexer')
+ log.debug(u'Cleaning indexer_mapping table from tt indexer ids')
+ self.connection.action("DELETE FROM indexer_mapping where indexer_id like '%tt%' or mindexer_id like '%tt%'")
+
# ======================
# = Main DB Migrations =
diff --git a/medusa/databases/recommended_db.py b/medusa/databases/recommended_db.py
index 1c17530424..d0d33b887f 100644
--- a/medusa/databases/recommended_db.py
+++ b/medusa/databases/recommended_db.py
@@ -12,6 +12,19 @@
log.logger.addHandler(logging.NullHandler())
+class RecommendedSanityCheck(db.DBSanityCheck):
+ """Sanity check class."""
+
+ def check(self):
+ """Check functions."""
+ self.remove_imdb_tt()
+
+ def remove_imdb_tt(self):
+ """Remove tt from imdb id's."""
+ log.debug(u'Remove shows added with an incorrect imdb id.')
+ self.connection.action("DELETE FROM shows WHERE source = 10 AND series_id like '%tt%'")
+
+
# Add new migrations at the bottom of the list
# and subclass the previous migration.
class InitialSchema(db.SchemaUpgrade):
diff --git a/medusa/helpers/trakt.py b/medusa/helpers/trakt.py
index 9bd4b22491..64734192b3 100644
--- a/medusa/helpers/trakt.py
+++ b/medusa/helpers/trakt.py
@@ -3,6 +3,7 @@
import logging
from medusa.helpers import get_title_without_year
+from medusa.indexers.imdb.api import ImdbIdentifier
from medusa.logger.adapters.style import BraceAdapter
from requests.exceptions import RequestException
@@ -70,8 +71,11 @@ def create_show_structure(show_obj):
'ids': {}
}
for valid_trakt_id in ['tvdb_id', 'trakt_id', 'tmdb_id', 'imdb_id']:
- if show_obj.externals.get(valid_trakt_id):
- show['ids'][valid_trakt_id[:-3]] = show_obj.externals.get(valid_trakt_id)
+ external = show_obj.externals.get(valid_trakt_id)
+ if external:
+ if valid_trakt_id == 'imdb_id':
+ external = ImdbIdentifier(external).imdb_id
+ show['ids'][valid_trakt_id[:-3]] = external
return show
diff --git a/medusa/indexers/api.py b/medusa/indexers/api.py
index f7e081f60d..4205b3adb7 100644
--- a/medusa/indexers/api.py
+++ b/medusa/indexers/api.py
@@ -32,9 +32,12 @@ def indexer(self, *args, **kwargs):
def config(self):
if self.indexer_id:
return indexerConfig[self.indexer_id]
- # Sort and put the default language first
- init_config['valid_languages'].sort(key=lambda i: '\0' if i == app.INDEXER_DEFAULT_LANGUAGE else i)
- return init_config
+        config = init_config
+        # Sort languages alphabetically, then move the default language (if
+        # present) to the front of the list.
+        config['valid_languages'].sort(
+            key=lambda lang: (lang != app.INDEXER_DEFAULT_LANGUAGE, lang))
+        return config
@property
def name(self):
diff --git a/medusa/indexers/base.py b/medusa/indexers/base.py
index bd5a713359..91d5d22ff1 100644
--- a/medusa/indexers/base.py
+++ b/medusa/indexers/base.py
@@ -20,13 +20,14 @@
IndexerSeasonNotFound,
IndexerSeasonUpdatesNotSupported,
IndexerShowNotFound,
+ IndexerShowUpdatesNotSupported,
)
from medusa.indexers.ui import BaseUI, ConsoleUI
from medusa.logger.adapters.style import BraceAdapter
from medusa.session.core import IndexerSession
from medusa.statistics import weights
-from six import integer_types, itervalues, string_types, text_type, viewitems
+from six import integer_types, itervalues, string_types, viewitems
log = BraceAdapter(logging.getLogger(__name__))
@@ -57,24 +58,18 @@ def __init__(self,
"""Pass these arguments on as args from the subclass."""
self.shows = ShowContainer() # Holds all Show classes
self.corrections = {} # Holds show-name to show_id mapping
-
- self.config = {}
-
- self.config['debug_enabled'] = debug # show debugging messages
-
- self.config['custom_ui'] = custom_ui
-
- self.config['interactive'] = interactive # prompt for correct series?
-
- self.config['select_first'] = select_first
-
- self.config['search_all_languages'] = search_all_languages
-
- self.config['use_zip'] = use_zip
-
- self.config['dvdorder'] = dvdorder
-
- self.config['proxy'] = proxy
+ self.name = None
+
+ self.config = {
+ 'debug_enabled': debug,
+ 'custom_ui': custom_ui,
+ 'interactive': interactive,
+ 'select_first': select_first,
+ 'search_all_languages': search_all_languages,
+ 'use_zip': use_zip,
+ 'dvdorder': dvdorder,
+ 'proxy': proxy
+ }
if cache is True:
self.config['cache_enabled'] = True
@@ -93,6 +88,7 @@ def __init__(self,
self.config['banners_enabled'] = banners
self.config['image_type'] = image_type
self.config['actors_enabled'] = actors
+ self.config['limit_seasons'] = []
if self.config['debug_enabled']:
warnings.warn('The debug argument to tvdbv2_api.__init__ will be removed in the next version. '
@@ -127,7 +123,46 @@ def __init__(self,
else:
self.config['language'] = language
- def _get_temp_dir(self): # pylint: disable=no-self-use
+ def get_nested_value(self, value, config):
+ """
+ Get a nested value from a dictionary using a dot separated string.
+
+ For example the config 'plot.summaries[0].text' will return the value for dict['plot']['summaries'][0].
+ :param value: Dictionary you want to get a value from.
+ :param config: Dot separated string.
+ :return: The value matching the config.
+ """
+ # Remove a level
+ split_config = config.split('.')
+ check_key = split_config[0]
+
+ if check_key.endswith(']'):
+ list_index = int(check_key.split('[')[-1].rstrip(']'))
+ check_key = check_key.split('[')[0]
+            check_value = value.get(check_key)
+            # An out-of-range index yields None instead of the whole list.
+            check_value = check_value[list_index] if check_value and list_index < len(check_value) else None
+ else:
+ check_value = value.get(check_key)
+ next_keys = '.'.join(split_config[1:])
+
+ if check_value is None:
+ return None
+
+ if isinstance(check_value, dict) and next_keys:
+ return self.get_nested_value(check_value, next_keys)
+ else:
+ try:
+ # Some object have a __dict__ attr. Let's try that.
+ # It shouldn't match basic types like strings, integers or floats.
+ parse_dict = check_value.__dict__
+ except AttributeError:
+ return check_value
+ else:
+ return self.get_nested_value(parse_dict, next_keys)
+
+ @staticmethod
+ def _get_temp_dir(): # pylint: disable=no-self-use
"""Return the [system temp dir]/tvdb_api-u501 (or tvdb_api-myuser)."""
if hasattr(os, 'getuid'):
uid = 'u{0}'.format(os.getuid()) # pylint: disable=no-member
@@ -145,19 +180,21 @@ def _get_show_data(self, sid, language):
return None
def _get_series(self, series):
- """Search for the series name.
+ """Search indexer for the series name.
If a custom_ui UI is configured, it uses this to select the correct
series. If not, and interactive == True, ConsoleUI is used, if not
BaseUI is used to select the first result.
:param series: the query for the series name
- :return: A list of series mapped to a UI (for example: a BaseUI or custom_ui).
+ :return: A list of series mapped to a UI (for example: a BaseUi or custom_ui).
"""
all_series = self.search(series)
if not all_series:
log.debug('Series result returned zero')
- raise IndexerShowNotFound('Show search returned zero results (cannot find show on Indexer)')
+ raise IndexerShowNotFound(
+ 'Show search for {series} returned zero results (cannot find show on Indexer)'.format(series=series)
+ )
if not isinstance(all_series, list):
all_series = [all_series]
@@ -184,7 +221,7 @@ def _set_show_data(self, sid, key, value):
def __repr__(self):
"""Indexer representation, returning representation of all shows indexed."""
- return text_type(self.shows)
+ return str(self.shows)
def _set_item(self, sid, seas, ep, attrib, value): # pylint: disable=too-many-arguments
"""Create a new episode, creating Show(), Season() and Episode()s as required.
@@ -391,14 +428,14 @@ def _save_images(self, series_id, images):
self._save_images_by_type(img_type, series_id, images_by_type)
def __getitem__(self, key):
- """Handle tvdbv2_instance['seriesname'] calls. The dict index should be the show id."""
+ """Handle indexer['seriesname'] calls. The dict index should be the show id."""
if isinstance(key, (integer_types, int)):
# Item is integer, treat as show id
if key not in self.shows:
self._get_show_data(key, self.config['language'])
return self.shows[key]
- key = text_type(key).lower()
+ key = str(key).lower()
self.config['searchterm'] = key
selected_series = self._get_series(key)
if isinstance(selected_series, dict):
@@ -409,19 +446,14 @@ def __getitem__(self, key):
self._set_show_data(show['id'], k, v)
return selected_series
- def get_last_updated_series(self, from_time, weeks=1, filter_show_list=None):
- """Retrieve a list with updated shows.
+ def get_last_updated_series(self, *args, **kwargs):
+ """Retrieve a list with updated shows."""
+ raise IndexerShowUpdatesNotSupported('Method get_last_updated_series not implemented by this indexer')
- :param from_time: epoch timestamp, with the start date/time
- :param weeks: number of weeks to get updates for.
- :param filter_show_list: Optional list of show objects, to use for filtering the returned list.
- """
+ def get_last_updated_seasons(self, *args, **kwargs):
+ """Retrieve a list with updated show seasons."""
raise IndexerSeasonUpdatesNotSupported('Method get_last_updated_series not implemented by this indexer')
- def get_episodes_for_season(self, show_id, *args, **kwargs):
- self._get_episodes(show_id, *args, **kwargs)
- return self.shows[show_id]
-
class ShowContainer(dict):
"""Simple dict that holds a series of Show instances."""
@@ -502,7 +534,7 @@ def __bool__(self):
def aired_on(self, date):
"""Search and return a list of episodes with the airdates."""
- ret = self.search(text_type(date), 'firstaired')
+ ret = self.search(str(date), 'firstaired')
if len(ret) == 0:
raise IndexerEpisodeNotFound('Could not find any episodes that aired on {0}'.format(date))
return ret
@@ -631,13 +663,13 @@ def search(self, term=None, key=None):
if term is None:
raise TypeError('must supply string to search for (contents)')
- term = text_type(term).lower()
+ term = str(term).lower()
for cur_key, cur_value in viewitems(self):
- cur_key, cur_value = text_type(cur_key).lower(), text_type(cur_value).lower()
+ cur_key, cur_value = str(cur_key).lower(), str(cur_value).lower()
if key is not None and cur_key != key:
# Do not search this key
continue
- if cur_value.find(text_type(term).lower()) > -1:
+ if cur_value.find(str(term).lower()) > -1:
return self
diff --git a/medusa/indexers/config.py b/medusa/indexers/config.py
index 2be2d6b05c..ee6443c888 100644
--- a/medusa/indexers/config.py
+++ b/medusa/indexers/config.py
@@ -6,6 +6,7 @@
from builtins import str
from medusa.app import app
+from medusa.indexers.imdb.api import Imdb
from medusa.indexers.tmdb.api import Tmdb
from medusa.indexers.tvdbv2.api import TVDBv2
from medusa.indexers.tvmaze.api import TVmaze
@@ -31,13 +32,13 @@
INDEXER_TVRAGE = 2 # Must keep
INDEXER_TVMAZE = 3
INDEXER_TMDB = 4
-EXTERNAL_IMDB = 10
+# FIXME: Change all references to EXTERNAL_IMDB to INDEXER_IMDB
+INDEXER_IMDB = EXTERNAL_IMDB = 10
EXTERNAL_ANIDB = 11
EXTERNAL_TRAKT = 12
EXTERNAL_ANILIST = 13
EXTERNAL_MAPPINGS = {
- EXTERNAL_IMDB: 'imdb_id',
EXTERNAL_ANIDB: 'anidb_id',
INDEXER_TVRAGE: 'tvrage_id',
EXTERNAL_TRAKT: 'trakt_id',
@@ -45,7 +46,7 @@
}
# trakt indexer name vs Medusa indexer
-TRAKT_INDEXERS = {'tvdb': INDEXER_TVDBV2, 'tmdb': INDEXER_TMDB, 'imdb': EXTERNAL_IMDB, 'trakt': EXTERNAL_TRAKT}
+TRAKT_INDEXERS = {'tvdb': INDEXER_TVDBV2, 'tmdb': INDEXER_TMDB, 'imdb': INDEXER_IMDB, 'trakt': EXTERNAL_TRAKT}
STATUS_MAP = {
'Continuing': [
@@ -127,6 +128,24 @@
'show_url': 'https://www.themoviedb.org/tv/',
'mapped_to': 'tmdb_id', # The attribute to which other indexers can map there tmdb id to
'identifier': 'tmdb', # Also used as key for the custom scenename exceptions. (_get_custom_exceptions())
+ },
+ INDEXER_IMDB: {
+ 'enabled': True,
+ 'id': INDEXER_IMDB,
+ 'name': 'IMDb',
+ 'module': Imdb,
+ 'api_params': {
+ 'language': 'en',
+ 'use_zip': True,
+ 'session': IndexerSession(cache_control={'cache_etags': False}),
+ },
+ 'xem_mapped_to': INDEXER_TVDBV2,
+ 'icon': 'imdb16.png',
+ 'scene_loc': '{base_url}/scene_exceptions/scene_exceptions_imdb.json'.format(base_url=app.BASE_PYMEDUSA_URL),
+ 'show_url': 'http://www.imdb.com/title/tt',
+ 'base_url': 'https://v2.sg.media-imdb.com',
+ 'mapped_to': 'imdb_id', # The attribute to which other indexers can map their imdb id to
+ 'identifier': 'imdb', # Also used as key for the custom scenename exceptions. (_get_custom_exceptions())
}
}
diff --git a/medusa/indexers/exceptions.py b/medusa/indexers/exceptions.py
index 0620a3405f..391e4dbb61 100644
--- a/medusa/indexers/exceptions.py
+++ b/medusa/indexers/exceptions.py
@@ -29,6 +29,10 @@ def __init__(self, message, language):
self.language = language
+class IndexerShowIncomplete(IndexerException):
+ """Show found but incomplete in the indexer (incomplete show)."""
+
+
class IndexerSeasonNotFound(IndexerException):
"""Season cannot be found in the indexer."""
@@ -41,6 +45,10 @@ class IndexerAttributeNotFound(IndexerException):
"""Raised if an episode does not have the requested attribute (such as a episode name)."""
+class IndexerShowUpdatesNotSupported(IndexerException):
+ """Raised if an episode does not have the requested attribute (such as a episode name)."""
+
+
class IndexerSeasonUpdatesNotSupported(IndexerException):
"""Raised if an episode does not have the requested attribute (such as a episode name)."""
diff --git a/medusa/indexers/imdb/__init__.py b/medusa/indexers/imdb/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/medusa/indexers/imdb/api.py b/medusa/indexers/imdb/api.py
new file mode 100644
index 0000000000..6bd28e5fa6
--- /dev/null
+++ b/medusa/indexers/imdb/api.py
@@ -0,0 +1,825 @@
+# coding=utf-8
+"""Imdb indexer api module."""
+
+from __future__ import unicode_literals
+
+import locale
+import logging
+from collections import OrderedDict, namedtuple
+from datetime import datetime
+from itertools import chain
+from time import time
+
+from imdbpie import imdbpie
+
+from medusa import app
+from medusa.bs4_parser import BS4Parser
+from medusa.indexers.base import (Actor, Actors, BaseIndexer)
+from medusa.indexers.exceptions import (
+ IndexerError, IndexerShowIncomplete, IndexerShowNotFound, IndexerUnavailable
+)
+from medusa.logger.adapters.style import BraceAdapter
+from medusa.show.show import Show
+
+from requests.exceptions import RequestException
+
+from six import string_types, text_type
+
+
+log = BraceAdapter(logging.getLogger(__name__))
+log.logger.addHandler(logging.NullHandler())
+
+
+class ImdbIdentifier(object):
+ """Imdb identifier class."""
+
+ def __init__(self, imdb_id):
+ """Initialize an identifier object. Can be used to get the full textual id e.a. 'tt3986523'.
+
+ Or the series_id: 3986523
+ """
+ self._imdb_id = None
+ self._series_id = None
+ self.imdb_id = imdb_id
+
+ def _clean(self, imdb_id):
+ if isinstance(imdb_id, string_types):
+ return imdb_id.strip('/').split('/')[-1]
+
+ @property
+ def series_id(self):
+ """Return series id."""
+ return self._series_id
+
+ @series_id.setter
+ def series_id(self, value):
+ """Set series id."""
+ self._series_id = value
+
+ @property
+ def imdb_id(self):
+ """Return imdb id."""
+ return self._imdb_id
+
+ @imdb_id.setter
+ def imdb_id(self, value):
+ """Set imdb id."""
+ if isinstance(value, string_types) and 'tt' in value:
+ self._imdb_id = self._clean(value)
+ self.series_id = int(self._imdb_id.split('tt')[-1])
+ else:
+ self._imdb_id = 'tt{0}'.format(text_type(value).zfill(7))
+ self.series_id = int(value)
+
+
+class Imdb(BaseIndexer):
+ """Create easy-to-use interface to name of season/episode name.
+
+ >>> indexer_api = imdb()
+ >>> indexer_api['Scrubs'][1][24]['episodename']
+ u'My Last Day'
+ """
+
+ def __init__(self, *args, **kwargs): # pylint: disable=too-many-locals,too-many-arguments
+ """Imdb constructor."""
+ super(Imdb, self).__init__(*args, **kwargs)
+
+ self.indexer = 10
+
+ # Initiate the imdbpie API
+ self.imdb_api = imdbpie.Imdb(session=self.config['session'])
+
+ self.config['artwork_prefix'] = '{base_url}{image_size}{file_path}'
+
+ # An api to indexer series/episode object mapping
+ self.series_map = [
+ ('id', 'imdb_id'),
+ ('id', 'base.id'),
+ ('seriesname', 'title'),
+ ('seriesname', 'base.title'),
+ ('summary', 'plot.outline.text'),
+ ('firstaired', 'year'),
+ ('poster', 'base.image.url'),
+ ('show_url', 'base.id'),
+ ('firstaired', 'base.seriesStartYear'),
+ ('rating', 'ratings.rating'),
+ ('votes', 'ratings.ratingCount'),
+ ('nextepisode', 'base.nextEpisode'),
+ ('lastaired', 'base.seriesEndYear'),
+ # Could not find contentrating in api.
+ ]
+
+ self.episode_map = [
+ ('id', 'id'),
+ ('episodename', 'title'),
+ ('firstaired', 'year'),
+ ('absolute_number', 'absolute_number'),
+ ]
+
+ def _map_results(self, imdb_response, key_mappings=None, list_separator='|'):
+ """
+ Map results to a a key_mapping dict.
+
+ :param imdb_response: imdb response obect, or a list of response objects.
+ :type imdb_response: list(object)
+ :param key_mappings: Dict of imdb attributes, that are mapped to normalized keys.
+ :type key_mappings: list
+ :param list_separator: A list separator used to transform lists to a character separator string.
+ :type list_separator: string.
+ """
+ parsed_response = []
+
+ if not isinstance(imdb_response, list):
+ imdb_response = [imdb_response]
+
+ for item in imdb_response:
+ return_dict = {}
+ try:
+ title_type = item.get('type') or item.get('base', {}).get('titleType')
+ if title_type in ('feature', 'video game', 'TV short', 'TV movie', None):
+ continue
+
+ return_dict['status'] = 'Ended'
+
+ for key, config in self.series_map:
+ value = self.get_nested_value(item, config)
+ if not value:
+ continue
+ if key == 'id' and value:
+ value = ImdbIdentifier(value.rstrip('/')).series_id
+ if key == 'contentrating':
+ value = text_type(value)
+ if key == 'poster':
+                        return_dict['poster_thumb'] = '{0}V1_SY1000_AL_.jpg'.format(value.split('V1')[0])
+ if key == 'nextepisode' and value:
+ return_dict['status'] = 'Continuing'
+
+ return_dict[key] = value
+
+ # Add static value for airs time.
+ return_dict['airs_time'] = '0:00AM'
+
+ if return_dict.get('firstaired'):
+ return_dict['status'] = 'Ended' if return_dict.get('lastaired') else 'Continuing'
+
+ except Exception as error:
+ log.warning('Exception trying to parse attribute: {0}, with exception: {1!r}', item, error)
+
+ parsed_response.append(return_dict)
+
+ return parsed_response if len(parsed_response) != 1 else parsed_response[0]
+
+ def _show_search(self, series):
+ """
+ Use the Imdb API to search for a show.
+
+ :param series: The series name that's searched for as a string
+ :return: A list of Show objects.series_map
+ """
+ try:
+ results = self.imdb_api.search_for_title(series)
+ except LookupError as error:
+ raise IndexerShowNotFound('Could not get any results searching for {series} using indexer Imdb. Cause: {cause!r}'.format(
+ series=series, cause=error
+ ))
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Could not get any results searching for {series} using indexer Imdb. Cause: {cause!r}'.format(
+ series=series, cause=error
+ ))
+
+ if results:
+ return results
+ else:
+ return None
+
+ def search(self, series):
+ """Search imdb.com for the series name.
+
+ :param series: the query for the series name
+ :return: An ordered dict with the show searched for. In the format of OrderedDict{"series": [list of shows]}
+ """
+ # series = series.encode('utf-8')
+ log.debug('Searching for show {0}', series)
+ mapped_results = []
+ try:
+ if series.startswith('tt'):
+ show_by_id = self._get_show_by_id(series)
+ # The search by id result, is already mapped. We can just add it to the array with results.
+ mapped_results.append(show_by_id['series'])
+ return OrderedDict({'series': mapped_results})['series']
+ results = self._show_search(series)
+ except IndexerShowNotFound:
+ results = None
+
+ if not results:
+ return
+
+ mapped_results = self._map_results(results, self.series_map, '|')
+
+ return OrderedDict({'series': mapped_results})['series']
+
+ def _get_show_by_id(self, imdb_id): # pylint: disable=unused-argument
+ """Retrieve imdb show information by imdb id, or if no imdb id provided by passed external id.
+
+ :param imdb_id: The shows imdb id
+ :return: An ordered dict with the show searched for.
+ """
+ results = None
+ log.debug('Getting all show data for {0}', imdb_id)
+ try:
+ results = self.imdb_api.get_title(ImdbIdentifier(imdb_id).imdb_id)
+ except LookupError as error:
+ raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+
+ if not results:
+ return
+
+ mapped_results = self._map_results(results, self.series_map)
+
+ if not mapped_results:
+ return
+
+ try:
+ # Get firstaired
+ releases = self.imdb_api.get_title_releases(ImdbIdentifier(imdb_id).imdb_id)
+ except LookupError as error:
+ raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Could not get title releases for show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+
+ if releases.get('releases'):
+ first_released = sorted([r['date'] for r in releases['releases']])[0]
+ mapped_results['firstaired'] = first_released
+
+ try:
+ companies = self.imdb_api.get_title_companies(ImdbIdentifier(imdb_id).imdb_id)
+ # If there was a release check if it was distributed.
+ if companies.get('distribution'):
+ origins = self.imdb_api.get_title_versions(ImdbIdentifier(imdb_id).imdb_id)['origins'][0]
+ released_in_regions = [
+ dist for dist in companies['distribution'] if dist.get('regions') and origins in dist['regions']
+ ]
+ # Used item.get('startYear') because a startYear is not always available.
+ first_release = sorted(released_in_regions, key=lambda x: x.get('startYear'))
+
+ if first_release:
+ mapped_results['network'] = first_release[0]['company']['name']
+ except (AttributeError, LookupError, RequestException):
+ log.info('No company data available for {0}, cant get a network', imdb_id)
+
+ return OrderedDict({'series': mapped_results})
+
+ def _get_episodes(self, imdb_id, detailed=True, aired_season=None, *args, **kwargs): # pylint: disable=unused-argument
+ """Get all the episodes for a show by imdb id.
+
+ :param imdb_id: Series imdb id.
+ :return: An ordered dict with the show searched for. In the format of OrderedDict{"episode": [list of episodes]}
+ """
+ # Parse episode data
+ log.debug('Getting all episodes of {0}', imdb_id)
+
+ if aired_season:
+ aired_season = [aired_season] if not isinstance(aired_season, list) else aired_season
+
+ series_id = imdb_id
+ imdb_id = ImdbIdentifier(imdb_id).imdb_id
+
+ try:
+ # results = self.imdb_api.get_title_episodes(imdb_id)
+ results = self.imdb_api.get_title_episodes(ImdbIdentifier(imdb_id).imdb_id)
+ except LookupError as error:
+ raise IndexerShowIncomplete(
+ 'Show episode search exception, '
+ 'could not get any episodes. Exception: {e!r}'.format(
+ e=error
+ )
+ )
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Error connecting to Imdb api. Caused by: {0!r}'.format(error))
+
+ if not results or not results.get('seasons'):
+ return False
+
+ absolute_number_counter = 1
+ for season in results.get('seasons'):
+ if aired_season and season.get('season') not in aired_season:
+ continue
+
+ for episode in season['episodes']:
+ season_no, episode_no = episode.get('season'), episode.get('episode')
+
+ if season_no is None or episode_no is None:
+ log.debug('{0}: Found incomplete episode with season: {1!r} and episode: {2!r})',
+ imdb_id, season_no, episode_no)
+ continue # Skip to next episode
+
+ if season_no > 0:
+ episode['absolute_number'] = absolute_number_counter
+ absolute_number_counter += 1
+
+ for k, config in self.episode_map:
+ v = self.get_nested_value(episode, config)
+ if v is not None:
+ if k == 'id':
+ v = ImdbIdentifier(v).series_id
+ if k == 'firstaired':
+ v = '{year}-01-01'.format(year=v)
+
+ self._set_item(series_id, season_no, episode_no, k, v)
+
+ if detailed and season.get('season'):
+ # Enrich episode for the current season.
+ self._get_episodes_detailed(imdb_id, season['season'])
+
+ # Scrape the synopsys and the episode thumbnail.
+ self._enrich_episodes(imdb_id, season['season'])
+
+ # Try to calculate the airs day of week
+ self._calc_airs_day_of_week(imdb_id)
+
+ def _calc_airs_day_of_week(self, imdb_id):
+ series_id = ImdbIdentifier(imdb_id).series_id
+
+ if self[series_id]:
+ all_episodes = []
+
+ for season in self[series_id]:
+ all_episodes.extend([
+ self[series_id][season][ep]
+ for ep in self[series_id][season]
+ if self[series_id][season][ep].get('firstaired')
+ ])
+
+ # Get the last (max 10 airdates) and try to calculate an airday + time.
+ last_airdates = sorted(all_episodes, key=lambda x: x['firstaired'], reverse=True)[:10]
+ weekdays = {}
+ for episode in last_airdates:
+ if episode['firstaired']:
+ day = self._parse_date_with_local(datetime.strptime(episode['firstaired'], '%Y-%m-%d'), '%A', 'C', method='strftime')
+ weekdays[day] = 1 if day not in weekdays else weekdays[day] + 1
+
+ airs_day_of_week = sorted(weekdays.keys(), key=lambda x: weekdays[x], reverse=True)[0] if weekdays else None
+ self._set_show_data(series_id, 'airs_dayofweek', airs_day_of_week)
+
+ @staticmethod
+ def _parse_date_with_local(date, template, locale_format='C', method='strptime'):
+        lc = locale.setlocale(locale.LC_TIME)
+        locale.setlocale(locale.LC_TIME, locale_format)
+ try:
+ if method == 'strptime':
+ return datetime.strptime(date, template)
+ else:
+ return date.strftime(template)
+ except (AttributeError, ValueError):
+ raise
+ finally:
+ locale.setlocale(locale.LC_TIME, lc)
+
+ def _get_episodes_detailed(self, imdb_id, season):
+ """Enrich the episodes with additional information for a specific season.
+
+ :param imdb_id: imdb id including the `tt`.
+ :param season: season passed as integer.
+ """
+ try:
+ results = self.imdb_api.get_title_episodes_detailed(imdb_id=ImdbIdentifier(imdb_id).imdb_id, season=season)
+ except (AttributeError, LookupError, RequestException) as error:
+ raise IndexerShowIncomplete(
+ 'Show episode search exception, '
+ 'could not get any episodes. Exception: {e!r}'.format(
+ e=error
+ )
+ )
+
+ if not results.get('episodes'):
+ return
+
+ series_id = ImdbIdentifier(imdb_id).series_id
+ for episode in results.get('episodes'):
+ try:
+ if episode['releaseDate']['first']['date']:
+ first_aired = self._parse_date_with_local(
+ datetime.strptime(
+ episode['releaseDate']['first']['date'], '%Y-%m-%d'
+ ), '%Y-%m-%d', 'C', method='strftime'
+ )
+ self._set_item(series_id, season, episode['episodeNumber'], 'firstaired', first_aired)
+ except ValueError:
+ pass
+
+ self._set_item(series_id, season, episode['episodeNumber'], 'rating', episode['rating'])
+ self._set_item(series_id, season, episode['episodeNumber'], 'votes', episode['ratingCount'])
+
+ def _enrich_episodes(self, imdb_id, season):
+ """Enrich the episodes with additional information for a specific season.
+
+ For this we're making use of html scraping using beautiful soup.
+ :param imdb_id: imdb id including the `tt`.
+ :param season: season passed as integer.
+ """
+ episodes_url = 'http://www.imdb.com/title/{imdb_id}/episodes?season={season}'
+ episodes = []
+
+ try:
+ response = self.config['session'].get(episodes_url.format(
+ imdb_id=ImdbIdentifier(imdb_id).imdb_id, season=season)
+ )
+ if not response or not response.text:
+ log.warning('Problem requesting episode information for show {0}, and season {1}.', imdb_id, season)
+ return
+
+ Episode = namedtuple('Episode', ['episode_number', 'season_number', 'synopsis', 'thumbnail'])
+ with BS4Parser(response.text, 'html5lib') as html:
+ for episode in html.find_all('div', class_='list_item'):
+ try:
+ episode_number = int(episode.find('meta')['content'])
+ except AttributeError:
+ pass
+
+ try:
+ synopsis = episode.find('div', class_='item_description').get_text(strip=True)
+ if 'Know what this is about?' in synopsis:
+ synopsis = ''
+ except AttributeError:
+ synopsis = ''
+
+ try:
+ episode_thumbnail = episode.find('img', class_='zero-z-index')['src']
+ except (AttributeError, TypeError):
+ episode_thumbnail = None
+
+ episodes.append(Episode(episode_number=episode_number, season_number=season,
+ synopsis=synopsis, thumbnail=episode_thumbnail))
+
+ except Exception as error:
+ log.exception('Error while trying to enrich imdb series {0}, {1}', ImdbIdentifier(imdb_id).imdb_id, error)
+
+ for episode in episodes:
+ self._set_item(imdb_id, episode.season_number, episode.episode_number, 'overview', episode.synopsis)
+ self._set_item(imdb_id, episode.season_number, episode.episode_number, 'filename', episode.thumbnail)
+
+ def _parse_images(self, imdb_id, language='en'):
+ """Parse Show and Season posters.
+
+ Any key starting with an underscore has been processed (not the raw
+ data from the XML)
+
+ This interface will be improved in future versions.
+ Available sources: amazon, custom, getty, paidcustomer, presskit, userupload.
+ Available types: behind_the_scenes, event, poster, product, production_art, publicity, still_frame
+ """
+ log.debug('Getting show banners for {0}', imdb_id)
+
+ try:
+ images = self.imdb_api.get_title_images(ImdbIdentifier(imdb_id).imdb_id)
+ except LookupError as error:
+ raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Could not get images for show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+
+ image_mapping = {'poster': 'poster', 'production_art': 'fanart'} # Removed 'still_frame': 'fanart',
+ thumb_height = 640
+
+ _images = {}
+ try:
+ for image in images.get('images', []):
+ image_type = image_mapping.get(image.get('type'))
+ if image_type not in ('poster', 'fanart'):
+ continue
+ image_type_thumb = image_type + '_thumb'
+ if image_type not in _images:
+ _images[image_type] = {}
+ _images[image_type + '_thumb'] = {}
+
+ # Store the images for each resolution available
+ # Always provide a resolution or 'original'.
+ resolution = '{0}x{1}'.format(image['width'], image['height'])
+ thumb_width = int((float(image['width']) / image['height']) * thumb_height)
+ resolution_thumb = '{0}x{1}'.format(thumb_width, thumb_height)
+
+ if resolution not in _images[image_type]:
+ _images[image_type][resolution] = {}
+ _images[image_type_thumb][resolution_thumb] = {}
+
+ bid = image['id'].split('/')[-1]
+
+ if image_type in ['season', 'seasonwide']:
+ if int(image.sub_key) not in _images[image_type][resolution]:
+ _images[image_type][resolution][int(image.sub_key)] = {}
+ if bid not in _images[image_type][resolution][int(image.sub_key)]:
+ _images[image_type][resolution][int(image.sub_key)][bid] = {}
+ base_path = _images[image_type_thumb][resolution][int(image.sub_key)][bid]
+ else:
+ if bid not in _images[image_type][resolution]:
+ _images[image_type][resolution][bid] = {}
+ _images[image_type_thumb][resolution_thumb][bid] = {}
+ base_path = _images[image_type][resolution][bid]
+ base_path_thumb = _images[image_type_thumb][resolution_thumb][bid]
+
+ base_path['bannertype'] = image_type
+ base_path['bannertype2'] = resolution
+ base_path['_bannerpath'] = image.get('url')
+ base_path['bannerpath'] = image.get('url').split('/')[-1]
+ base_path['languages'] = image.get('languages')
+ base_path['source'] = image.get('source')
+ base_path['id'] = bid
+
+ base_path_thumb['bannertype'] = image_type_thumb
+ base_path_thumb['bannertype2'] = resolution_thumb
+ base_path_thumb['_bannerpath'] = image['url'].split('V1')[0] + 'V1_SY{0}_AL_.jpg'.format(thumb_height)
+ base_path_thumb['bannerpath'] = image['url'].split('V1')[0] + 'V1_SY{0}_AL_.jpg'.format(thumb_height).split('/')[-1]
+ base_path_thumb['id'] = bid
+
+ except Exception as error:
+ log.warning('Could not parse Poster for show id: {0}, with exception: {1!r}', imdb_id, error)
+ return
+
+ def _get_poster_thumb(thumbs):
+ for bid in thumbs.values():
+ for image in bid.values():
+ return image.get('bannerpath')
+
+ if _images.get('poster_thumb'):
+ self._set_show_data(imdb_id, 'poster', _get_poster_thumb(_images.get('poster_thumb')))
+
+ self._save_images(imdb_id, _images, language=language)
+ self._set_show_data(imdb_id, '_banners', _images)
+
+ def _save_images(self, series_id, images, language='en'):
+ """
+ Save the highest rated images for the show.
+
+ :param series_id: The series ID
+ :param images: A nested mapping of image info
+ images[type][res][id] = image_info_mapping
+ type: image type such as `banner`, `poster`, etc
+ res: resolution such as `1024x768`, `original`, etc
+ id: the image id
+ """
+ def by_aspect_ratio(image):
+ w, h = image['bannertype2'].split('x')
+ return int(w) / int(h)
+
+ # Parse Posters and Banners (by aspect ratio)
+ if images.get('poster'):
+ # Flatten image_type[res][id].values() into list of values
+ merged_images = chain.from_iterable(
+ resolution.values()
+ for resolution in images['poster'].values()
+ )
+
+ # Sort by aspect ratio
+ sort_images = sorted(
+ merged_images,
+ key=by_aspect_ratio
+ )
+
+ # Filter out the posters with an aspect ratio of < 0.8
+ posters = [
+ image for image in sort_images if 0.6 > by_aspect_ratio(image) < 0.8
+ and image.get('languages')
+ and image['languages'] == [language]
+ ]
+ banners = [image for image in sort_images if by_aspect_ratio(image) > 3]
+
+ if len(posters):
+ highest_rated = posters[0]
+ img_url = highest_rated['_bannerpath']
+ log.debug(
+ u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n'
+ 'aspect ratio of {aspect_ratio} ', {
+ 'resolution': highest_rated['bannertype2'],
+ 'aspect_ratio': by_aspect_ratio(highest_rated)
+ }
+ )
+ self._set_show_data(series_id, 'poster', img_url)
+
+ if len(banners):
+ highest_rated = banners[-1]
+ img_url = highest_rated['_bannerpath']
+ log.debug(
+ u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n'
+ 'aspect ratio of {aspect_ratio} ', {
+ 'resolution': highest_rated['bannertype2'],
+ 'aspect_ratio': by_aspect_ratio(highest_rated)
+ }
+ )
+ self._set_show_data(series_id, 'banner', img_url)
+
+ if images.get('fanart'):
+ # Flatten image_type[res][id].values() into list of values
+ merged_images = chain.from_iterable(
+ resolution.values()
+ for resolution in images['fanart'].values()
+ )
+
+ # Sort by resolution
+ sort_images = sorted(
+ merged_images,
+ key=by_aspect_ratio,
+ reverse=True,
+ )
+
+ if len(sort_images):
+ highest_rated = sort_images[0]
+ img_url = highest_rated['_bannerpath']
+ log.debug(
+ u'Selecting poster with the lowest aspect ratio (resolution={resolution})\n'
+ 'aspect ratio of {aspect_ratio} ', {
+ 'resolution': highest_rated['bannertype2'],
+ 'aspect_ratio': by_aspect_ratio(highest_rated)
+ }
+ )
+ self._set_show_data(series_id, 'fanart', img_url)
+
+ def _parse_actors(self, imdb_id):
+ """Get and parse actors using the get_title_credits route.
+
+ Actors are retrieved using t['show name]['_actors'].
+
+ Any key starting with an underscore has been processed (not the raw
+ data from the indexer)
+ """
+ log.debug('Getting actors for {0}', imdb_id)
+
+ try:
+ actors = self.imdb_api.get_title_credits(ImdbIdentifier(imdb_id).imdb_id)
+ except LookupError as error:
+ raise IndexerShowNotFound('Could not find show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+ except (AttributeError, RequestException) as error:
+ raise IndexerUnavailable('Could not get actors for show {imdb_id} using indexer Imdb. Cause: {cause!r}'.format(
+ imdb_id=imdb_id, cause=error
+ ))
+
+ if not actors.get('credits') or not actors['credits'].get('cast'):
+ return
+
+ cur_actors = Actors()
+ for order, cur_actor in enumerate(actors['credits']['cast'][:25]):
+ save_actor = Actor()
+ save_actor['id'] = cur_actor['id'].split('/')[-2]
+ save_actor['image'] = cur_actor.get('image', {}).get('url', None)
+ save_actor['name'] = cur_actor['name']
+ save_actor['role'] = cur_actor['characters'][0] if cur_actor.get('characters') else ''
+ save_actor['sortorder'] = order
+ cur_actors.append(save_actor)
+ self._set_show_data(imdb_id, '_actors', cur_actors)
+
+    def _get_show_data(self, imdb_id, language='en'): # pylint: disable=too-many-branches,too-many-statements,too-many-locals
+        """Get show data by imdb id.
+
+        Take a series ID, gets the epInfo URL and parses the imdb json response into the shows dict in a format:
+        shows[series_id][season_number][episode_number]
+
+        :param imdb_id: imdb id including the `tt`.
+        :param language: two letter language code, only forwarded to the image parsing.
+        :raises IndexerError: when no series information could be retrieved.
+        :return: True when the show data was stored.
+        """
+        log.debug('Getting all series data for {0}', imdb_id)
+
+        # Parse show information
+        series_info = self._get_show_by_id(imdb_id)
+
+        if not series_info:
+            log.debug('Series result returned zero')
+            raise IndexerError('Series result returned zero')
+
+        # save all retrieved show information to Show object.
+        for k, v in series_info['series'].items():
+            if v is not None:
+                self._set_show_data(imdb_id, k, v)
+
+        # Get external ids.
+        # As the external id's are not part of the shows default response, we need to make an additional call for it.
+        # Im checking for the external value. to make sure only externals with a value get in.
+        self._set_show_data(imdb_id, 'externals', {external_id: text_type(getattr(self.shows[imdb_id], external_id, None))
+                                                   for external_id in ['tvdb_id', 'imdb_id', 'tvrage_id']
+                                                   if getattr(self.shows[imdb_id], external_id, None)})
+
+        # get episode data
+        if self.config['episodes_enabled']:
+            self._get_episodes(imdb_id, aired_season=self.config['limit_seasons'])
+
+        # Parse banners
+        if self.config['banners_enabled']:
+            self._parse_images(imdb_id, language=language)
+
+        # Parse actors
+        if self.config['actors_enabled']:
+            self._parse_actors(imdb_id)
+
+        return True
+
+    @staticmethod
+    def _calc_update_interval(date_season_last, season_finished=True):
+        """Calculate the refresh interval for a season.
+
+        :param date_season_last: airdate (date) of the season's last episode.
+        :param season_finished: True when the season finished airing (or the show ended).
+        :return: interval in seconds, never less than 2 days.
+        """
+        minimum_interval = 2 * 24 * 3600  # 2 days
+
+        # Season not yet finished, let's use the minimum update interval of 2 days.
+        if not season_finished:
+            return minimum_interval
+
+        # season is finished, or show has ended. So let's calculate using the delta divided by 50.
+        # NOTE(review): the delta runs from the unix epoch to the last airdate,
+        # so more recent seasons get *longer* intervals -- confirm this is the
+        # intended aging behavior (vs. a delta from the airdate to now).
+        interval = int((datetime.combine(date_season_last, datetime.min.time()) - datetime.utcfromtimestamp(0)).total_seconds() / 50)
+
+        return max(minimum_interval, interval)
+
+    # Public methods, usable separate from the default api's interface api['show_id']
+    def get_last_updated_seasons(self, show_list=None, cache=None, *args, **kwargs):
+        """Return the seasons that need an update for each of the passed shows.
+
+        :param show_list: list of series ids, where season updates are retrieved for.
+        :param cache: cache object used to get/set the last-update timestamp per season.
+        :return: mapping of series id -> list of season numbers to update.
+        """
+        show_season_updates = {}
+
+        # we don't have a single api call that we can run to check if an update is required.
+        # So we'll have to check what's there in the library, and decide based on the last episode's date, if a
+        # season update is needed.
+
+        for series_id in show_list:
+            series_obj = Show.find_by_id(app.showList, self.indexer, series_id)
+            all_episodes_local = series_obj.get_all_episodes()
+
+            total_updates = []
+            results = None
+            # A small api call to get the amount of known seasons
+            try:
+                results = self.imdb_api.get_title_episodes(ImdbIdentifier(series_id).imdb_id)
+            except LookupError as error:
+                raise IndexerShowIncomplete(
+                    'Show episode search exception, '
+                    'could not get any episodes. Exception: {error!r}'.format(
+                        error=error
+                    )
+                )
+            except (AttributeError, RequestException) as error:
+                raise IndexerUnavailable('Error connecting to Imdb api. Caused by: {0!r}'.format(error))
+
+            if not results or not results.get('seasons'):
+                continue
+
+            # Get all the seasons
+
+            # Loop through seasons
+            for season in results['seasons']:
+                season_number = season.get('season')
+
+                # Imdb api gives back a season without the 'season' key. This season has special episodes.
+                # Dont know what this is, but skipping it.
+                if not season_number:
+                    continue
+
+                # Check if the season is already known in our local db.
+                local_season_episodes = [ep for ep in all_episodes_local if ep.season == season_number]
+                remote_season_episodes = season['episodes']
+                if not local_season_episodes or len(remote_season_episodes) != len(local_season_episodes):
+                    total_updates.append(season_number)
+                    log.debug('{series}: Season {season} seems to be a new season. Adding it.',
+                              {'series': series_obj.name, 'season': season_number})
+                    continue
+
+                # Per season, get latest episode airdate
+                sorted_episodes = sorted(local_season_episodes, key=lambda x: x.airdate)
+                # date_season_start = sorted_episodes[0].airdate
+                date_season_last = sorted_episodes[-1].airdate
+
+                # Get date for last updated, from the cache object.
+
+                # Calculate update interval for the season. A season counts as
+                # finished when the remote results contain a next season.
+                update_interval = self._calc_update_interval(
+                    # date_season_start,
+                    date_season_last,
+                    season_finished=bool([s for s in results['seasons'] if s.get('season') == season_number + 1])
+                )
+
+                last_update = cache.get_last_update_season(self.indexer, series_id, season_number)
+                if last_update < time() - update_interval:
+                    # This season should be updated.
+                    total_updates.append(season_number)
+
+                    # Update last_update for this season.
+                    cache.set_last_update_season(self.indexer, series_id, season_number)
+                else:
+                    log.debug(
+                        '{series}: Season {season} seems to have been recently updated. Not scheduling a new refresh',
+                        {'series': series_obj.name, 'season': season_number}
+                    )
+
+            show_season_updates[series_id] = list(set(total_updates))
+
+        return show_season_updates
diff --git a/medusa/indexers/imdb/exceptions.py b/medusa/indexers/imdb/exceptions.py
new file mode 100644
index 0000000000..ce7494e1c5
--- /dev/null
+++ b/medusa/indexers/imdb/exceptions.py
@@ -0,0 +1,57 @@
+# coding=utf-8
+# Author: p0psicles
+#
+# This file is part of Medusa.
+#
+# Medusa is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Medusa is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Medusa. If not, see <http://www.gnu.org/licenses/>.
+
+
+
+
+
+
+