From c6c6453da1e12785359d6ad9db82bdaf150132ee Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 19 May 2018 15:33:03 +0200 Subject: [PATCH 01/93] Added ep_status and ep_quality to tv_episodes table --- medusa/databases/main_db.py | 144 +++++++++++++++++++++++++----------- 1 file changed, 101 insertions(+), 43 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index aa2660a857..d875ceaa71 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -4,12 +4,11 @@ import datetime import logging -import os.path import sys import warnings from medusa import common, db, helpers, subtitles -from medusa.helper.common import dateTimeFormat, episode_num +from medusa.helper.common import dateTimeFormat from medusa.indexers.indexer_config import STATUS_MAP from medusa.logger.adapters.style import BraceAdapter from medusa.name_parser.parser import NameParser @@ -23,7 +22,7 @@ MAX_DB_VERSION = 44 # Used to check when checking for updates -CURRENT_MINOR_DB_VERSION = 9 +CURRENT_MINOR_DB_VERSION = 10 class MainSanityCheck(db.DBSanityCheck): @@ -37,7 +36,7 @@ def check(self): self.fix_invalid_airdates() # self.fix_subtitles_codes() self.fix_show_nfo_lang() - self.convert_archived_to_compound() + # self.convert_archived_to_compound() self.fix_subtitle_reference() self.clean_null_indexer_mappings() @@ -87,39 +86,39 @@ def fix_subtitle_reference(self): "WHERE episode_id = %i" % (sql_result[b'episode_id']) ) - def convert_archived_to_compound(self): - log.debug(u'Checking for archived episodes not qualified') - - query = "SELECT episode_id, showid, e.status, e.location, season, episode, anime " + \ - "FROM tv_episodes e, tv_shows s WHERE e.status = %s AND e.showid = s.indexer_id" % common.ARCHIVED - - sql_results = self.connection.select(query) - if sql_results: - log.warning(u'Found {0} shows with bare archived status, ' - u'attempting automatic conversion...', - len(sql_results)) - - for archivedEp in sql_results: - fixedStatus = 
common.Quality.composite_status(common.ARCHIVED, common.Quality.UNKNOWN) - existing = archivedEp[b'location'] and os.path.exists(archivedEp[b'location']) - if existing: - quality = common.Quality.name_quality(archivedEp[b'location'], archivedEp[b'anime'], extend=False) - fixedStatus = common.Quality.composite_status(common.ARCHIVED, quality) - - log.info( - u'Changing status from {old_status} to {new_status} for' - u' {id}: {ep} at {location} (File {result})', - {'old_status': common.statusStrings[common.ARCHIVED], - 'new_status': common.statusStrings[fixedStatus], - 'id': archivedEp[b'showid'], - 'ep': episode_num(archivedEp[b'season'], - archivedEp[b'episode']), - 'location': archivedEp[b'location'] or 'unknown location', - 'result': 'EXISTS' if existing else 'NOT FOUND', } - ) - - self.connection.action("UPDATE tv_episodes SET status = %i WHERE episode_id = %i" % - (fixedStatus, archivedEp[b'episode_id'])) + # def convert_archived_to_compound(self): + # log.debug(u'Checking for archived episodes not qualified') + # + # query = "SELECT episode_id, showid, e.status, e.location, season, episode, anime " + \ + # "FROM tv_episodes e, tv_shows s WHERE e.ep_status = %s AND e.showid = s.indexer_id" % ARCHIVED + # + # sql_results = self.connection.select(query) + # if sql_results: + # log.warning(u'Found {0} shows with bare archived status, ' + # u'attempting automatic conversion...', + # len(sql_results)) + # + # for archivedEp in sql_results: + # fixedStatus = common.Quality.composite_status(common.ARCHIVED, common.Quality.UNKNOWN) + # existing = archivedEp[b'location'] and os.path.exists(archivedEp[b'location']) + # if existing: + # quality = common.Quality.name_quality(archivedEp[b'location'], archivedEp[b'anime'], extend=False) + # fixedStatus = common.Quality.composite_status(common.ARCHIVED, quality) + # + # log.info( + # u'Changing status from {old_status} to {new_status} for' + # u' {id}: {ep} at {location} (File {result})', + # {'old_status': 
common.statusStrings[common.ARCHIVED], + # 'new_status': common.statusStrings[fixedStatus], + # 'id': archivedEp[b'showid'], + # 'ep': episode_num(archivedEp[b'season'], + # archivedEp[b'episode']), + # 'location': archivedEp[b'location'] or 'unknown location', + # 'result': 'EXISTS' if existing else 'NOT FOUND', } + # ) + # + # self.connection.action("UPDATE tv_episodes SET status = %i WHERE episode_id = %i" % + # (fixedStatus, archivedEp[b'episode_id'])) def fix_duplicate_episodes(self): @@ -170,30 +169,30 @@ def fix_missing_table_indexes(self): if not self.connection.select("PRAGMA index_info('idx_status')"): log.info(u'Missing idx_status for TV Episodes table detected!,' u' fixing...') - self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, season, episode, airdate)") + self.connection.action("CREATE INDEX idx_status ON tv_episodes (ep_status, ep_quality, season, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"): log.info(u'Missing idx_sta_epi_air for TV Episodes table' u' detected!, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, episode, airdate)") + self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (ep_status, ep_quality, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"): log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table' u' detected!, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)") + self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, ep_status, ep_quality, airdate)") def fix_unaired_episodes(self): curDate = datetime.date.today() sql_results = self.connection.select( - "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND status in (?,?) AND season > 0", + "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND ep_status in (?, ?) 
AND season > 0", [curDate.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: log.info(u'Fixing unaired episode status for episode_id: {0!s}', cur_unaired[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", + self.connection.action("UPDATE tv_episodes SET ep_status = ? WHERE episode_id = ?", [common.UNAIRED, cur_unaired[b'episode_id']]) def fix_indexer_show_statues(self): @@ -209,7 +208,7 @@ def fix_episode_statuses(self): cur_ep[b'showid']) log.info(u'Fixing malformed episode status with' u' episode_id: {0!s}', cur_ep[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", + self.connection.action("UPDATE tv_episodes SET ep_status = ? WHERE episode_id = ?", [common.UNSET, cur_ep[b'episode_id']]) def fix_invalid_airdates(self): @@ -706,3 +705,62 @@ def create_series_dict(): # Flag the image migration. from medusa import app app.MIGRATE_IMAGES = True + + +class AddSeparatedStatusQualityFields(AddIndexerIds): + """Add new separated status and quality fields.""" + + def test(self): + """Test if the version is at least 44.10""" + return self.connection.version >= (44, 10) + + def execute(self): + backupDatabase(self.connection.version) + + log.info(u'Dropping the unique index on idx_sta_epi_air') + self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_air') + + log.info(u'Dropping the unique index on idx_sta_epi_air') + self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_sta_air') + + log.info(u'Dropping the unique index on idx_status') + self.connection.action('DROP INDEX IF EXISTS idx_status') + + log.info(u'Adding new ep_status and ep_quality fields in the tv_episodes table') + self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') + + self.connection.action('CREATE TABLE IF NOT EXISTS new_tv_episodes ' + '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, ' + 'name TEXT, season NUMERIC, episode 
NUMERIC,description TEXT, airdate NUMERIC, hasnfo NUMERIC, ' + 'hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, ' + 'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, ' + 'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, ' + 'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC, ' + 'ep_status NUMERIC, ep_quality NUMERIC)') + + self.connection.action('INSERT INTO new_tv_episodes (showid, indexerid, indexer, ' + 'name, season, episode,description, airdate, hasnfo, ' + 'hastbn, status, location, file_size, release_name, ' + 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' + 'is_proper, scene_season, scene_episode, absolute_number, ' + 'scene_absolute_number, version, release_group, manually_searched, ' + 'ep_status, ep_quality) SELECT showid, indexerid, indexer, ' + 'name, season, episode,description, airdate, hasnfo, ' + 'hastbn, status, location, file_size, release_name, ' + 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' + 'is_proper, scene_season, scene_episode, absolute_number, ' + 'scene_absolute_number, version, release_group, manually_searched, -1, -1 ' + 'FROM tv_episodes;') + self.connection.action("DROP TABLE IF EXISTS tv_episodes;") + self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") + self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") + + log.info(u'Split composite status in to ep_status and ep_quality') + sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status HAVING status > -1") + + for status in sql_results: + split = common.Quality.split_composite_status(status[b'status']) + self.connection.action("UPDATE tv_episodes SET ep_status = ?, ep_quality = ? 
WHERE status = ?", + [split.status, split.quality, status[b'status']]) + + self.inc_minor_version() From be96bac7c65f8d568b009bc461be305009caa90b Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 19 May 2018 18:09:44 +0200 Subject: [PATCH 02/93] Replaced ep_status and ep_quality with status and quality. --- medusa/databases/main_db.py | 42 +++++++++++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 6 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index d875ceaa71..f310e2c8a3 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -169,30 +169,30 @@ def fix_missing_table_indexes(self): if not self.connection.select("PRAGMA index_info('idx_status')"): log.info(u'Missing idx_status for TV Episodes table detected!,' u' fixing...') - self.connection.action("CREATE INDEX idx_status ON tv_episodes (ep_status, ep_quality, season, episode, airdate)") + self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, quality, season, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"): log.info(u'Missing idx_sta_epi_air for TV Episodes table' u' detected!, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (ep_status, ep_quality, episode, airdate)") + self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, quality, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"): log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table' u' detected!, fixing...') - self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, ep_status, ep_quality, airdate)") + self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, quality, airdate)") def fix_unaired_episodes(self): curDate = datetime.date.today() sql_results = self.connection.select( - "SELECT episode_id FROM tv_episodes WHERE (airdate > ? 
or airdate = 1) AND ep_status in (?, ?) AND season > 0", + "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND status in (?, ?) AND season > 0", [curDate.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: log.info(u'Fixing unaired episode status for episode_id: {0!s}', cur_unaired[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET ep_status = ? WHERE episode_id = ?", + self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.UNAIRED, cur_unaired[b'episode_id']]) def fix_indexer_show_statues(self): @@ -208,7 +208,7 @@ def fix_episode_statuses(self): cur_ep[b'showid']) log.info(u'Fixing malformed episode status with' u' episode_id: {0!s}', cur_ep[b'episode_id']) - self.connection.action("UPDATE tv_episodes SET ep_status = ? WHERE episode_id = ?", + self.connection.action("UPDATE tv_episodes SET status = ? WHERE episode_id = ?", [common.UNSET, cur_ep[b'episode_id']]) def fix_invalid_airdates(self): @@ -763,4 +763,34 @@ def execute(self): self.connection.action("UPDATE tv_episodes SET ep_status = ?, ep_quality = ? WHERE status = ?", [split.status, split.quality, status[b'status']]) + # Remove ep_status and ep_quality and add quality field. 
+ # Move status from ep_status and quality from ep_quality + log.info(u'Adding data from ep_status and ep_quality fields into status/quality fields the tv_episodes table') + self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') + + self.connection.action('CREATE TABLE IF NOT EXISTS new_tv_episodes ' + '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, ' + 'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, ' + 'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, ' + 'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, ' + 'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, ' + 'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC)') + + self.connection.action('INSERT INTO new_tv_episodes (showid, indexerid, indexer, ' + 'name, season, episode, description, airdate, hasnfo, ' + 'hastbn, status, quality, location, file_size, release_name, ' + 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' + 'is_proper, scene_season, scene_episode, absolute_number, ' + 'scene_absolute_number, version, release_group, manually_searched) ' + 'SELECT showid, indexerid, indexer, ' + 'name, season, episode, description, airdate, hasnfo, ' + 'hastbn, ep_status, ep_quality, location, file_size, release_name, ' + 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' + 'is_proper, scene_season, scene_episode, absolute_number, ' + 'scene_absolute_number, version, release_group, manually_searched ' + 'FROM tv_episodes;') + self.connection.action("DROP TABLE IF EXISTS tv_episodes;") + self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") + self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") + self.inc_minor_version() From 13ac6818be87d7604ac62b8b1945638eb901233c Mon Sep 17 00:00:00 2001 From: 
P0psicles Date: Sat, 19 May 2018 18:29:43 +0200 Subject: [PATCH 03/93] Adapted Episode class to use separated status and quality. --- medusa/tv/episode.py | 95 +++++++++++++++----------------------------- 1 file changed, 32 insertions(+), 63 deletions(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 45f269c999..be77ada3ce 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -251,7 +251,8 @@ def __init__(self, series, season, episode, filepath=''): self.airdate = date.fromordinal(1) self.hasnfo = False self.hastbn = False - self._status = UNSET + self.status = UNSET + self.quality = 0 self.file_size = 0 self.release_name = '' self.is_proper = False @@ -409,52 +410,14 @@ def air_date(self): ).isoformat(b'T') @property - def status(self): - """Return the existing status as is.""" - return self._status - - @status.setter - def status(self, value): - """Set the status.""" - self._status = value - - @property - def splitted_status(self): - """Return the existing status removing the quality from it.""" - return Quality.split_composite_status(self._status) - - @property - def splitted_status_status(self): - """Return the status from the status/quality composite.""" - return self.splitted_status.status - - @splitted_status_status.setter - def splitted_status_status(self, value): - """ - Only set the status (reuse existing quality) of the composite status. - - :param value: The new status. - """ - self._status = Quality.composite_status(value, self.splitted_status_quality) - - @property - def splitted_status_quality(self): - """Return the quality from the status/quality composite.""" - return self.splitted_status.quality - - @splitted_status_quality.setter - def splitted_status_quality(self, value): - """ - Only set the quality (reuse existing status) of the composite status. - - :param value: The new quality. 
- """ - self._status = Quality.composite_status(self.splitted_status_status, value) + def status_name(self): + """Return the status name.""" + return statusStrings[self.status] @property - def status_name(self): + def quality_name(self): """Return the status name.""" - return statusStrings[Quality.split_composite_status(self.status).status] + return Quality.qualityStrings[self.quality] def is_location_valid(self, location=None): """Whether the location is a valid file. @@ -665,6 +628,7 @@ def load_from_db(self, season, episode): self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) self.status = int(sql_results[0][b'status'] or -1) + self.quality = int(sql_results[0][b'quality'] or -1) # don't overwrite my location if sql_results[0][b'location']: @@ -1095,6 +1059,7 @@ def __str__(self): result += 'hasnfo: %r\n' % self.hasnfo result += 'hastbn: %r\n' % self.hastbn result += 'status: %r\n' % self.status + result += 'quality: %r\n' % self.quality return result def to_json(self, detailed=True): @@ -1115,6 +1080,7 @@ def to_json(self, detailed=True): data['title'] = self.name data['subtitles'] = self.subtitles data['status'] = self.status_name + data['quality'] = self.quality data['release'] = NonEmptyDict() data['release']['name'] = self.release_name data['release']['group'] = self.release_group @@ -1257,6 +1223,7 @@ def get_sql(self): b' hasnfo = ?, ' b' hastbn = ?, ' b' status = ?, ' + b' quality = ?, ' b' location = ?, ' b' file_size = ?, ' b' release_name = ?, ' @@ -1272,9 +1239,9 @@ def get_sql(self): b' episode_id = ?', [self.indexerid, self.indexer, self.name, self.description, ','.join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, - self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, - self.series.series_id, self.season, self.episode, self.absolute_number, self.version, - 
self.release_group, self.manually_searched, ep_id]] + self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name, + self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, + self.version, self.release_group, self.manually_searched, ep_id]] else: # Don't update the subtitle language when the srt file doesn't contain the # alpha2 code, keep value from subliminal @@ -1292,6 +1259,7 @@ def get_sql(self): b' hasnfo = ?, ' b' hastbn = ?, ' b' status = ?, ' + b' quality = ?, ' b' location = ?, ' b' file_size = ?, ' b' release_name = ?, ' @@ -1307,9 +1275,9 @@ def get_sql(self): b' episode_id = ?', [self.indexerid, self.indexer, self.name, self.description, self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, - self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, - self.series.series_id, self.season, self.episode, self.absolute_number, self.version, - self.release_group, self.manually_searched, ep_id]] + self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name, + self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, + self.version, self.release_group, self.manually_searched, ep_id]] else: # use a custom insert method to get the data into the DB. return [ @@ -1327,6 +1295,7 @@ def get_sql(self): b' hasnfo, ' b' hastbn, ' b' status, ' + b' quality, ' b' location, ' b' file_size, ' b' release_name, ' @@ -1339,11 +1308,11 @@ def get_sql(self): b' release_group) ' b'VALUES ' b' ((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? 
AND episode = ?), ' - b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);', + b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);', [self.series.indexer, self.series.series_id, self.season, self.episode, self.indexerid, self.series.indexer, self.name, self.description, ','.join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, - self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, - self.release_name, self.is_proper, self.series.series_id, self.season, self.episode, + self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.quality, self.location, + self.file_size, self.release_name, self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, self.version, self.release_group]] except Exception as error: log.error('{id}: Error while updating database: {error_msg!r}', @@ -1364,6 +1333,7 @@ def save_to_db(self): b'hasnfo': self.hasnfo, b'hastbn': self.hastbn, b'status': self.status, + b'quality': self.quality, b'location': self.location, b'file_size': self.file_size, b'release_name': self.release_name, @@ -1490,8 +1460,6 @@ def release_group(series, name): return '' return parse_result.release_group.strip('.- []{}') - _, ep_qual = Quality.split_composite_status(self.status) # @UnusedVariable - if app.NAMING_STRIP_YEAR: series_name = re.sub(r'\(\d+\)$', '', self.series.name).rstrip() else: @@ -1537,12 +1505,12 @@ def release_group(series, name): '%EN': ep_name, '%E.N': dot(ep_name), '%E_N': us(ep_name), - '%QN': Quality.qualityStrings[ep_qual], - '%Q.N': dot(Quality.qualityStrings[ep_qual]), - '%Q_N': us(Quality.qualityStrings[ep_qual]), - '%SQN': Quality.sceneQualityStrings[ep_qual] + encoder, - '%SQ.N': dot(Quality.sceneQualityStrings[ep_qual] + encoder), - '%SQ_N': us(Quality.sceneQualityStrings[ep_qual] + encoder), + '%QN': Quality.qualityStrings[self.quality], + '%Q.N': dot(Quality.qualityStrings[self.quality]), + '%Q_N': us(Quality.qualityStrings[self.quality]), + 
'%SQN': Quality.sceneQualityStrings[self.quality] + encoder, + '%SQ.N': dot(Quality.sceneQualityStrings[self.quality] + encoder), + '%SQ_N': us(Quality.sceneQualityStrings[self.quality] + encoder), '%S': str(self.season), '%0S': '%02d' % self.season, '%E': str(self.episode), @@ -2025,7 +1993,7 @@ def update_status(self, filepath): The status should only be changed if either the size or the filename changed. :param filepath: Path to the new episode file. """ - old_status, old_quality = Quality.split_composite_status(self.status) + old_status, old_quality = self.status, self.quality old_location = self.location # Changing the name of the file might also change its quality @@ -2055,7 +2023,8 @@ def update_status(self, filepath): new_status = ARCHIVED with self.lock: - self.status = Quality.composite_status(new_status, new_quality) + self.status = new_status + self.quality = new_quality if not same_name: # Reset release name as the name changed From f2b6a1311a7c05d76e8b9e2510efd623e2fe763b Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sat, 19 May 2018 21:01:25 +0200 Subject: [PATCH 04/93] -1 is also a valid status. * Missed self.quality. 
--- medusa/databases/main_db.py | 2 +- medusa/tv/episode.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index f310e2c8a3..2a7278382a 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -756,7 +756,7 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") log.info(u'Split composite status in to ep_status and ep_quality') - sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status HAVING status > -1") + sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status") for status in sql_results: split = common.Quality.split_composite_status(status[b'status']) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index be77ada3ce..d738f5e16e 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -1493,7 +1493,7 @@ def release_group(series, name): relgrp = app.UNKNOWN_RELEASE_GROUP # try to get the release encoder to comply with scene naming standards - encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), ep_qual) + encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), self.quality) if encoder: log.debug('Found codec for {series} {ep}', {'series': series_name, 'ep': ep_name}) From d1fccb1f51d2df033f2d7592ace9b552778970ba Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 00:05:07 +0200 Subject: [PATCH 05/93] Refactored most of the composite statussus / quality into separate attributes. 
--- medusa/common.py | 17 ++++++------ medusa/failed_history.py | 3 +- medusa/history.py | 30 +++++++------------- medusa/post_processor.py | 39 +++++++++++++------------- medusa/refiners/tv_episode.py | 2 +- medusa/search/backlog.py | 17 +++++++----- medusa/search/core.py | 16 ++++++----- medusa/search/daily.py | 4 +-- medusa/search/manual.py | 5 ++-- medusa/search/proper.py | 4 +-- medusa/server/api/v1/core.py | 31 +++++++++++---------- medusa/server/web/home/handler.py | 9 ++++-- medusa/server/web/manage/handler.py | 19 ++++++++----- medusa/subtitles.py | 3 +- medusa/tv/series.py | 43 ++++++++++++++++------------- tests/test_common.py | 2 +- 16 files changed, 125 insertions(+), 119 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index a3aa3abe97..bbe49b8f6f 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -605,18 +605,18 @@ def status_from_name(name, anime=False): } @staticmethod - def should_search(status, show_obj, manually_searched): + def should_search(cur_status, cur_quality, show_obj, manually_searched): """Return true if that episodes should be search for a better quality. If cur_quality is Quality.NONE, it will return True as its a invalid quality If cur_quality is Quality.UNKNOWN it will return True only if is not in Allowed (Unknown can be in Allowed) - :param status: current status of the episode + :param cur_status: current status of the episode + :param cur_quality: current quality of the episode :param show_obj: Series object of the episode we will check if we should search or not :param manually_searched: if episode was manually searched by user :return: True if need to run a search for given episode """ - cur_status, cur_quality = Quality.split_composite_status(int(status) or UNSET) allowed_qualities, preferred_qualities = show_obj.current_qualities # When user manually searched, we should consider this as final quality. 
@@ -759,15 +759,16 @@ def from_guessit(guess): return quality if quality is not None else Quality.UNKNOWN @staticmethod - def to_guessit(status): - """Return a guessit dict containing 'screen_size and format' from a Quality (composite status). + def to_guessit(quality): + """Return a guessit dict containing 'screen_size and format' from a Quality (status). - :param status: a quality composite status - :type status: int + This was previously a composite status. But status/quality have been separated into their own attributes. + + :param quality: a quality status + :type quality: int :return: dict {'screen_size': , 'format': } :rtype: dict (str, str) """ - _, quality = Quality.split_composite_status(status) screen_size = Quality.to_guessit_screen_size(quality) fmt = Quality.to_guessit_format(quality) result = dict() diff --git a/medusa/failed_history.py b/medusa/failed_history.py index 72fe9349ed..ca85003421 100644 --- a/medusa/failed_history.py +++ b/medusa/failed_history.py @@ -187,8 +187,7 @@ def mark_failed(ep_obj): try: with ep_obj.lock: - quality = Quality.split_composite_status(ep_obj.status)[1] - ep_obj.status = Quality.composite_status(FAILED, quality) + ep_obj.status = FAILED ep_obj.save_to_db() except EpisodeNotFoundException as error: diff --git a/medusa/history.py b/medusa/history.py index 1151379fac..daf8eabf60 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -26,16 +26,13 @@ from medusa.show.history import History -def _logHistoryItem(action, ep_obj, quality, resource, - provider, version=-1, proper_tags='', manually_searched=False, info_hash=None, size=-1): +def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tags='', + manually_searched=False, info_hash=None, size=-1): """ Insert a history item in DB :param action: action taken (snatch, download, etc) - :param showid: showid this entry is about - :param season: show season - :param episode: show episode - :param quality: media quality + :param ep_obj: episode object 
:param resource: resource used :param provider: provider used :param version: tracked version of file (defaults to -1) @@ -49,7 +46,7 @@ def _logHistoryItem(action, ep_obj, quality, resource, "(action, date, indexer_id, showid, season, episode, quality, " "resource, provider, version, proper_tags, manually_searched, info_hash, size) " "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, quality, + [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, ep_obj.quality, resource, provider, version, proper_tags, manually_searched, info_hash, size]) @@ -78,8 +75,8 @@ def log_snatch(searchResult): resource = searchResult.name - _logHistoryItem(action, ep_obj, quality, resource, - provider, version, proper_tags, manually_searched, info_hash, size) + _log_history_item(action, ep_obj, resource, + provider, version, proper_tags, manually_searched, info_hash, size) def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=-1): @@ -94,8 +91,6 @@ def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=- """ size = int(ep_obj.file_size) - quality = new_ep_quality - # store the release group as the provider if possible if release_group: provider = release_group @@ -104,10 +99,10 @@ def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=- action = ep_obj.status - _logHistoryItem(action, ep_obj, quality, filename, provider, version, size=size) + _log_history_item(action, ep_obj, filename, provider, version, size=size) -def logSubtitle(ep_obj, status, subtitle_result): +def logSubtitle(ep_obj, subtitle_result): """ Log download of subtitle @@ -120,10 +115,7 @@ def logSubtitle(ep_obj, status, subtitle_result): resource = subtitle_result.language.opensubtitles provider = subtitle_result.provider_name - status, quality = Quality.split_composite_status(status) - action = 
Quality.composite_status(SUBTITLED, quality) - - _logHistoryItem(action, ep_obj, quality, resource, provider) + _log_history_item(SUBTITLED, ep_obj, resource, provider) def log_failed(ep_obj, release, provider=None): @@ -134,7 +126,5 @@ def log_failed(ep_obj, release, provider=None): :param release: Release group :param provider: Provider used for snatch """ - _, quality = Quality.split_composite_status(ep_obj.status) - action = Quality.composite_status(FAILED, quality) - _logHistoryItem(action, ep_obj, quality, release, provider) + _log_history_item(FAILED, ep_obj, release, provider) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 8ae1157f71..b232c41d68 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -738,23 +738,21 @@ def _get_ep_obj(self, series_obj, season, episodes): return root_ep - def _quality_from_status(self, status): - """ - Determine the quality of the file that is being post processed with its status. - - :param status: The status related to the file we are post processing - :return: A quality value found in common.Quality - """ - quality = common.Quality.UNKNOWN - - if status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: - _, quality = common.Quality.split_composite_status(status) - if quality != common.Quality.UNKNOWN: - self.log(u'The snatched status has a quality in it, using that: {0}'.format - (common.Quality.qualityStrings[quality]), logger.DEBUG) - return quality - - return quality + # def _quality_from_status(self, ep_obj): + # """ + # Determine the quality of the file that is being post processed with its status. + # + # :param ep_obj: episode object. 
+ # :return: A quality value found in common.Quality + # """ + # + # if ep_obj.status in (common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST): + # if ep_obj.quality != common.Quality.UNKNOWN: + # self.log(u'The snatched status has a quality in it, using that: {0}'.format + # (common.Quality.qualityStrings[ep_obj.quality]), logger.DEBUG) + # return ep_obj.quality + # + # return common.UNKNOWN def _get_quality(self, ep_obj): """ @@ -794,7 +792,7 @@ def _priority_from_history(self, series_obj, season, episodes, quality): for episode in episodes: # First: check if the episode status is snatched tv_episodes_result = main_db_con.select( - 'SELECT status ' + 'SELECT status, quality ' 'FROM tv_episodes ' 'WHERE indexer = ? ' 'AND showid = ? ' @@ -1018,7 +1016,7 @@ def process(self): # retrieve/create the corresponding Episode objects ep_obj = self._get_ep_obj(series_obj, season, episodes) - _, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) + old_ep_quality = ep_obj.quality # get the quality of the episode we're processing if quality and common.Quality.qualityStrings[quality] != 'Unknown': @@ -1026,7 +1024,8 @@ def process(self): (common.Quality.qualityStrings[quality]), logger.DEBUG) new_ep_quality = quality else: - new_ep_quality = self._quality_from_status(ep_obj.status) + # Fall back to the episode object's quality + new_ep_quality = ep_obj.quality # check snatched history to see if we should set the download as priority self._priority_from_history(series_obj, season, episodes, new_ep_quality) diff --git a/medusa/refiners/tv_episode.py b/medusa/refiners/tv_episode.py index db9baa23cc..df5fd96495 100644 --- a/medusa/refiners/tv_episode.py +++ b/medusa/refiners/tv_episode.py @@ -69,7 +69,7 @@ def refine(video, tv_episode=None, **kwargs): log.debug('Refining using Episode information.') enrich(EPISODE_MAPPING, video, tv_episode) enrich(ADDITIONAL_MAPPING, video, tv_episode, overwrite=False) - guess = Quality.to_guessit(tv_episode.status) + 
guess = Quality.to_guessit(tv_episode.quality) enrich({'resolution': guess.get('screen_size'), 'format': guess.get('format')}, video, overwrite=False) diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index 1444e1a696..4af66f0c6b 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -9,7 +9,8 @@ from builtins import object from builtins import str -from medusa import app, common, db, scheduler, ui +from medusa import app, db, scheduler, ui +from medusa.common import UNSET, Quality from medusa.helper.common import episode_num from medusa.logger.adapters.style import BraceAdapter from medusa.search.queue import BacklogQueueItem @@ -167,19 +168,21 @@ def _get_segments(series_obj, from_date): ) # check through the list of statuses to see if we want any - for sql_result in sql_results: - should_search, shold_search_reason = common.Quality.should_search(sql_result[b'status'], series_obj, - sql_result[b'manually_searched']) + for episode in sql_results: + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or 0) + should_search, should_search_reason = Quality.should_search( + cur_status, cur_quality, series_obj, episode[b'manually_searched'] + ) if not should_search: continue log.debug( u'Found needed backlog episodes for: {show} {ep}. 
Reason: {reason}', { 'show': series_obj.name, - 'ep': episode_num(sql_result[b'season'], sql_result[b'episode']), - 'reason': shold_search_reason, + 'ep': episode_num(episode[b'season'], episode[b'episode']), + 'reason': should_search_reason, } ) - ep_obj = series_obj.get_episode(sql_result[b'season'], sql_result[b'episode']) + ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode']) if ep_obj.season not in wanted: wanted[ep_obj.season] = [ep_obj] diff --git a/medusa/search/core.py b/medusa/search/core.py index a4afd38a0a..35f4640ae6 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -217,7 +217,7 @@ def snatch_episode(result): sql_l.append(curEpObj.get_sql()) - if curEpObj.splitted_status_status != common.DOWNLOADED: + if curEpObj.status != common.DOWNLOADED: notify_message = curEpObj.formatted_filename(u'%SN - %Sx%0E - %EN - %QN') if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), result.leechers not in (-1, None)]): @@ -392,7 +392,7 @@ def wanted_episodes(series_obj, from_date): con = db.DBConnection() sql_results = con.select( - 'SELECT status, season, episode, manually_searched ' + 'SELECT status, quality, season, episode, manually_searched ' 'FROM tv_episodes ' 'WHERE indexer = ? ' ' AND showid = ?' @@ -402,20 +402,22 @@ def wanted_episodes(series_obj, from_date): ) # check through the list of statuses to see if we want any - for result in sql_results: - _, cur_quality = common.Quality.split_composite_status(int(result[b'status'] or UNSET)) - should_search, should_search_reason = Quality.should_search(result[b'status'], series_obj, result[b'manually_searched']) + for episode in sql_results: + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or 0) + should_search, should_search_reason = Quality.should_search( + cur_status, cur_quality, series_obj, episode[b'manually_searched'] + ) if not should_search: continue else: log.debug( u'Searching for {show} {ep}. 
Reason: {reason}', { u'show': series_obj.name, - u'ep': episode_num(result[b'season'], result[b'episode']), + u'ep': episode_num(episode[b'season'], episode[b'episode']), u'reason': should_search_reason, } ) - ep_obj = series_obj.get_episode(result[b'season'], result[b'episode']) + ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode']) ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality and i != Quality.UNKNOWN] wanted.append(ep_obj) diff --git a/medusa/search/daily.py b/medusa/search/daily.py index cafb7e0a9b..de499da56d 100644 --- a/medusa/search/daily.py +++ b/medusa/search/daily.py @@ -96,11 +96,11 @@ def run(self, force=False): # pylint:disable=too-many-branches cur_ep = series_obj.get_episode(db_episode[b'season'], db_episode[b'episode']) with cur_ep.lock: - cur_ep.splitted_status_status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED + cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED log.info( 'Setting status ({status}) for show airing today: {name} {special}', { 'name': cur_ep.pretty_name(), - 'status': common.statusStrings[cur_ep.splitted_status_status], + 'status': common.statusStrings[cur_ep.status], 'special': '(specials are not supported)' if not cur_ep.season else '', } ) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index f1139c380d..abb3082d7d 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -36,9 +36,8 @@ def get_quality_class(ep_obj): """Find the quality class for the episode.""" - _, ep_quality = Quality.split_composite_status(ep_obj.status) - if ep_quality in Quality.cssClassStrings: - quality_class = Quality.cssClassStrings[ep_quality] + if ep_obj.quality in Quality.cssClassStrings: + quality_class = Quality.cssClassStrings[ep_obj.quality] else: quality_class = Quality.cssClassStrings[Quality.UNKNOWN] diff --git a/medusa/search/proper.py b/medusa/search/proper.py index 30ecb7f93b..1aa63800d4 100644 --- 
a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -228,7 +228,7 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # check if we have the episode as DOWNLOADED main_db_con = db.DBConnection() - sql_results = main_db_con.select(b"SELECT status, release_name " + sql_results = main_db_con.select(b"SELECT status, quality, release_name " b"FROM tv_episodes WHERE indexer = ? " b"AND showid = ? AND season = ? " b"AND episode = ? AND status LIKE '%04'", @@ -243,7 +243,7 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran continue # only keep the proper if we have already downloaded an episode with the same quality - _, old_quality = Quality.split_composite_status(int(sql_results[0][b'status'])) + old_quality = int(sql_results[0][b'quality']) if old_quality != best_result.quality: log.info('Ignoring proper because quality is different: {name}', {'name': best_result.name}) if cur_proper.name not in processed_propers_names: diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index a6e9db892a..bfe8109d4f 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -719,7 +719,7 @@ def run(self): main_db_con = db.DBConnection(row_type='dict') sql_results = main_db_con.select( - 'SELECT name, description, airdate, status, location, file_size, release_name, subtitles ' + 'SELECT name, description, airdate, status, quality, location, file_size, release_name, subtitles ' 'FROM tv_episodes WHERE indexer = ? AND showid = ? AND episode = ? 
AND season = ?', [INDEXER_TVDBV2, self.indexerid, self.e, self.s]) if not len(sql_results) == 1: @@ -748,7 +748,7 @@ def run(self): else: episode[b'airdate'] = 'Never' - status, quality = Quality.split_composite_status(int(episode[b'status'])) + status, quality = int(episode[b'status']), int(episode[b'quality']) episode[b'status'] = statusStrings[status] episode[b'quality'] = get_quality_string(quality) episode[b'file_size_human'] = pretty_file_size(episode[b'file_size']) @@ -799,10 +799,8 @@ def run(self): # return the correct json value if ep_queue_item.success: - _, quality = Quality.split_composite_status(ep_obj.status) - # TODO: split quality and status? - return _responds(RESULT_SUCCESS, {'quality': get_quality_string(quality)}, - 'Snatched ({0})'.format(get_quality_string(quality))) + return _responds(RESULT_SUCCESS, {'quality': get_quality_string(ep_obj.quality)}, + 'Snatched ({0})'.format(get_quality_string(ep_obj.quality))) return _responds(RESULT_FAILURE, msg='Unable to find episode') @@ -1070,6 +1068,7 @@ def convert_date(history_date): History.date_format ).strftime(dateTimeFormat) + # FIXME: Can't really do anything about this now. History -> action, should also be separated? 
composite = Quality.split_composite_status(cur_item.action) if cur_type in (statusStrings[composite.status].lower(), None): return { @@ -1182,7 +1181,9 @@ def run(self): for cur_result in sql_results: - cur_ep_cat = cur_show.get_overview(cur_result[b'status'], manually_searched=cur_result[b'manually_searched']) + cur_ep_cat = cur_show.get_overview( + cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched'] + ) if cur_ep_cat and cur_ep_cat in (Overview.WANTED, Overview.QUAL): show_eps.append(cur_result) @@ -2534,12 +2535,12 @@ def run(self): if self.season is None: sql_results = main_db_con.select( - 'SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles ' + 'SELECT name, episode, airdate, status, quality, release_name, season, location, file_size, subtitles ' 'FROM tv_episodes WHERE indexer = ? AND showid = ?', [INDEXER_TVDBV2, self.indexerid]) seasons = {} for row in sql_results: - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) row[b'status'] = statusStrings[status] row[b'quality'] = get_quality_string(quality) if try_int(row[b'airdate'], 1) > 693595: # 1900 @@ -2558,7 +2559,7 @@ def run(self): else: sql_results = main_db_con.select( - 'SELECT name, episode, airdate, status, location, file_size, release_name, subtitles' + 'SELECT name, episode, airdate, status, quality, location, file_size, release_name, subtitles' ' FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? 
', [INDEXER_TVDBV2, self.indexerid, self.season]) if not sql_results: @@ -2567,7 +2568,7 @@ def run(self): for row in sql_results: cur_episode = int(row[b'episode']) del row[b'episode'] - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) row[b'status'] = statusStrings[status] row[b'quality'] = get_quality_string(quality) if try_int(row[b'airdate'], 1) > 693595: # 1900 @@ -2680,19 +2681,19 @@ def run(self): episode_qualities_counts_snatch[statusCode] = 0 main_db_con = db.DBConnection(row_type='dict') - sql_results = main_db_con.select('SELECT status, season FROM tv_episodes ' + sql_results = main_db_con.select('SELECT status, quality, season FROM tv_episodes ' 'WHERE season != 0 AND indexer = ? AND showid = ?', [INDEXER_TVDBV2, self.indexerid]) # the main loop that goes through all episodes for row in sql_results: - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) episode_status_counts_total['total'] += 1 - if status in Quality.DOWNLOADED + Quality.ARCHIVED: + if status in DOWNLOADED + ARCHIVED: episode_qualities_counts_download['total'] += 1 episode_qualities_counts_download[int(row[b'status'])] += 1 - elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER: + elif status in SNATCHED + SNATCHED_PROPER: episode_qualities_counts_snatch['total'] += 1 episode_qualities_counts_snatch[int(row[b'status'])] += 1 elif status == 0: # we don't count NONE = 0 = N/A diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index d9873838b9..9f2bdd0e1f 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1917,9 +1917,11 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire return self._genericMessage('Error', error_message) # statusStrings is a custom type. Which does some "magic" itself. But we want to move away from this. 
- # FIXME: Always check status with status and quality with quality. - status_with_quality = status + # Currently status is passed from displayShow as a composite status+quality. Therefor we need to separate + # the status from it. + status = Quality.split_composite_status(status).status + quality = Quality.split_composite_status(status).quality if status not in statusStrings: error_message = 'Invalid status' @@ -2011,7 +2013,8 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire # Only in failed_history we set to FAILED. # We need current snatched quality to log 'quality' column in failed action in history if status != FAILED: - ep_obj.status = status_with_quality + ep_obj.status = status + ep_obj.quality = quality # mass add to database sql_l.append(ep_obj.get_sql()) diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index bd4287d05f..f952b07d29 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -21,8 +21,10 @@ ) from medusa.common import ( Overview, - Quality, + DOWNLOADED, SNATCHED, + SNATCHED_PROPER, + SNATCHED_BEST, ) from medusa.helper.common import ( episode_num, @@ -58,7 +60,7 @@ def index(self): def showEpisodeStatuses(indexername, seriesid, whichStatus): status_list = [int(whichStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] main_db_con = db.DBConnection() cur_show_results = main_db_con.select( @@ -86,7 +88,7 @@ def episodeStatuses(self, whichStatus=None): if whichStatus: status_list = [int(whichStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] else: status_list = [] @@ -136,7 +138,7 @@ def episodeStatuses(self, whichStatus=None): def changeEpisodeStatuses(self, oldStatus, newStatus, 
*args, **kwargs): status_list = [int(oldStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] to_change = {} @@ -178,6 +180,7 @@ def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs): @staticmethod def showSubtitleMissed(indexer, seriesid, whichSubs): main_db_con = db.DBConnection() + # TODO: maybe need to check if %4 still applies here. cur_show_results = main_db_con.select( b'SELECT season, episode, name, subtitles ' b'FROM tv_episodes ' @@ -220,6 +223,7 @@ def subtitleMissed(self, whichSubs=None): controller='manage', action='subtitleMissed') main_db_con = db.DBConnection() + # TODO: maybe need to check if %4 still applies here. status_results = main_db_con.select( b'SELECT show_name, tv_shows.show_id, tv_shows.indexer, ' b'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles ' @@ -281,6 +285,7 @@ def downloadSubtitleMissed(self, *args, **kwargs): # get a list of all the eps we want to download subtitles if they just said 'all' if 'all' in to_download[(cur_indexer_id, cur_series_id)]: main_db_con = db.DBConnection() + # TODO: maybe need to check if %4 still applies here. 
all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' @@ -322,10 +327,10 @@ def subtitleMissedPP(self): logger.log(u"Filename '{0}' cannot be parsed to an episode".format(filename), logger.DEBUG) continue - ep_status = Quality.split_composite_status(tv_episode.status).status - if ep_status in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST: + ep_status = tv_episode.status + if ep_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): status = 'snatched' - elif ep_status in Quality.DOWNLOADED: + elif ep_status in DOWNLOADED: status = 'downloaded' else: continue diff --git a/medusa/subtitles.py b/medusa/subtitles.py index e995470907..9899601210 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -472,7 +472,6 @@ def save_subs(tv_episode, video, found_subtitles, video_path=None): episode = tv_episode.episode episode_name = tv_episode.name show_indexerid = tv_episode.series.indexerid - status = tv_episode.status subtitles_dir = get_subtitles_dir(video_path) saved_subtitles = save_subtitles(video, found_subtitles, directory=_encode(subtitles_dir), single=not app.SUBTITLES_MULTI) @@ -493,7 +492,7 @@ def save_subs(tv_episode, video, found_subtitles, video_path=None): if app.SUBTITLES_HISTORY: logger.debug(u'Logging to history downloaded subtitle from provider %s and language %s', subtitle.provider_name, subtitle.language.opensubtitles) - history.logSubtitle(tv_episode, status, subtitle) + history.logSubtitle(tv_episode, subtitle) # Refresh the subtitles property if tv_episode.location: diff --git a/medusa/tv/series.py b/medusa/tv/series.py index f76a1aefb2..b0890e6697 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -33,10 +33,15 @@ from medusa.black_and_white_list import BlackAndWhiteList from medusa.common import ( ARCHIVED, + DOWNLOADED, IGNORED, + FAILED, Overview, Quality, SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, UNSET, WANTED, @@ -1746,11 +1751,10 @@ def 
refresh_dir(self): with cur_ep.lock: # if it used to have a file associated with it and it doesn't anymore then # set it to app.EP_DEFAULT_DELETED_STATUS - if cur_ep.location and cur_ep.status in Quality.DOWNLOADED: + if cur_ep.location and cur_ep.status in DOWNLOADED: if app.EP_DEFAULT_DELETED_STATUS == ARCHIVED: - _, old_quality = Quality.split_composite_status(cur_ep.status) - new_status = Quality.composite_status(ARCHIVED, old_quality) + new_status = ARCHIVED else: new_status = app.EP_DEFAULT_DELETED_STATUS @@ -2129,7 +2133,7 @@ def want_episode(self, season, episode, quality, forced_search=False, main_db_con = db.DBConnection() sql_results = main_db_con.select( b'SELECT ' - b' status, ' + b' status, quality, ' b' manually_searched ' b'FROM ' b' tv_episodes ' @@ -2151,7 +2155,7 @@ def want_episode(self, season, episode, quality, forced_search=False, ) return False - cur_status, cur_quality = Quality.split_composite_status(int(sql_results[0][b'status'])) + cur_status, cur_quality = int(sql_results[0][b'status']), int(sql_results[0][b'quality']) ep_status_text = statusStrings[cur_status] manually_searched = sql_results[0][b'manually_searched'] @@ -2182,11 +2186,13 @@ def want_episode(self, season, episode, quality, forced_search=False, ) return should_replace - def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): + def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searched=False): """Get the Overview status from the Episode status. 
:param ep_status: an Episode status :type ep_status: int + :param ep_quality: an Episode status + :type ep_quality: int :param backlog_mode: if we should return overview for backlogOverview :type backlog_mode: boolean :param manually_searched: if episode was manually searched @@ -2199,7 +2205,7 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): if backlog_mode: if ep_status == WANTED: return Overview.WANTED - elif Quality.should_search(ep_status, self, manually_searched)[0]: + elif Quality.should_search(ep_status, ep_quality, self, manually_searched)[0]: return Overview.QUAL return Overview.GOOD @@ -2207,19 +2213,19 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False): return Overview.UNAIRED elif ep_status in (SKIPPED, IGNORED): return Overview.SKIPPED - elif ep_status in Quality.WANTED: + elif ep_status in WANTED: return Overview.WANTED - elif ep_status in Quality.ARCHIVED: + elif ep_status in ARCHIVED: return Overview.GOOD - elif ep_status in Quality.FAILED: + elif ep_status in FAILED: return Overview.WANTED - elif ep_status in Quality.SNATCHED: + elif ep_status in SNATCHED: return Overview.SNATCHED - elif ep_status in Quality.SNATCHED_PROPER: + elif ep_status in SNATCHED_PROPER: return Overview.SNATCHED_PROPER - elif ep_status in Quality.SNATCHED_BEST: + elif ep_status in SNATCHED_BEST: return Overview.SNATCHED_BEST - elif ep_status in Quality.DOWNLOADED: + elif ep_status in DOWNLOADED: if Quality.should_search(ep_status, self, manually_searched)[0]: return Overview.QUAL else: @@ -2242,9 +2248,9 @@ def get_backlogged_episodes(self, allowed_qualities, preferred_qualities, includ for ep_obj in ep_list: if not include_wanted and ep_obj.status == WANTED: continue - if Quality.should_search(ep_obj.status, show_obj, ep_obj.manually_searched)[0]: + if Quality.should_search(ep_obj.status, ep_obj.quality, show_obj, ep_obj.manually_searched)[0]: new_backlogged += 1 - if Quality.should_search(ep_obj.status, 
self, ep_obj.manually_searched)[0]: + if Quality.should_search(ep_obj.status, ep_obj.quality, self, ep_obj.manually_searched)[0]: existing_backlogged += 1 else: new_backlogged = existing_backlogged = -1 @@ -2263,11 +2269,10 @@ def set_all_episodes_archived(self, final_status_only=False): for ep_obj in ep_list: with ep_obj.lock: if ep_obj.status in Quality.DOWNLOADED: - if final_status_only and Quality.should_search(ep_obj.status, self, + if final_status_only and Quality.should_search(ep_obj.status, ep_obj.quality, self, ep_obj.manually_searched)[0]: continue - _, old_quality = Quality.split_composite_status(ep_obj.status) - ep_obj.status = Quality.composite_status(ARCHIVED, old_quality) + ep_obj.status = ARCHIVED sql_list.append(ep_obj.get_sql()) if sql_list: main_db_con = db.DBConnection() diff --git a/tests/test_common.py b/tests/test_common.py index 846e7433e2..fe87c6ca0b 100644 --- a/tests/test_common.py +++ b/tests/test_common.py @@ -201,7 +201,7 @@ def test_from_guessit(self, p): ]) def test_to_guessit(self, p): # Given - quality = Quality.composite_status(DOWNLOADED, p['quality']) + quality = p['quality'] expected = p['expected'] # When From ce656540a0d2c90e3adde0d3df29fde5fa6f4633 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 00:07:59 +0200 Subject: [PATCH 06/93] Missed quality field in query --- medusa/search/backlog.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index 4af66f0c6b..d63bc07c03 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -159,7 +159,7 @@ def _get_segments(series_obj, from_date): con = db.DBConnection() sql_results = con.select( - 'SELECT status, season, episode, manually_searched ' + 'SELECT status, quality, season, episode, manually_searched ' 'FROM tv_episodes ' 'WHERE airdate > ?' ' AND indexer = ? 
' From 7208c0d5f288730f03f428b28115db032024fc9b Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 00:11:58 +0200 Subject: [PATCH 07/93] -1 is not a valid default quality. --- medusa/databases/main_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 2a7278382a..8b5bcf6924 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -749,7 +749,7 @@ def execute(self): 'hastbn, status, location, file_size, release_name, ' 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' 'is_proper, scene_season, scene_episode, absolute_number, ' - 'scene_absolute_number, version, release_group, manually_searched, -1, -1 ' + 'scene_absolute_number, version, release_group, manually_searched, -1, 0 ' 'FROM tv_episodes;') self.connection.action("DROP TABLE IF EXISTS tv_episodes;") self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") From d9c45af8de7c31f2f16244086a748e4f0f2e6831 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 00:18:08 +0200 Subject: [PATCH 08/93] Fixed default init of quality. --- medusa/tv/episode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index d738f5e16e..bbf42f9f24 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -628,7 +628,7 @@ def load_from_db(self, season, episode): self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) self.status = int(sql_results[0][b'status'] or -1) - self.quality = int(sql_results[0][b'quality'] or -1) + self.quality = int(sql_results[0][b'quality'] or 0) # don't overwrite my location if sql_results[0][b'location']: From c6c351f0c1db43fc852395c3b00b2d68e6a17377 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 13:13:35 +0200 Subject: [PATCH 09/93] Fix call to overview status. 
--- medusa/server/web/home/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 9f2bdd0e1f..1931effbd8 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -936,7 +936,7 @@ def displayShow(self, indexername=None, seriesid=None, ): ep_cats = {} for cur_result in sql_results: - cur_ep_cat = series_obj.get_overview(cur_result[b'status'], manually_searched=cur_result[b'manually_searched']) + cur_ep_cat = series_obj.get_overview(cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: ep_cats['{season}x{episode}'.format(season=cur_result[b'season'], episode=cur_result[b'episode'])] = cur_ep_cat ep_counts[cur_ep_cat] += 1 From d38f701d5c90c902074d3be27dc9956b58c0183f Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 13:19:01 +0200 Subject: [PATCH 10/93] Adapted the history table. Action will include the status, quality (hasn't changed) but will include quality. Action will only have status, quality has been removed from it. * Changed show/history.py, to include quality. As we can't get it from action anymore. --- medusa/databases/main_db.py | 10 ++++++- medusa/show/history.py | 19 +++++++------- themes-default/slim/views/history.mako | 35 +++++++++++++------------ themes-default/slim/views/inc_defs.mako | 6 ++++- themes/dark/templates/history.mako | 35 +++++++++++++------------ themes/dark/templates/inc_defs.mako | 6 ++++- 6 files changed, 64 insertions(+), 47 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 8b5bcf6924..1beb07e673 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -765,7 +765,7 @@ def execute(self): # Remove ep_status and ep_quality and add quality field. 
# Move status from ep_status and quality from ep_quality - log.info(u'Adding data from ep_status and ep_quality fields into status/quality fields the tv_episodes table') + log.info(u'Adding data from ep_status and ep_quality fields to status/quality fields the tv_episodes table') self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') self.connection.action('CREATE TABLE IF NOT EXISTS new_tv_episodes ' @@ -793,4 +793,12 @@ def execute(self): self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") + log.info(u'Remove the quality from the action field, as this is a composite status') + sql_results = self.connection.select("SELECT action from history GROUP BY action") + + for status in sql_results: + split = common.Quality.split_composite_status(status[b'action']) + self.connection.action("UPDATE history SET action = ? WHERE action = ?", + [split.status, status[b'action']]) + self.inc_minor_version() diff --git a/medusa/show/history.py b/medusa/show/history.py index 2e66125f0b..7d59e380f8 100644 --- a/medusa/show/history.py +++ b/medusa/show/history.py @@ -20,8 +20,7 @@ from builtins import object from collections import namedtuple from datetime import datetime, timedelta - -from medusa.common import Quality +from medusa.common import DOWNLOADED, SNATCHED from medusa.helper.common import try_int from six import itervalues, text_type @@ -60,8 +59,8 @@ def get(self, limit=100, action=None): actions = History._get_actions(action) limit = max(try_int(limit), 0) - common_sql = 'SELECT show_name, h.indexer_id, showid, season, episode, h.quality, ' \ - 'action, provider, resource, date, h.proper_tags, h.manually_searched ' \ + common_sql = 'SELECT show_name, h.indexer_id, showid, season, episode, action, h.quality, ' \ + ' provider, resource, date, h.proper_tags, h.manually_searched ' \ 'FROM history h, tv_shows s ' \ 'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer ' 
filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') ' @@ -110,18 +109,18 @@ def _get_actions(action): result = None if action == 'downloaded': - result = Quality.DOWNLOADED + result = DOWNLOADED elif action == 'snatched': - result = Quality.SNATCHED + result = SNATCHED return result or [] - action_fields = ('action', 'provider', 'resource', 'date', 'proper_tags', 'manually_searched') + action_fields = ('action', 'quality', 'provider', 'resource', 'date', 'proper_tags', 'manually_searched') # A specific action from history Action = namedtuple('Action', action_fields) Action.width = len(action_fields) - index_fields = ('indexer_id', 'show_id', 'season', 'episode', 'quality') + index_fields = ('indexer_id', 'show_id', 'season', 'episode') # An index for an item or compact item from history Index = namedtuple('Index', index_fields) Index.width = len(index_fields) @@ -152,8 +151,7 @@ def index(self): self.indexer_id, self.show_id, self.season, - self.episode, - self.quality, + self.episode ) @property @@ -163,6 +161,7 @@ def cur_action(self): """ return History.Action( self.action, + self.quality, self.provider, self.resource, self.date, diff --git a/themes-default/slim/views/history.mako b/themes-default/slim/views/history.mako index ee07d472d0..d3ac56976a 100644 --- a/themes-default/slim/views/history.mako +++ b/themes-default/slim/views/history.mako @@ -9,7 +9,7 @@ from medusa import providers from medusa.sbdatetime import sbdatetime from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED - from medusa.common import Quality, statusStrings, Overview + from medusa.common import statusStrings from medusa.show.history import History from medusa.providers.generic_provider import GenericProvider %> @@ -149,7 +149,6 @@ const startVue = () => { % for hItem in historyResults: - <% composite = Quality.split_composite_status(int(hItem.action)) %> <% airDate = 
sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> @@ -157,11 +156,11 @@ const startVue = () => { ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - - % if composite.status == SUBTITLED: + + % if hItem.action == SUBTITLED: % endif - ${statusStrings[composite.status]} + ${statusStrings[hItem.action]} % if hItem.manually_searched: % endif @@ -169,8 +168,9 @@ const startVue = () => { % endif + - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if hItem.action in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} % else: @@ -178,7 +178,7 @@ const startVue = () => { % endif % else: % if hItem.provider > 0: - % if composite.status in [SNATCHED, FAILED]: + % if hItem.action in [SNATCHED, FAILED]: <% provider = providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %> % if provider is not None: ${provider.name} @@ -191,8 +191,8 @@ const startVue = () => { % endif % endif - ${composite.quality} - ${renderQualityPill(composite.quality)} + ${hItem.quality} + ${renderQualityPill(hItem.quality)} % endfor @@ -230,8 +230,7 @@ const startVue = () => { % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SNATCHED: + % if cur_action.action == SNATCHED: <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %> % if provider is not None: ${provider.name} @@ -245,15 +244,14 @@ const startVue = () => { missing provider % endif % endif - % if composite.status == FAILED: + % if cur_action.action == FAILED: % endif % endfor % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if cur_action.action in [DOWNLOADED, ARCHIVED]: % if cur_action.provider != "-1": 
${cur_action.provider} % else: @@ -265,8 +263,7 @@ const startVue = () => { % if app.USE_SUBTITLES: % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SUBTITLED: + % if cur_action.action == SUBTITLED: ${cur_action.provider} / @@ -275,7 +272,11 @@ const startVue = () => { % endfor % endif - ${renderQualityPill(composite.quality)} + + % for cur_action in sorted(hItem.actions, key=lambda x: x.date): + ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} + % endfor + % endfor diff --git a/themes-default/slim/views/inc_defs.mako b/themes-default/slim/views/inc_defs.mako index 7355ca109e..81d1fdbd1a 100644 --- a/themes-default/slim/views/inc_defs.mako +++ b/themes-default/slim/views/inc_defs.mako @@ -2,7 +2,7 @@ import cgi from medusa.common import Quality, qualityPresets, qualityPresetStrings %> -<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><% +<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute if showTitle: allowed_qualities, preferred_qualities = Quality.split_quality(quality) @@ -21,6 +21,10 @@ title = ' title="' + cgi.escape(title.rstrip(), True) + '"' else: title = "" + + if customTitle: + title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"' + sum_allowed_qualities = quality & 0xFFFF sum_preferred_qualities = quality >> 16 set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV} diff --git a/themes/dark/templates/history.mako b/themes/dark/templates/history.mako index ee07d472d0..d3ac56976a 100644 --- a/themes/dark/templates/history.mako +++ b/themes/dark/templates/history.mako @@ -9,7 +9,7 @@ from medusa import providers from medusa.sbdatetime import sbdatetime from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, 
DOWNLOADED, SUBTITLED - from medusa.common import Quality, statusStrings, Overview + from medusa.common import statusStrings from medusa.show.history import History from medusa.providers.generic_provider import GenericProvider %> @@ -149,7 +149,6 @@ const startVue = () => { % for hItem in historyResults: - <% composite = Quality.split_composite_status(int(hItem.action)) %> <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> @@ -157,11 +156,11 @@ const startVue = () => { ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - - % if composite.status == SUBTITLED: + + % if hItem.action == SUBTITLED: % endif - ${statusStrings[composite.status]} + ${statusStrings[hItem.action]} % if hItem.manually_searched: % endif @@ -169,8 +168,9 @@ const startVue = () => { % endif + - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if hItem.action in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} % else: @@ -178,7 +178,7 @@ const startVue = () => { % endif % else: % if hItem.provider > 0: - % if composite.status in [SNATCHED, FAILED]: + % if hItem.action in [SNATCHED, FAILED]: <% provider = providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %> % if provider is not None: ${provider.name} @@ -191,8 +191,8 @@ const startVue = () => { % endif % endif - ${composite.quality} - ${renderQualityPill(composite.quality)} + ${hItem.quality} + ${renderQualityPill(hItem.quality)} % endfor @@ -230,8 +230,7 @@ const startVue = () => { % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SNATCHED: + % if cur_action.action == SNATCHED: <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %> % if provider is not None: ${provider.name} @@ -245,15 +244,14 @@ const startVue = () 
=> { missing provider % endif % endif - % if composite.status == FAILED: + % if cur_action.action == FAILED: % endif % endfor % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if cur_action.action in [DOWNLOADED, ARCHIVED]: % if cur_action.provider != "-1": ${cur_action.provider} % else: @@ -265,8 +263,7 @@ const startVue = () => { % if app.USE_SUBTITLES: % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SUBTITLED: + % if cur_action.action == SUBTITLED: ${cur_action.provider} / @@ -275,7 +272,11 @@ const startVue = () => { % endfor % endif - ${renderQualityPill(composite.quality)} + + % for cur_action in sorted(hItem.actions, key=lambda x: x.date): + ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} + % endfor + % endfor diff --git a/themes/dark/templates/inc_defs.mako b/themes/dark/templates/inc_defs.mako index 7355ca109e..81d1fdbd1a 100644 --- a/themes/dark/templates/inc_defs.mako +++ b/themes/dark/templates/inc_defs.mako @@ -2,7 +2,7 @@ import cgi from medusa.common import Quality, qualityPresets, qualityPresetStrings %> -<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><% +<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute if showTitle: allowed_qualities, preferred_qualities = Quality.split_quality(quality) @@ -21,6 +21,10 @@ title = ' title="' + cgi.escape(title.rstrip(), True) + '"' else: title = "" + + if customTitle: + title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"' + sum_allowed_qualities = quality & 0xFFFF sum_preferred_qualities = quality >> 16 set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV} From dba2e97c20a02dcc9fb98e4e2f3e76e6fbab5c45 
Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 20 May 2018 13:21:05 +0200 Subject: [PATCH 11/93] Synced template changes. --- themes/light/templates/history.mako | 35 ++++++++++++++-------------- themes/light/templates/inc_defs.mako | 6 ++++- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/themes/light/templates/history.mako b/themes/light/templates/history.mako index ee07d472d0..d3ac56976a 100644 --- a/themes/light/templates/history.mako +++ b/themes/light/templates/history.mako @@ -9,7 +9,7 @@ from medusa import providers from medusa.sbdatetime import sbdatetime from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED - from medusa.common import Quality, statusStrings, Overview + from medusa.common import statusStrings from medusa.show.history import History from medusa.providers.generic_provider import GenericProvider %> @@ -149,7 +149,6 @@ const startVue = () => { % for hItem in historyResults: - <% composite = Quality.split_composite_status(int(hItem.action)) %> <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> @@ -157,11 +156,11 @@ const startVue = () => { ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - - % if composite.status == SUBTITLED: + + % if hItem.action == SUBTITLED: % endif - ${statusStrings[composite.status]} + ${statusStrings[hItem.action]} % if hItem.manually_searched: % endif @@ -169,8 +168,9 @@ const startVue = () => { % endif + - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if hItem.action in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} % else: @@ -178,7 +178,7 @@ const startVue = () => { % endif % else: % if hItem.provider > 0: - % if composite.status in [SNATCHED, FAILED]: + % if hItem.action in [SNATCHED, FAILED]: <% provider = 
providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %> % if provider is not None: ${provider.name} @@ -191,8 +191,8 @@ const startVue = () => { % endif % endif - ${composite.quality} - ${renderQualityPill(composite.quality)} + ${hItem.quality} + ${renderQualityPill(hItem.quality)} % endfor @@ -230,8 +230,7 @@ const startVue = () => { % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SNATCHED: + % if cur_action.action == SNATCHED: <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %> % if provider is not None: ${provider.name} @@ -245,15 +244,14 @@ const startVue = () => { missing provider % endif % endif - % if composite.status == FAILED: + % if cur_action.action == FAILED: % endif % endfor % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if cur_action.action in [DOWNLOADED, ARCHIVED]: % if cur_action.provider != "-1": ${cur_action.provider} % else: @@ -265,8 +263,7 @@ const startVue = () => { % if app.USE_SUBTITLES: % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SUBTITLED: + % if cur_action.action == SUBTITLED: ${cur_action.provider} / @@ -275,7 +272,11 @@ const startVue = () => { % endfor % endif - ${renderQualityPill(composite.quality)} + + % for cur_action in sorted(hItem.actions, key=lambda x: x.date): + ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} + % endfor + % endfor diff --git a/themes/light/templates/inc_defs.mako b/themes/light/templates/inc_defs.mako index 7355ca109e..81d1fdbd1a 100644 --- a/themes/light/templates/inc_defs.mako +++ b/themes/light/templates/inc_defs.mako @@ -2,7 +2,7 @@ import cgi from medusa.common import 
Quality, qualityPresets, qualityPresetStrings %> -<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><% +<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute if showTitle: allowed_qualities, preferred_qualities = Quality.split_quality(quality) @@ -21,6 +21,10 @@ title = ' title="' + cgi.escape(title.rstrip(), True) + '"' else: title = "" + + if customTitle: + title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"' + sum_allowed_qualities = quality & 0xFFFF sum_preferred_qualities = quality >> 16 set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV} From 5d834c10b4cea02d94217feb400d500079eabfb8 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 20 May 2018 18:28:51 +0200 Subject: [PATCH 12/93] Fix trying to iterate int --- medusa/tv/series.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index b0890e6697..cf6cc92a50 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -2213,19 +2213,19 @@ def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searc return Overview.UNAIRED elif ep_status in (SKIPPED, IGNORED): return Overview.SKIPPED - elif ep_status in WANTED: + elif ep_status == WANTED: return Overview.WANTED - elif ep_status in ARCHIVED: + elif ep_status == ARCHIVED: return Overview.GOOD - elif ep_status in FAILED: + elif ep_status == FAILED: return Overview.WANTED - elif ep_status in SNATCHED: + elif ep_status == SNATCHED: return Overview.SNATCHED - elif ep_status in SNATCHED_PROPER: + elif ep_status == SNATCHED_PROPER: return Overview.SNATCHED_PROPER - elif ep_status in SNATCHED_BEST: + elif ep_status == SNATCHED_BEST: return Overview.SNATCHED_BEST - elif ep_status in DOWNLOADED: + elif ep_status == DOWNLOADED: if Quality.should_search(ep_status, self, manually_searched)[0]: return Overview.QUAL else: From 
36c0c08969363e432b802eb5c9b06683a1e503a0 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 20 May 2018 18:32:04 +0200 Subject: [PATCH 13/93] Update should_search. Requires 4 arguments now --- medusa/tv/series.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index cf6cc92a50..770f0e1bc1 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -2226,7 +2226,7 @@ def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searc elif ep_status == SNATCHED_BEST: return Overview.SNATCHED_BEST elif ep_status == DOWNLOADED: - if Quality.should_search(ep_status, self, manually_searched)[0]: + if Quality.should_search(ep_status, ep_quality, self, manually_searched)[0]: return Overview.QUAL else: return Overview.GOOD From ffb764be318e4d167c4e2df0fe0e17335ffc1cd1 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 20 May 2018 19:37:36 +0200 Subject: [PATCH 14/93] Fix schedule page --- medusa/show/coming_episodes.py | 30 +++++++++++++++++------------- 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/medusa/show/coming_episodes.py b/medusa/show/coming_episodes.py index 237a3c961c..afa47724af 100644 --- a/medusa/show/coming_episodes.py +++ b/medusa/show/coming_episodes.py @@ -1,7 +1,5 @@ # coding=utf-8 # This file is part of Medusa. 
-# - # # Medusa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -25,10 +23,14 @@ from medusa import app from medusa.common import ( + ARCHIVED, + DOWNLOADED, IGNORED, - Quality, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, - WANTED, + WANTED ) from medusa.db import DBConnection from medusa.helper.common import dateFormat, timeFormat @@ -71,12 +73,14 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P today = date.today().toordinal() next_week = (date.today() + timedelta(days=7)).toordinal() recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal() - qualities_list = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED] + status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, + ARCHIVED, IGNORED] db = DBConnection() fields_to_select = ', '.join( - ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network', - 'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status'] + ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', + 'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name', + 'showid', 's.status'] ) results = db.select( 'SELECT %s ' % fields_to_select + @@ -86,13 +90,13 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P 'AND airdate < ? 
' 'AND s.indexer = e.indexer ' 'AND s.indexer_id = e.showid ' - 'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')', - [today, next_week] + qualities_list + 'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')', + [today, next_week] + status_list ) done_shows_list = [int(result[b'showid']) for result in results] placeholder = ','.join(['?'] * len(done_shows_list)) - placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER)) + placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER])) # FIXME: This inner join is not multi indexer friendly. results += db.select( @@ -109,7 +113,7 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P 'AND inner_e.airdate >= ? ' 'ORDER BY inner_e.airdate ASC LIMIT 1) ' 'AND e.status NOT IN (' + placeholder2 + ')', - done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER] ) results += db.select( @@ -120,8 +124,8 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P 'AND airdate < ? ' 'AND airdate >= ? ' 'AND e.status IN (?,?) 
' - 'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')', - [today, recently, WANTED, UNAIRED] + qualities_list + 'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')', + [today, recently, WANTED, UNAIRED] + status_list ) results = [dict(result) for result in results] From c7c3cf3cfb7ebe9048f84ddf81d2f195e653f07b Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 20 May 2018 19:58:02 +0200 Subject: [PATCH 15/93] Fix backlog overview --- medusa/server/web/manage/handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index f952b07d29..c79c8dcbf3 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -409,7 +409,7 @@ def backlogOverview(self): sql_results = main_db_con.select( """ - SELECT e.status, e.season, e.episode, e.name, e.airdate, e.manually_searched + SELECT e.status, e.quality, e.season, e.episode, e.name, e.airdate, e.manually_searched FROM tv_episodes as e WHERE e.season IS NOT NULL AND e.indexer = ? AND e.showid = ? @@ -420,7 +420,7 @@ def backlogOverview(self): filtered_episodes = [] backlogged_episodes = [dict(row) for row in sql_results] for cur_result in backlogged_episodes: - cur_ep_cat = cur_show.get_overview(cur_result[b'status'], backlog_mode=True, + cur_ep_cat = cur_show.get_overview(cur_result[b'status'], cur_result[b'quality'], backlog_mode=True, manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: if cur_ep_cat in selected_backlog_status and cur_result[b'airdate'] != 1: From f82335044d424397090b0a92df325de352385c05 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 21 May 2018 18:45:38 +0200 Subject: [PATCH 16/93] Fix queries with references to Quality.STATUS. Replace other occurrences of Quality.STATUS. 
--- medusa/common.py | 2 +- medusa/helpers/__init__.py | 4 +-- medusa/post_processor.py | 10 ++----- medusa/process_tv.py | 2 +- medusa/scene_numbering.py | 44 ++++++++++++++++++++++------- medusa/search/proper.py | 16 +++++------ medusa/server/api/v1/core.py | 4 ++- medusa/server/api/v2/stats.py | 10 +++++-- medusa/server/web/home/handler.py | 17 +++++------ medusa/server/web/manage/handler.py | 20 ++++++------- medusa/show/show.py | 12 +++++--- medusa/subtitles.py | 2 +- medusa/trakt_checker.py | 28 +++++++++--------- medusa/tv/episode.py | 3 +- 14 files changed, 100 insertions(+), 74 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index bbe49b8f6f..55d51b9cc5 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -666,7 +666,7 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe :param search_type: The search type, that started this method :return: True if the old quality should be replaced with new quality. """ - if ep_status and ep_status not in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER: + if ep_status and ep_status not in [DOWNLOADED, SNATCHED, SNATCHED_PROPER]: if not force: return False, 'Episode status is not DOWNLOADED|SNATCHED|SNATCHED PROPER. 
Ignoring new quality' diff --git a/medusa/helpers/__init__.py b/medusa/helpers/__init__.py index e5377da438..2180f75acf 100644 --- a/medusa/helpers/__init__.py +++ b/medusa/helpers/__init__.py @@ -1771,7 +1771,7 @@ def is_already_processed_media(full_filename): """Check if resource was already processed.""" main_db_con = db.DBConnection() history_result = main_db_con.select('SELECT action FROM history ' - "WHERE action LIKE '%04' " + "WHERE action = '4' " 'AND resource LIKE ?', ['%' + full_filename]) return bool(history_result) @@ -1796,7 +1796,7 @@ def is_info_hash_processed(info_hash): 'd.season = s.season AND ' 'd.episode = s.episode AND ' 'd.quality = s.quality ' - 'WHERE d.action LIKE "%04"', + 'WHERE d.action = "4"', [info_hash]) return bool(history_result) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index b232c41d68..f1881c1e71 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -798,9 +798,7 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND (status LIKE '%02' " - "OR status LIKE '%09' " - "OR status LIKE '%12')", + "AND status IN ('2', '9', '12') ", [series_obj.indexer, series_obj.series_id, season, episode] ) @@ -814,9 +812,7 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND (action LIKE '%02' " - "OR action LIKE '%09' " - "OR action LIKE '%12') " + "AND action IN ('2', '9', '12') " 'ORDER BY date DESC', [series_obj.indexer, series_obj.series_id, season, episode]) @@ -838,7 +834,7 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND season = ? ' 'AND episode = ? ' 'AND quality = ? 
' - "AND action LIKE '%04' " + "AND action = '4' " 'ORDER BY date DESC', [series_obj.indexer, series_obj.series_id, season, episode, quality]) diff --git a/medusa/process_tv.py b/medusa/process_tv.py index 907578babc..2aee1c35a2 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -498,7 +498,7 @@ def already_postprocessed(self, video_file): main_db_con = db.DBConnection() history_result = main_db_con.select( 'SELECT * FROM history ' - "WHERE action LIKE '%04' " + "WHERE action = '4' " # DOWNLOADED 'AND resource LIKE ?', ['%' + video_file]) diff --git a/medusa/scene_numbering.py b/medusa/scene_numbering.py index 57f3200ba3..56494b7a58 100644 --- a/medusa/scene_numbering.py +++ b/medusa/scene_numbering.py @@ -225,8 +225,12 @@ def find_xem_numbering(series_obj, season, episode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0", - [series_obj.indexer, series_obj.series_id, season, episode]) + "SELECT scene_season, scene_episode " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? and season = ? " + "and episode = ? and (scene_season or scene_episode) != 0", + [series_obj.indexer, series_obj.series_id, season, episode] + ) if rows: return int(rows[0][b"scene_season"]), int(rows[0][b"scene_episode"]) @@ -248,7 +252,10 @@ def find_xem_absolute_numbering(series_obj, absolute_number): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and absolute_number = ? and scene_absolute_number != 0", + "SELECT scene_absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? " + "and absolute_number = ? 
and scene_absolute_number != 0", [series_obj.indexer, series_obj.series_id, absolute_number]) if rows: @@ -271,7 +278,10 @@ def get_indexer_numbering_for_xem(series_obj, sceneSeason, sceneEpisode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT season, episode FROM tv_episodes WHERE indexer = ? and showid = ? and scene_season = ? and scene_episode = ?", + "SELECT season, episode " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? " + "and scene_season = ? and scene_episode = ?", [series_obj.indexer, series_obj.series_id, sceneSeason, sceneEpisode]) if rows: @@ -296,11 +306,17 @@ def get_indexer_absolute_numbering_for_xem(series_obj, sceneAbsoluteNumber, scen main_db_con = db.DBConnection() if scene_season is None: rows = main_db_con.select( - "SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ?", + "SELECT absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? AND showid = ? " + "AND scene_absolute_number = ?", [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber]) else: rows = main_db_con.select( - "SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ? and scene_season = ?", + "SELECT absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? " + "AND showid = ? AND scene_absolute_number = ? and scene_season = ?", [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber, scene_season]) if rows: @@ -349,8 +365,11 @@ def get_xem_numbering_for_show(series_obj, refresh_data=True): main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT season, episode, scene_season, scene_episode FROM tv_episodes ' - 'WHERE indexer = ? and showid = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode', + 'SELECT season, episode, scene_season, scene_episode ' + 'FROM tv_episodes ' + 'WHERE indexer = ? AND showid = ? 
' + 'AND (scene_season or scene_episode) != 0 ' + 'ORDER BY season, episode', [series_obj.indexer, series_obj.series_id] ) @@ -404,7 +423,10 @@ def get_xem_absolute_numbering_for_show(series_obj): result = {} main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number', + 'SELECT absolute_number, scene_absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ' + 'ORDER BY absolute_number', [series_obj.indexer, series_obj.series_id]) for row in rows: @@ -517,7 +539,9 @@ def fix_xem_numbering(series_obj): # pylint:disable=too-many-locals, too-many-b main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?', + 'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? AND showid = ?', [series_obj.indexer, series_obj.series_id]) last_absolute_number = None diff --git a/medusa/search/proper.py b/medusa/search/proper.py index 1aa63800d4..e1a33847e1 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -15,7 +15,7 @@ from builtins import str from medusa import app, db, helpers -from medusa.common import Quality, cpu_presets +from medusa.common import cpu_presets, DOWNLOADED, SUBTITLED from medusa.helper.common import enabled_providers from medusa.helper.exceptions import AuthException, ex from medusa.logger.adapters.style import BraceAdapter @@ -96,20 +96,19 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran main_db_con = db.DBConnection() if not app.POSTPONE_IF_NO_SUBS: # Get the recently aired (last 2 days) shows from DB - search_q_params = ','.join('?' 
for _ in Quality.DOWNLOADED) recently_aired = main_db_con.select( b'SELECT indexer, showid, season, episode, status, airdate' b' FROM tv_episodes' b' WHERE airdate >= ?' - b' AND status IN ({0})'.format(search_q_params), - [search_date.toordinal()] + Quality.DOWNLOADED + b' AND status = ?', + [search_date.toordinal(), DOWNLOADED] ) else: # Get recently subtitled episodes (last 2 days) from DB # Episode status becomes downloaded only after found subtitles last_subtitled = search_date.strftime(History.date_format) recently_aired = main_db_con.select(b'SELECT indexer_id AS indexer, showid, season, episode FROM history ' - b"WHERE date >= ? AND action LIKE '%10'", [last_subtitled]) + b"WHERE date >= ? AND action = ?", [last_subtitled, SUBTITLED]) if not recently_aired: log.info('No recently aired new episodes, nothing to search for') @@ -231,11 +230,12 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran sql_results = main_db_con.select(b"SELECT status, quality, release_name " b"FROM tv_episodes WHERE indexer = ? " b"AND showid = ? AND season = ? " - b"AND episode = ? AND status LIKE '%04'", + b"AND episode = ? AND status = ?", [best_result.indexer, best_result.series.indexerid, best_result.actual_season, - best_result.actual_episodes[0]]) + best_result.actual_episodes[0], + DOWNLOADED]) if not sql_results: log.info("Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}", { 'name': best_result.name @@ -339,7 +339,7 @@ def _download_propers(self, proper_list): b'AND episode = ? ' b'AND quality = ? ' b'AND date >= ? 
' - b"AND (action LIKE '%02' OR action LIKE '%04' OR action LIKE '%09' OR action LIKE '%12')", + b"AND action in ('2', '4', '9', '12')", [cur_proper.indexerid, cur_proper.actual_season, cur_proper.actual_episode, cur_proper.quality, history_limit.strftime(History.date_format)]) diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index bfe8109d4f..d986abe74f 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -890,7 +890,7 @@ def _ep_result(result_code, ep, msg=''): continue # allow the user to force setting the status for an already downloaded episode - if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force: + if ep_obj.status in [DOWNLOADED, ARCHIVED] and not self.force: ep_results.append( _ep_result( RESULT_FAILURE, ep_obj, @@ -2666,6 +2666,7 @@ def run(self): # add all the downloaded qualities episode_qualities_counts_download = {'total': 0} + # TODO: replace Quality status with normal status. But no idea what this does? medariox? for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED: status, quality = Quality.split_composite_status(statusCode) if quality in [Quality.NONE]: @@ -2674,6 +2675,7 @@ def run(self): # add all snatched qualities episode_qualities_counts_snatch = {'total': 0} + # TODO: replace Quality status with normal status. But no idea what this does? medariox? 
for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER: status, quality = Quality.split_composite_status(statusCode) if quality in [Quality.NONE]: diff --git a/medusa/server/api/v2/stats.py b/medusa/server/api/v2/stats.py index e86f102b01..4d766cafcf 100644 --- a/medusa/server/api/v2/stats.py +++ b/medusa/server/api/v2/stats.py @@ -6,9 +6,13 @@ from medusa import db from medusa.common import ( + ARCHIVED, + DOWNLOADED, FAILED, - Quality, SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, WANTED ) @@ -36,8 +40,8 @@ def get(self, identifier, path_param=None): """ main_db_con = db.DBConnection() - snatched = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST - downloaded = Quality.DOWNLOADED + Quality.ARCHIVED + snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] + downloaded = [DOWNLOADED, ARCHIVED] # FIXME: This inner join is not multi indexer friendly. sql_result = main_db_con.select( diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 1931effbd8..4f5ef4e81e 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -175,8 +175,8 @@ def index(self): def show_statistics(): main_db_con = db.DBConnection() - snatched = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST - downloaded = Quality.DOWNLOADED + Quality.ARCHIVED + snatched = [SNATCHED. SNATCHED_PROPER, SNATCHED_BEST] + downloaded = [DOWNLOADED, ARCHIVED] # FIXME: This inner join is not multi indexer friendly. sql_result = main_db_con.select( @@ -1291,7 +1291,7 @@ def titler(x): b'AND showid = ? ' b'AND season = ? ' b'AND episode = ? 
' - b'AND (action LIKE \'%02\' OR action LIKE \'%04\' OR action LIKE \'%09\' OR action LIKE \'%11\' OR action LIKE \'%12\') ' + b"AND action in ('2', '4', '9', '11', '12') " # SNATCHED, DOWN, SNATCH_PROP, FAILED, SNATCH_BEST b'ORDER BY date DESC', [indexer_id, series_id, season, episode] ) @@ -1982,24 +1982,24 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire series=series_obj.name, episode=cur_ep), logger.WARNING) continue - snatched_qualities = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + snatched_qualities = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] if status == DOWNLOADED and not ( - ep_obj.status in snatched_qualities + Quality.DOWNLOADED + ep_obj.status in snatched_qualities + [DOWNLOADED] and os.path.isfile(ep_obj.location)): logger.log('Refusing to change status of {series} {episode} to DOWNLOADED' ' because it\'s not SNATCHED/DOWNLOADED'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) continue - if status == FAILED and ep_obj.status not in snatched_qualities + Quality.DOWNLOADED + Quality.ARCHIVED: + if status == FAILED and ep_obj.status not in snatched_qualities + [DOWNLOADED, ARCHIVED]: logger.log('Refusing to change status of {series} {episode} to FAILED' ' because it\'s not SNATCHED/DOWNLOADED/ARCHIVED'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) continue if status == WANTED: - if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED: + if ep_obj.status in [DOWNLOADED, ARCHIVED]: logger.log('Removing release_name of {series} {episode} as episode was changed to WANTED'.format( series=series_obj.name, episode=cur_ep), logger.DEBUG) ep_obj.release_name = '' @@ -2150,7 +2150,8 @@ def doRename(self, indexername=None, seriesid=None, eps=None): ep_info = cur_ep.split('x') - # this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database + # this is probably the worst possible way to deal 
with double eps + # but I've kinda painted myself into a corner here with this stupid database ep_result = main_db_con.select( b'SELECT location ' b'FROM tv_episodes ' diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index c79c8dcbf3..4cbfa94e2b 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -180,15 +180,14 @@ def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs): @staticmethod def showSubtitleMissed(indexer, seriesid, whichSubs): main_db_con = db.DBConnection() - # TODO: maybe need to check if %4 still applies here. cur_show_results = main_db_con.select( b'SELECT season, episode, name, subtitles ' b'FROM tv_episodes ' b'WHERE indexer = ? ' b'AND showid = ? ' b'AND season != 0 ' - b'AND status LIKE \'%4\' ' - b'AND location != \'\'', + b"AND status = '4' " + b"AND location != ''", [int(indexer), int(seriesid)] ) @@ -223,15 +222,14 @@ def subtitleMissed(self, whichSubs=None): controller='manage', action='subtitleMissed') main_db_con = db.DBConnection() - # TODO: maybe need to check if %4 still applies here. status_results = main_db_con.select( b'SELECT show_name, tv_shows.show_id, tv_shows.indexer, ' b'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles ' b'FROM tv_episodes, tv_shows ' b'WHERE tv_shows.subtitles = 1 ' - b'AND tv_episodes.status LIKE \'%4\' ' + b"AND tv_episodes.status = '4' " b'AND tv_episodes.season != 0 ' - b'AND tv_episodes.location != \'\' ' + b"AND tv_episodes.location != '' " b'AND tv_episodes.showid = tv_shows.indexer_id ' b'AND tv_episodes.indexer = tv_shows.indexer ' b'ORDER BY show_name' @@ -285,15 +283,14 @@ def downloadSubtitleMissed(self, *args, **kwargs): # get a list of all the eps we want to download subtitles if they just said 'all' if 'all' in to_download[(cur_indexer_id, cur_series_id)]: main_db_con = db.DBConnection() - # TODO: maybe need to check if %4 still applies here. 
all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' - b'WHERE status LIKE \'%4\' ' + b"WHERE status = '4' " b'AND season != 0 ' b'AND indexer = ? ' b'AND showid = ? ' - b'AND location != \'\'', + b"AND location != ''", [cur_indexer_id, cur_series_id] ) to_download[(cur_indexer_id, cur_series_id)] = [str(x[b'season']) + 'x' + str(x[b'episode']) for x in all_eps_results] @@ -408,8 +405,9 @@ def backlogOverview(self): ep_cats = {} sql_results = main_db_con.select( - """ - SELECT e.status, e.quality, e.season, e.episode, e.name, e.airdate, e.manually_searched + b""" + SELECT e.status, e.quality, e.season, + e.episode, e.name, e.airdate, e.manually_searched FROM tv_episodes as e WHERE e.season IS NOT NULL AND e.indexer = ? AND e.showid = ? diff --git a/medusa/show/show.py b/medusa/show/show.py index b678128ecf..28b1c5ec6f 100644 --- a/medusa/show/show.py +++ b/medusa/show/show.py @@ -24,8 +24,12 @@ from medusa import app from medusa.common import ( - Quality, + ARCHIVED, + DOWNLOADED, SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, WANTED, ) from medusa.db import DBConnection @@ -151,12 +155,12 @@ def overall_stats(): shows = app.showList today = date.today().toordinal() - downloaded_status = Quality.DOWNLOADED + Quality.ARCHIVED - snatched_status = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + downloaded_status = [DOWNLOADED, ARCHIVED] + snatched_status = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] total_status = [SKIPPED, WANTED] results = db.select( - 'SELECT airdate, status ' + 'SELECT airdate, status, quality' 'FROM tv_episodes ' 'WHERE season > 0 ' 'AND episode > 0 ' diff --git a/medusa/subtitles.py b/medusa/subtitles.py index 9899601210..bf9be0ddf1 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -992,7 +992,7 @@ def dhm(td): "WHERE " "s.subtitles = 1 " "AND s.paused = 0 " - "AND e.status LIKE '%4' " + "AND e.status = '4' " "AND e.season > 0 " "AND e.location != '' " "AND age {} 30 " 
diff --git a/medusa/trakt_checker.py b/medusa/trakt_checker.py index 5f43b7af6e..19505885b7 100644 --- a/medusa/trakt_checker.py +++ b/medusa/trakt_checker.py @@ -10,7 +10,7 @@ from builtins import str from medusa import app, db, ui -from medusa.common import Quality, SKIPPED, WANTED +from medusa.common import ARCHIVED, DOWNLOADED, SKIPPED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED from medusa.helper.common import episode_num from medusa.helpers import get_title_without_year from medusa.indexers.indexer_config import EXTERNAL_IMDB, EXTERNAL_TRAKT, indexerConfig @@ -223,17 +223,17 @@ def remove_episode_trakt_collection(self, filter_show=None): params = [] main_db_con = db.DBConnection() - selection_status = ['?' for _ in Quality.DOWNLOADED + Quality.ARCHIVED] + status = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name,' \ b'e.season, e.episode, e.status ' \ b'FROM tv_episodes AS e, tv_shows AS s WHERE e.indexer = s.indexer AND ' \ b's.indexer_id = e.showid and e.location = "" ' \ - b'AND e.status in ({0})'.format(','.join(selection_status)) + b'AND e.status in ({0})'.format(','.join(['?']*len(status))) if filter_show: sql_selection += b' AND s.indexer_id = ? AND e.indexer = ?' params = [filter_show.series_id, filter_show.indexer] - sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED + params) + sql_result = main_db_con.select(sql_selection, status + params) episodes = [dict(e) for e in sql_result] if episodes: @@ -274,13 +274,13 @@ def add_episode_trakt_collection(self): if app.TRAKT_SYNC and app.USE_TRAKT: main_db_con = db.DBConnection() - selection_status = ['?' 
for _ in Quality.DOWNLOADED + Quality.ARCHIVED] + status = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \ - b"AND e.status in ({0}) AND e.location <> ''".format(','.join(selection_status)) + b"AND e.status in ({0}) AND e.location <> ''".format(','.join(['?']*len(status))) - sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED) + sql_result = main_db_con.select(sql_selection, status) episodes = [dict(e) for e in sql_result] if episodes: @@ -337,12 +337,11 @@ def remove_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = Quality.DOWNLOADED + Quality.ARCHIVED - selection_status = [b'?' for _ in status] + status = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer ' \ - b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(b','.join(selection_status)) + b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(b','.join([b'?']*len(status))) sql_result = main_db_con.select(sql_selection, status) episodes = [dict(i) for i in sql_result] @@ -382,12 +381,11 @@ def add_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + [WANTED] - selection_status = [b'?' 
for _ in status] + status = [SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED] sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid AND s.paused = 0 ' \ - b'AND e.status in ({0})'.format(b','.join(selection_status)) + b'AND e.status in ({0})'.format(b','.join([b'?']*len(status))) sql_result = main_db_con.select(sql_selection, status) episodes = [dict(i) for i in sql_result] @@ -489,6 +487,7 @@ def sync_trakt_shows(self): show_name = trakt_show['title'] show = None + indexer = None for i in indexerConfig: trakt_indexer = get_trakt_indexer(i) indexer_id = trakt_show['ids'].get(trakt_indexer, -1) @@ -516,8 +515,7 @@ def sync_trakt_shows(self): else: self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED) - if int(app.TRAKT_METHOD_ADD) == 1: - # FIXME: Referenced before assigment + if int(app.TRAKT_METHOD_ADD) == 1 and indexer: new_show = Show.find_by_id(app.showList, indexer, indexer_id) if new_show: diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index bbf42f9f24..9807ac0d4c 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -916,8 +916,7 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season ) # We only change the episode's status if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): - if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + \ - Quality.ARCHIVED + Quality.SNATCHED_BEST: + if self.status not in [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED]: old_status = self.status self.status = Quality.status_from_name(self.location, anime=self.series.is_anime) log.debug( From 8fac878bf9f50e394af816a86c60ff37f606a758 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 21 May 2018 19:37:30 +0200 Subject: [PATCH 17/93] Fixed some typos. 
--- medusa/server/web/home/handler.py | 2 +- medusa/show/show.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 4f5ef4e81e..265ecb8480 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -175,7 +175,7 @@ def index(self): def show_statistics(): main_db_con = db.DBConnection() - snatched = [SNATCHED. SNATCHED_PROPER, SNATCHED_BEST] + snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] downloaded = [DOWNLOADED, ARCHIVED] # FIXME: This inner join is not multi indexer friendly. diff --git a/medusa/show/show.py b/medusa/show/show.py index 28b1c5ec6f..26e77cc7c4 100644 --- a/medusa/show/show.py +++ b/medusa/show/show.py @@ -160,7 +160,7 @@ def overall_stats(): total_status = [SKIPPED, WANTED] results = db.select( - 'SELECT airdate, status, quality' + 'SELECT airdate, status, quality ' 'FROM tv_episodes ' 'WHERE season > 0 ' 'AND episode > 0 ' From 3383c5b502a8d74fb7508cc6dc089702ce20100e Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 21 May 2018 19:38:17 +0200 Subject: [PATCH 18/93] Reset quality UNKNOWN to NONE. 
--- medusa/databases/main_db.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 1beb07e673..da4fe94512 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -39,6 +39,7 @@ def check(self): # self.convert_archived_to_compound() self.fix_subtitle_reference() self.clean_null_indexer_mappings() + self.fix_remove_status_unknown() def clean_null_indexer_mappings(self): log.debug(u'Checking for null indexer mappings') @@ -258,6 +259,10 @@ def fix_subtitles_codes(self): def fix_show_nfo_lang(self): self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 or lang = '0'") + def fix_remove_status_unknown(self): + log.info(u'Remove status UNKONWN from tv_episodes') + self.connection.select("UPDATE tv_episodes SET quality = 0 WHERE quality = 32768") + def backupDatabase(version): log.info(u'Backing up database before upgrade') @@ -765,7 +770,7 @@ def execute(self): # Remove ep_status and ep_quality and add quality field. # Move status from ep_status and quality from ep_quality - log.info(u'Adding data from ep_status and ep_quality fields to status/quality fields the tv_episodes table') + log.info(u'Adding data from ep_status and ep_quality fields to status/quality fields in the tv_episodes table') self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') self.connection.action('CREATE TABLE IF NOT EXISTS new_tv_episodes ' From 273434bdea8d5203ab3c961d7a147d7022f47e7d Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 21 May 2018 20:09:43 +0200 Subject: [PATCH 19/93] Fixed get_overview() calls. 
--- medusa/search/manual.py | 2 +- medusa/server/web/home/handler.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index abb3082d7d..8b2ee98cdc 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -104,7 +104,7 @@ def get_episodes(search_thread, searchstatus): 'status': statusStrings[ep.status], 'quality': get_quality_class(ep), 'overview': Overview.overviewStrings[series_obj.get_overview( - ep.status, + ep.status, ep.quality, manually_searched=ep.manually_searched )], }) diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 265ecb8480..6066b313a8 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1376,7 +1376,7 @@ def titler(x): ep_cats = {} for cur_result in sql_results: - cur_ep_cat = series_obj.get_overview(cur_result[b'status'], + cur_ep_cat = series_obj.get_overview(cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: ep_cats['{season}x{episode}'.format(season=cur_result[b'season'], From 7e25812ff2a1669b3a9a4a35e5517838707f893a Mon Sep 17 00:00:00 2001 From: P0psicles Date: Mon, 21 May 2018 22:28:06 +0200 Subject: [PATCH 20/93] Removed some more composite_status method calls. 
--- medusa/history.py | 24 ++++++++++++------------ medusa/naming.py | 15 ++++++++++----- medusa/post_processor.py | 3 ++- medusa/search/core.py | 6 ++++-- medusa/server/api/v1/core.py | 8 +++----- medusa/server/web/home/handler.py | 4 ++-- 6 files changed, 33 insertions(+), 27 deletions(-) diff --git a/medusa/history.py b/medusa/history.py index daf8eabf60..467b73299e 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -50,30 +50,30 @@ def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tag resource, provider, version, proper_tags, manually_searched, info_hash, size]) -def log_snatch(searchResult): +def log_snatch(search_result): """ Log history of snatch - :param searchResult: search result object + :param search_result: search result object """ - for ep_obj in searchResult.episodes: + for ep_obj in search_result.episodes: - quality = searchResult.quality - version = searchResult.version - proper_tags = '|'.join(searchResult.proper_tags) - manually_searched = searchResult.manually_searched - info_hash = searchResult.hash.lower() if searchResult.hash else None - size = searchResult.size + version = search_result.version + proper_tags = '|'.join(search_result.proper_tags) + manually_searched = search_result.manually_searched + info_hash = search_result.hash.lower() if search_result.hash else None + size = search_result.size - providerClass = searchResult.provider + providerClass = search_result.provider if providerClass is not None: provider = providerClass.name else: provider = "unknown" - action = Quality.composite_status(SNATCHED, searchResult.quality) + action = SNATCHED + ep_obj.quality = search_result.quality - resource = searchResult.name + resource = search_result.name _log_history_item(action, ep_obj, resource, provider, version, proper_tags, manually_searched, info_hash, size) diff --git a/medusa/naming.py b/medusa/naming.py index 6dd3e3b30d..5dbb16af75 100644 --- a/medusa/naming.py +++ b/medusa/naming.py @@ -103,7 +103,8 @@ 
def __init__(self, season, episode, absolute_number, name): # pylint: disable=s self.scene_episode = episode self.scene_absolute_number = absolute_number self.airdate = datetime.date(2010, 3, 9) - self.status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV) + self.status = common.DOWNLOADED + self.quality = common.Quality.SDTV self.release_name = 'Show.Name.S02E03.HDTV.x264-RLSGROUP' self.is_proper = True self.series = TVShow() @@ -241,7 +242,8 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep = TVEpisode(2, 3, 3, "Ep Name") # pylint: disable=protected-access - ep.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + ep.status = DOWNLOADED + ep.quality = Quality.HDTV ep.airdate = datetime.date(2011, 3, 9) if abd: @@ -266,7 +268,8 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep.release_name = 'Show.Name.003-004.HDTV.x264-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") - secondEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + secondEp.status = DOWNLOADED + secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name ep.related_episodes.append(secondEp) @@ -274,11 +277,13 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep.release_name = 'Show.Name.S02E03E04E05.HDTV.x264-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") - secondEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + secondEp.status = DOWNLOADED + secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)") - thirdEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + thirdEp.status = DOWNLOADED + thirdEp.quality = Quality.HDTV thirdEp.release_name = ep.release_name ep.related_episodes.append(secondEp) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index f1881c1e71..66cdca7a1e 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ 
-1150,7 +1150,8 @@ def process(self): else: cur_ep.release_name = u'' - cur_ep.status = common.Quality.composite_status(common.DOWNLOADED, new_ep_quality) + cur_ep.status = common.DOWNLOADED + cur_ep.quality = new_ep_quality cur_ep.subtitles = u'' diff --git a/medusa/search/core.py b/medusa/search/core.py index 4ff6b18ed7..3605af8162 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -189,9 +189,11 @@ def snatch_episode(result): for curEpObj in result.episodes: with curEpObj.lock: if is_first_best_match(result): - curEpObj.status = Quality.composite_status(SNATCHED_BEST, result.quality) + curEpObj.status = SNATCHED_BEST + curEpObj.quality = result.quality else: - curEpObj.status = Quality.composite_status(end_status, result.quality) + curEpObj.status = end_status + curEpObj.quality = result.quality # Reset all others fields to the snatched status # New snatch by default doesn't have nfo/tbn curEpObj.hasnfo = False diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index d986abe74f..f4eea195f5 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -1068,20 +1068,18 @@ def convert_date(history_date): History.date_format ).strftime(dateTimeFormat) - # FIXME: Can't really do anything about this now. History -> action, should also be separated? 
- composite = Quality.split_composite_status(cur_item.action) - if cur_type in (statusStrings[composite.status].lower(), None): + if cur_type in (statusStrings[cur_type.status].lower(), None): return { 'date': convert_date(cur_item.date), 'episode': cur_item.episode, 'indexerid': cur_item.show_id, 'provider': cur_item.provider, - 'quality': get_quality_string(composite.quality), + 'quality': get_quality_string(cur_item.quality), 'resource': os.path.basename(cur_item.resource), 'resource_path': os.path.dirname(cur_item.resource), 'season': cur_item.season, 'show_name': cur_item.show_name, - 'status': statusStrings[composite.status], + 'status': statusStrings[cur_item.status], # Add tvdbid for backward compatibility # TODO: Make this actual tvdb id for other indexers 'tvdbid': cur_item.show_id, diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 6066b313a8..d1e7c72cb3 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1297,7 +1297,7 @@ def titler(x): ) episode_history = [dict(row) for row in episode_status_result] for i in episode_history: - i['status'], i['quality'] = Quality.split_composite_status(i['action']) + i['status'] = i['action'] i['action_date'] = sbdatetime.sbfdatetime(datetime.strptime(str(i['date']), History.date_format), show_seconds=True) i['resource_file'] = os.path.basename(i['resource']) i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A' @@ -1916,7 +1916,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire else: return self._genericMessage('Error', error_message) - # statusStrings is a custom type. Which does some "magic" itself. But we want to move away from this. + # FIXME: statusStrings is a custom type. Which does some "magic" itself. But we want to move away from this. # Currently status is passed from displayShow as a composite status+quality. Therefor we need to separate # the status from it. 
From bb8a8c50a5642ff021358638cf70f5df2218832f Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 22 May 2018 18:56:17 +0200 Subject: [PATCH 21/93] Fix snatchSelection page. Missed quality in select. --- medusa/server/web/home/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index d1e7c72cb3..530d2f4fe7 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1285,7 +1285,7 @@ def titler(x): try: main_db_con = db.DBConnection() episode_status_result = main_db_con.action( - b'SELECT date, action, provider, resource, size ' + b'SELECT date, action, quality, provider, resource, size ' b'FROM history ' b'WHERE indexer_id = ? ' b'AND showid = ? ' From 32834930c285ffdc0a1fe0bd9d931d6f6c43368a Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 22 May 2018 18:59:57 +0200 Subject: [PATCH 22/93] Changed sql query DDL, to uppercase. --- medusa/databases/main_db.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index da4fe94512..d140a1cb07 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -43,7 +43,7 @@ def check(self): def clean_null_indexer_mappings(self): log.debug(u'Checking for null indexer mappings') - query = "SELECT * from indexer_mapping where mindexer_id = ''" + query = "SELECT * from indexer_mapping WHERE mindexer_id = ''" sql_results = self.connection.select(query) if sql_results: @@ -799,7 +799,7 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") log.info(u'Remove the quality from the action field, as this is a composite status') - sql_results = self.connection.select("SELECT action from history GROUP BY action") + sql_results = self.connection.select("SELECT action FROM history GROUP BY action") for status in sql_results: split = common.Quality.split_composite_status(status[b'action']) From 
d6cb232d146deb2b4e7c6f21ac92c30769fa0288 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Tue, 22 May 2018 19:14:26 +0200 Subject: [PATCH 23/93] Convert all status on each startup to status/quality. This to keep the db sane, while changing all the composite statusses. --- medusa/databases/main_db.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index d140a1cb07..841e285fe8 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -40,6 +40,7 @@ def check(self): self.fix_subtitle_reference() self.clean_null_indexer_mappings() self.fix_remove_status_unknown() + self.fix_status_qualities() def clean_null_indexer_mappings(self): log.debug(u'Checking for null indexer mappings') @@ -259,7 +260,27 @@ def fix_subtitles_codes(self): def fix_show_nfo_lang(self): self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 or lang = '0'") + def fix_status_qualities(self): + """ + Check for a status bigger then 12, and translate to a status + quality, + as these are old composite statussus. + This can be removed when all code that creates composite statussus has been migrated. + Until then this can be used to keep the DB sane. + """ + log.info(u'Convert composite statussus in tv_episodes to status + quality.') + sql_results = self.connection.select("SELECT status FROM tv_episodes where status > 12 GROUP BY status") + for status in sql_results: + log.info(u'Split composite status in to ep_status and ep_quality for %s', status[b'status']) + split = common.Quality.split_composite_status(status[b'status']) + self.connection.select( + "UPDATE tv_episodes SET status = ?, quality = ? 
WHERE status = ?", + [split.status, split.quality, status[b'status']] + ) + + self.connection.select("") + def fix_remove_status_unknown(self): + """Changes any `UNKNOWN` quality to 0.""" log.info(u'Remove status UNKONWN from tv_episodes') self.connection.select("UPDATE tv_episodes SET quality = 0 WHERE quality = 32768") From 64aac181722335f440b596aee5572e704580be47 Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 18:17:13 +0200 Subject: [PATCH 24/93] Finish conversion of APIv1 funny code --- medusa/server/api/v1/core.py | 73 +++++++++++++++--------------------- 1 file changed, 31 insertions(+), 42 deletions(-) diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index f4eea195f5..43cc4aac1f 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -2657,83 +2657,72 @@ def run(self): # show stats episode_status_counts_total = {'total': 0} - for status in statusStrings: - if status in [UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]: + for status_code in statusStrings: + if status_code in [UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED]: continue - episode_status_counts_total[status] = 0 + episode_status_counts_total[status_code] = 0 # add all the downloaded qualities episode_qualities_counts_download = {'total': 0} - # TODO: replace Quality status with normal status. But no idea what this does? medariox? - for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED: - status, quality = Quality.split_composite_status(statusCode) - if quality in [Quality.NONE]: - continue - episode_qualities_counts_download[statusCode] = 0 + for status_code in (DOWNLOADED, ARCHIVED): + episode_qualities_counts_download[status_code] = 0 # add all snatched qualities episode_qualities_counts_snatch = {'total': 0} - # TODO: replace Quality status with normal status. But no idea what this does? medariox? 
- for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER: - status, quality = Quality.split_composite_status(statusCode) - if quality in [Quality.NONE]: - continue - episode_qualities_counts_snatch[statusCode] = 0 + for status_code in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): + episode_qualities_counts_snatch[status_code] = 0 main_db_con = db.DBConnection(row_type='dict') sql_results = main_db_con.select('SELECT status, quality, season FROM tv_episodes ' 'WHERE season != 0 AND indexer = ? AND showid = ?', [INDEXER_TVDBV2, self.indexerid]) + # the main loop that goes through all episodes for row in sql_results: status, quality = int(row[b'status']), int(row[b'quality']) episode_status_counts_total['total'] += 1 + episode_status_counts_total[status][quality] += 1 - if status in DOWNLOADED + ARCHIVED: + if status in (DOWNLOADED, ARCHIVED): episode_qualities_counts_download['total'] += 1 - episode_qualities_counts_download[int(row[b'status'])] += 1 - elif status in SNATCHED + SNATCHED_PROPER: + episode_qualities_counts_download[status][quality] += 1 + elif status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): episode_qualities_counts_snatch['total'] += 1 - episode_qualities_counts_snatch[int(row[b'status'])] += 1 - elif status == 0: # we don't count NONE = 0 = N/A - pass - else: - episode_status_counts_total[status] += 1 + episode_qualities_counts_snatch[status][quality] += 1 # the outgoing container episodes_stats = {'downloaded': {}} # turning codes into strings - for statusCode in episode_qualities_counts_download: - if statusCode == 'total': - episodes_stats['downloaded']['total'] = episode_qualities_counts_download[statusCode] + for status in episode_qualities_counts_download: + if status == 'total': + episodes_stats['downloaded']['total'] = episode_qualities_counts_download[status] continue - status, quality = Quality.split_composite_status(int(statusCode)) - status_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', 
'').replace(')', '') - episodes_stats['downloaded'][status_string] = episode_qualities_counts_download[statusCode] + quality = episode_qualities_counts_download[status] + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') + episodes_stats['downloaded'][quality_string] = episode_qualities_counts_download[status] episodes_stats['snatched'] = {} # turning codes into strings # and combining proper and normal - for statusCode in episode_qualities_counts_snatch: - if statusCode == 'total': - episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[statusCode] + for status in episode_qualities_counts_snatch: + if status == 'total': + episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[status] continue - status, quality = Quality.split_composite_status(int(statusCode)) - status_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', '').replace(')', '') + quality = episode_qualities_counts_download[status] + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') if Quality.qualityStrings[quality] in episodes_stats['snatched']: - episodes_stats['snatched'][status_string] += episode_qualities_counts_snatch[statusCode] + episodes_stats['snatched'][quality_string] += episode_qualities_counts_snatch[status] else: - episodes_stats['snatched'][status_string] = episode_qualities_counts_snatch[statusCode] + episodes_stats['snatched'][quality_string] = episode_qualities_counts_snatch[status] # episodes_stats["total"] = {} - for statusCode in episode_status_counts_total: - if statusCode == 'total': - episodes_stats['total'] = episode_status_counts_total[statusCode] + for status in episode_status_counts_total: + if status == 'total': + episodes_stats['total'] = episode_status_counts_total[status] continue - status_string = statusStrings[statusCode].lower().replace(' ', '_').replace('(', '').replace( - ')', '') - episodes_stats[status_string] = episode_status_counts_total[statusCode] + 
status_string = Quality.statusPrefixes[status].lower().replace(' ', '_').replace('(', '').replace(')', '') + episodes_stats[status_string] = episode_status_counts_total[status] return _responds(RESULT_SUCCESS, episodes_stats) From e9bc1a2da6e9d29481953f8b4cbaefc52f4760cd Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 19:04:10 +0200 Subject: [PATCH 25/93] Remove last few split_composite_status --- medusa/databases/main_db.py | 8 +++----- medusa/server/web/home/handler.py | 9 --------- themes-default/slim/views/displayShow.mako | 19 +++++++++---------- .../slim/views/manage_backlogOverview.mako | 15 ++++++--------- themes/dark/templates/displayShow.mako | 19 +++++++++---------- .../templates/manage_backlogOverview.mako | 15 ++++++--------- themes/light/templates/displayShow.mako | 19 +++++++++---------- .../templates/manage_backlogOverview.mako | 15 ++++++--------- 8 files changed, 48 insertions(+), 71 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 841e285fe8..10b6c36218 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -262,9 +262,9 @@ def fix_show_nfo_lang(self): def fix_status_qualities(self): """ - Check for a status bigger then 12, and translate to a status + quality, - as these are old composite statussus. - This can be removed when all code that creates composite statussus has been migrated. + Check for a status bigger than 12, and translate to a status + quality, + as these are old composite statuses. + This can be removed when all code that creates composite statuses has been migrated. Until then this can be used to keep the DB sane. 
""" log.info(u'Convert composite statussus in tv_episodes to status + quality.') @@ -277,8 +277,6 @@ def fix_status_qualities(self): [split.status, split.quality, status[b'status']] ) - self.connection.select("") - def fix_remove_status_unknown(self): """Changes any `UNKNOWN` quality to 0.""" log.info(u'Remove status UNKONWN from tv_episodes') diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 530d2f4fe7..9e275bec91 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1916,13 +1916,6 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire else: return self._genericMessage('Error', error_message) - # FIXME: statusStrings is a custom type. Which does some "magic" itself. But we want to move away from this. - # Currently status is passed from displayShow as a composite status+quality. Therefor we need to separate - # the status from it. - - status = Quality.split_composite_status(status).status - quality = Quality.split_composite_status(status).quality - if status not in statusStrings: error_message = 'Invalid status' if direct: @@ -2011,10 +2004,8 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire ep_obj.manually_searched = False # Only in failed_history we set to FAILED. 
- # We need current snatched quality to log 'quality' column in failed action in history if status != FAILED: ep_obj.status = status - ep_obj.quality = quality # mass add to database sql_l.append(ep_obj.get_sql()) diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index 0590d815e9..774d42ecbd 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -236,9 +236,9 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc if int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} - % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} % endif @@ -256,7 +256,7 @@ const startVue = () => { % endif - % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] for rootDir in app.ROOT_DIRS: @@ -269,7 +269,7 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: % if flag != 'und': ${flag} @@ -280,12 +280,11 @@ const startVue = () => { % endif % endfor - <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> - % if cur_quality != Quality.NONE: - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} - % else: - ${statusStrings[cur_status]} - % endif + <% + cur_status = int(epResult['status']) + 
cur_quality = int(epResult['quality']) + %> + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: diff --git a/themes-default/slim/views/manage_backlogOverview.mako b/themes-default/slim/views/manage_backlogOverview.mako index 983e70bbc7..36d2aa5b6c 100644 --- a/themes-default/slim/views/manage_backlogOverview.mako +++ b/themes-default/slim/views/manage_backlogOverview.mako @@ -144,17 +144,14 @@ const startVue = () => { % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]: <% - old_status, old_quality = Quality.split_composite_status(cur_result['status']) - archived_status = Quality.composite_status(ARCHIVED, old_quality) + old_status = cur_result['status'] + old_quality = cur_result['quality'] + archived_status = ARCHIVED %> - - ${cur_result["episode_string"]} + + ${cur_result['episode_string']} - % if old_quality != Quality.NONE: - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} - % else: - ${statusStrings[old_status]} - % endif + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} ${cur_result["name"]} diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index 0590d815e9..774d42ecbd 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -236,9 +236,9 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc if int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} - % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} % endif @@ 
-256,7 +256,7 @@ const startVue = () => { % endif - % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] for rootDir in app.ROOT_DIRS: @@ -269,7 +269,7 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: % if flag != 'und': ${flag} @@ -280,12 +280,11 @@ const startVue = () => { % endif % endfor - <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> - % if cur_quality != Quality.NONE: - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} - % else: - ${statusStrings[cur_status]} - % endif + <% + cur_status = int(epResult['status']) + cur_quality = int(epResult['quality']) + %> + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: diff --git a/themes/dark/templates/manage_backlogOverview.mako b/themes/dark/templates/manage_backlogOverview.mako index 983e70bbc7..36d2aa5b6c 100644 --- a/themes/dark/templates/manage_backlogOverview.mako +++ b/themes/dark/templates/manage_backlogOverview.mako @@ -144,17 +144,14 @@ const startVue = () => { % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]: <% - old_status, old_quality = Quality.split_composite_status(cur_result['status']) - archived_status = Quality.composite_status(ARCHIVED, old_quality) + old_status = cur_result['status'] + old_quality = cur_result['quality'] + archived_status = ARCHIVED %> - - 
${cur_result["episode_string"]} + + ${cur_result['episode_string']} - % if old_quality != Quality.NONE: - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} - % else: - ${statusStrings[old_status]} - % endif + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} ${cur_result["name"]} diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index 0590d815e9..774d42ecbd 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -236,9 +236,9 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc if int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} - % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} % endif @@ -256,7 +256,7 @@ const startVue = () => { % endif - % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] for rootDir in app.ROOT_DIRS: @@ -269,7 +269,7 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: % if flag != 'und': ${flag} @@ -280,12 +280,11 @@ const startVue = () => { % endif % endfor - <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> - % if cur_quality != Quality.NONE: - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} - % else: - 
${statusStrings[cur_status]} - % endif + <% + cur_status = int(epResult['status']) + cur_quality = int(epResult['quality']) + %> + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: diff --git a/themes/light/templates/manage_backlogOverview.mako b/themes/light/templates/manage_backlogOverview.mako index 983e70bbc7..36d2aa5b6c 100644 --- a/themes/light/templates/manage_backlogOverview.mako +++ b/themes/light/templates/manage_backlogOverview.mako @@ -144,17 +144,14 @@ const startVue = () => { % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]: <% - old_status, old_quality = Quality.split_composite_status(cur_result['status']) - archived_status = Quality.composite_status(ARCHIVED, old_quality) + old_status = cur_result['status'] + old_quality = cur_result['quality'] + archived_status = ARCHIVED %> - - ${cur_result["episode_string"]} + + ${cur_result['episode_string']} - % if old_quality != Quality.NONE: - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} - % else: - ${statusStrings[old_status]} - % endif + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} ${cur_result["name"]} From cf631cfba5ba3b581c605d73e1d0f0bb75e7b8b2 Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 21:04:56 +0200 Subject: [PATCH 26/93] Completely remove Quality.{status} from mako --- medusa/common.py | 262 +++++++----------- themes-default/slim/views/displayShow.mako | 6 +- .../slim/views/manage_episodeStatuses.mako | 28 +- .../slim/views/partials/showheader.mako | 2 +- themes/dark/templates/displayShow.mako | 6 +- .../templates/manage_episodeStatuses.mako | 28 +- .../dark/templates/partials/showheader.mako | 2 +- themes/light/templates/displayShow.mako | 6 +- .../templates/manage_episodeStatuses.mako | 28 +- 
.../light/templates/partials/showheader.mako | 2 +- 10 files changed, 159 insertions(+), 211 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index 55d51b9cc5..47f32fb796 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -90,14 +90,14 @@ NOTIFY_SNATCH_PROPER = 8 notifyStrings = { - NOTIFY_SNATCH: "Started Download", - NOTIFY_DOWNLOAD: "Download Finished", - NOTIFY_SUBTITLE_DOWNLOAD: "Subtitle Download Finished", - NOTIFY_GIT_UPDATE: "Medusa Updated", - NOTIFY_GIT_UPDATE_TEXT: "Medusa Updated To Commit#: ", - NOTIFY_LOGIN: "Medusa new login", - NOTIFY_LOGIN_TEXT: "New login from IP: {0}. http://geomaplookup.net/?ip={0}", - NOTIFY_SNATCH_PROPER: "Started PROPER Download" + NOTIFY_SNATCH: 'Started Download', + NOTIFY_DOWNLOAD: 'Download Finished', + NOTIFY_SUBTITLE_DOWNLOAD: 'Subtitle Download Finished', + NOTIFY_GIT_UPDATE: 'Medusa Updated', + NOTIFY_GIT_UPDATE_TEXT: 'Medusa Updated To Commit#: ', + NOTIFY_LOGIN: 'Medusa new login', + NOTIFY_LOGIN_TEXT: 'New login from IP: {0}. 
http://geomaplookup.net/?ip={0}', + NOTIFY_SNATCH_PROPER: 'Started Proper Download' } # Episode statuses @@ -122,12 +122,28 @@ NAMING_LIMITED_EXTEND_E_PREFIXED = 32 MULTI_EP_STRINGS = { - NAMING_REPEAT: "Repeat", - NAMING_SEPARATED_REPEAT: "Repeat (Separated)", - NAMING_DUPLICATE: "Duplicate", - NAMING_EXTEND: "Extend", - NAMING_LIMITED_EXTEND: "Extend (Limited)", - NAMING_LIMITED_EXTEND_E_PREFIXED: "Extend (Limited, E-prefixed)" + NAMING_REPEAT: 'Repeat', + NAMING_SEPARATED_REPEAT: 'Repeat (Separated)', + NAMING_DUPLICATE: 'Duplicate', + NAMING_EXTEND: 'Extend', + NAMING_LIMITED_EXTEND: 'Extend (Limited)', + NAMING_LIMITED_EXTEND_E_PREFIXED: 'Extend (Limited, E-prefixed)' +} + + +statusStrings = { + ARCHIVED: 'Archived', + DOWNLOADED: 'Downloaded', + FAILED: 'Failed', + IGNORED: 'Ignored', + SKIPPED: 'Skipped', + SNATCHED: 'Snatched', + SNATCHED_BEST: 'Snatched (Best)', + SNATCHED_PROPER: 'Snatched (Proper)', + SUBTITLED: 'Subtitled', + UNAIRED: 'Unaired', + UNSET: 'Unset', + WANTED: 'Wanted' } @@ -159,87 +175,72 @@ class Quality(object): UNKNOWN = 1 << 15 # 32768 qualityStrings = { - NONE: "N/A", - UNKNOWN: "Unknown", - SDTV: "SDTV", - SDDVD: "SD DVD", - HDTV: "720p HDTV", - RAWHDTV: "RawHD", - FULLHDTV: "1080p HDTV", - HDWEBDL: "720p WEB-DL", - FULLHDWEBDL: "1080p WEB-DL", - HDBLURAY: "720p BluRay", - FULLHDBLURAY: "1080p BluRay", - UHD_4K_TV: "4K UHD TV", - UHD_8K_TV: "8K UHD TV", - UHD_4K_WEBDL: "4K UHD WEB-DL", - UHD_8K_WEBDL: "8K UHD WEB-DL", - UHD_4K_BLURAY: "4K UHD BluRay", - UHD_8K_BLURAY: "8K UHD BluRay", + NONE: 'N/A', + UNKNOWN: 'Unknown', + SDTV: 'SDTV', + SDDVD: 'SD DVD', + HDTV: '720p HDTV', + RAWHDTV: 'RawHD', + FULLHDTV: '1080p HDTV', + HDWEBDL: '720p WEB-DL', + FULLHDWEBDL: '1080p WEB-DL', + HDBLURAY: '720p BluRay', + FULLHDBLURAY: '1080p BluRay', + UHD_4K_TV: '4K UHD TV', + UHD_8K_TV: '8K UHD TV', + UHD_4K_WEBDL: '4K UHD WEB-DL', + UHD_8K_WEBDL: '8K UHD WEB-DL', + UHD_4K_BLURAY: '4K UHD BluRay', + UHD_8K_BLURAY: '8K UHD BluRay', } 
sceneQualityStrings = { - NONE: "N/A", - UNKNOWN: "Unknown", - SDTV: "", - SDDVD: "", - HDTV: "720p", - RAWHDTV: "1080i", - FULLHDTV: "1080p", - HDWEBDL: "720p", - FULLHDWEBDL: "1080p", - HDBLURAY: "720p BluRay", - FULLHDBLURAY: "1080p BluRay", - UHD_4K_TV: "2160p", - UHD_8K_TV: "4320p", - UHD_4K_WEBDL: "2160p", - UHD_8K_WEBDL: "4320p", - UHD_4K_BLURAY: "2160p BluRay", - UHD_8K_BLURAY: "4320p BluRay", + NONE: 'N/A', + UNKNOWN: 'Unknown', + SDTV: '', + SDDVD: '', + HDTV: '720p', + RAWHDTV: '1080i', + FULLHDTV: '1080p', + HDWEBDL: '720p', + FULLHDWEBDL: '1080p', + HDBLURAY: '720p BluRay', + FULLHDBLURAY: '1080p BluRay', + UHD_4K_TV: '2160p', + UHD_8K_TV: '4320p', + UHD_4K_WEBDL: '2160p', + UHD_8K_WEBDL: '4320p', + UHD_4K_BLURAY: '2160p BluRay', + UHD_8K_BLURAY: '4320p BluRay', } combinedQualityStrings = { - ANYHDTV: "HDTV", - ANYWEBDL: "WEB-DL", - ANYBLURAY: "BluRay" + ANYHDTV: 'HDTV', + ANYWEBDL: 'WEB-DL', + ANYBLURAY: 'BluRay' } cssClassStrings = { - NONE: "N/A", - UNKNOWN: "Unknown", - SDTV: "SDTV", - SDDVD: "SDDVD", - HDTV: "HD720p", - RAWHDTV: "RawHD", - FULLHDTV: "HD1080p", - HDWEBDL: "HD720p", - FULLHDWEBDL: "HD1080p", - HDBLURAY: "HD720p", - FULLHDBLURAY: "HD1080p", - UHD_4K_TV: "UHD-4K", - UHD_8K_TV: "UHD-8K", - UHD_4K_WEBDL: "UHD-4K", - UHD_8K_WEBDL: "UHD-8K", - UHD_4K_BLURAY: "UHD-4K", - UHD_8K_BLURAY: "UHD-8K", - ANYHDTV: "any-hd", - ANYWEBDL: "any-hd", - ANYBLURAY: "any-hd" - } - - statusPrefixes = { - UNSET: "Unset", - UNAIRED: "Unaired", - WANTED: "Wanted", - SKIPPED: "Skipped", - IGNORED: "Ignored", - SUBTITLED: "Subtitled", - DOWNLOADED: "Downloaded", - SNATCHED: "Snatched", - SNATCHED_PROPER: "Snatched (Proper)", - FAILED: "Failed", - SNATCHED_BEST: "Snatched (Best)", - ARCHIVED: "Archived" + NONE: 'N/A', + UNKNOWN: 'Unknown', + SDTV: 'SDTV', + SDDVD: 'SDDVD', + HDTV: 'HD720p', + RAWHDTV: 'RawHD', + FULLHDTV: 'HD1080p', + HDWEBDL: 'HD720p', + FULLHDWEBDL: 'HD1080p', + HDBLURAY: 'HD720p', + FULLHDBLURAY: 'HD1080p', + UHD_4K_TV: 'UHD-4K', + UHD_8K_TV: 
'UHD-8K', + UHD_4K_WEBDL: 'UHD-4K', + UHD_8K_WEBDL: 'UHD-8K', + UHD_4K_BLURAY: 'UHD-4K', + UHD_8K_BLURAY: 'UHD-8K', + ANYHDTV: 'any-hd', + ANYWEBDL: 'any-hd', + ANYBLURAY: 'any-hd' } @staticmethod @@ -447,8 +448,8 @@ def quality_from_file_meta(file_path): # TODO: Use knowledge information like 'resolution' base_filename = os.path.basename(file_path) - bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename, re.I) is not None - webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename, re.I) is not None + bluray = re.search(r'blue?-?ray|hddvd|b[rd](rip|mux)', base_filename, re.I) is not None + webdl = re.search(r'web.?dl|web(rip|mux|hd)', base_filename, re.I) is not None ret = Quality.UNKNOWN if 3240 < height: @@ -850,70 +851,17 @@ def to_guessit_screen_size(quality): ) qualityPresetStrings = { - SD: "SD", - HD: "HD", - HD720p: "HD720p", - HD1080p: "HD1080p", - UHD: "UHD", - UHD_4K: "UHD-4K", - UHD_8K: "UHD-8K", - ANY: "Any", + SD: 'SD', + HD: 'HD', + HD720p: 'HD720p', + HD1080p: 'HD1080p', + UHD: 'UHD', + UHD_4K: 'UHD-4K', + UHD_8K: 'UHD-8K', + ANY: 'Any', } -class StatusStrings(dict): - """Dictionary containing strings for status codes.""" - - # todo: Make views return Qualities too - statuses = list(Quality.statusPrefixes) - qualities = list(Quality.qualityStrings) - - def __missing__(self, key): - """ - If the key is not found try to determine a status from Quality. - - :param key: A numeric key - :raise KeyError: if the key is invalid and can't be determined from Quality - """ - try: - key = int(key) - except (TypeError, ValueError): - raise ValueError(key) - - current = Quality.split_composite_status(key) - if current.quality in self.qualities: - return '{status} ({quality})'.format( - status=self[current.status], - quality=Quality.qualityStrings[current.quality] - ) - else: # the key wasn't found in qualities either - raise KeyError(key) # ... 
so the key is invalid - - def __contains__(self, key): - try: - key = int(key) - return key in self.statuses or key in self.qualities - except (TypeError, ValueError): - raise ValueError(key) - - -# Assign strings to statuses -statusStrings = StatusStrings({ - UNSET: "Unset", - UNAIRED: "Unaired", - SNATCHED: "Snatched", - DOWNLOADED: "Downloaded", - SKIPPED: "Skipped", - SNATCHED_PROPER: "Snatched (Proper)", - WANTED: "Wanted", - ARCHIVED: "Archived", - IGNORED: "Ignored", - SUBTITLED: "Subtitled", - FAILED: "Failed", - SNATCHED_BEST: "Snatched (Best)" -}) - - class Overview(object): UNAIRED = UNAIRED # 1 SNATCHED = SNATCHED # 2 @@ -927,16 +875,16 @@ class Overview(object): QUAL = 50 overviewStrings = { - SKIPPED: "skipped", - WANTED: "wanted", - QUAL: "qual", - GOOD: "good", - UNAIRED: "unaired", - SNATCHED: "snatched", + SKIPPED: 'skipped', + WANTED: 'wanted', + QUAL: 'qual', + GOOD: 'good', + UNAIRED: 'unaired', + SNATCHED: 'snatched', # we can give these a different class later, otherwise # breaks checkboxes in displayShow for showing different statuses - SNATCHED_BEST: "snatched", - SNATCHED_PROPER: "snatched" + SNATCHED_BEST: 'snatched', + SNATCHED_PROPER: 'snatched' } diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index 774d42ecbd..b4829dc9c3 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -4,7 +4,7 @@ import urllib import ntpath from medusa import app, helpers, subtitles, sbdatetime, network_timezones - from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED + from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.common import Quality, qualityPresets, statusStrings, Overview from medusa.helper.common import pretty_file_size from medusa.indexers.indexer_api import indexerApi @@ -287,7 +287,7 @@ const startVue = () => { 
${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: - % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: + % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): retry % else: search @@ -296,7 +296,7 @@ const startVue = () => { % else: search % endif - % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]: + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): search subtitles % endif diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index 84565e913b..ca82f4a6d6 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa import common + from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${common.statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[whichStatus]}


% endif
Manage episodes with status @@ -42,28 +42,28 @@ Manage episodes with status -

Shows containing ${common.statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[whichStatus]} episodes


<% - if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED: + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): row_class = "good" else: - row_class = common.Overview.overviewStrings[int(whichStatus)] + row_class = Overview.overviewStrings[int(whichStatus)] %> Set checked shows/episodes to diff --git a/themes-default/slim/views/partials/showheader.mako b/themes-default/slim/views/partials/showheader.mako index 28a2352dd2..df55c6daa6 100644 --- a/themes-default/slim/views/partials/showheader.mako +++ b/themes-default/slim/views/partials/showheader.mako @@ -271,7 +271,7 @@ % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> % endif - % for cur_status in availableStatus + Quality.DOWNLOADED + Quality.ARCHIVED: + % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: % if cur_status not in [DOWNLOADED, ARCHIVED]: % endif diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index 774d42ecbd..b4829dc9c3 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -4,7 +4,7 @@ import urllib import ntpath from medusa import app, helpers, subtitles, sbdatetime, network_timezones - from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED + from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.common import Quality, qualityPresets, statusStrings, Overview from medusa.helper.common import pretty_file_size from medusa.indexers.indexer_api import indexerApi @@ -287,7 +287,7 @@ const startVue = () => { ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: - % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + 
Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: + % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): retry % else: search @@ -296,7 +296,7 @@ const startVue = () => { % else: search % endif - % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]: + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): search subtitles % endif diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index 84565e913b..ca82f4a6d6 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa import common + from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${common.statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[whichStatus]}


% endif Manage episodes with status @@ -42,28 +42,28 @@ Manage episodes with status -

Shows containing ${common.statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[whichStatus]} episodes


<% - if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED: + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): row_class = "good" else: - row_class = common.Overview.overviewStrings[int(whichStatus)] + row_class = Overview.overviewStrings[int(whichStatus)] %> Set checked shows/episodes to diff --git a/themes/dark/templates/partials/showheader.mako b/themes/dark/templates/partials/showheader.mako index 28a2352dd2..df55c6daa6 100644 --- a/themes/dark/templates/partials/showheader.mako +++ b/themes/dark/templates/partials/showheader.mako @@ -271,7 +271,7 @@ % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> % endif - % for cur_status in availableStatus + Quality.DOWNLOADED + Quality.ARCHIVED: + % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: % if cur_status not in [DOWNLOADED, ARCHIVED]: % endif diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index 774d42ecbd..b4829dc9c3 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -4,7 +4,7 @@ import urllib import ntpath from medusa import app, helpers, subtitles, sbdatetime, network_timezones - from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED + from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.common import Quality, qualityPresets, statusStrings, Overview from medusa.helper.common import pretty_file_size from medusa.indexers.indexer_api import indexerApi @@ -287,7 +287,7 @@ const startVue = () => { ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % if int(epResult["season"]) != 0: - % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED 
) and app.USE_FAILED_DOWNLOADS: + % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): retry % else: search @@ -296,7 +296,7 @@ const startVue = () => { % else: search % endif - % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]: + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): search subtitles % endif diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako index 84565e913b..ca82f4a6d6 100644 --- a/themes/light/templates/manage_episodeStatuses.mako +++ b/themes/light/templates/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa import common + from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${common.statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[whichStatus]}


% endif Manage episodes with status @@ -42,28 +42,28 @@ Manage episodes with status -

Shows containing ${common.statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[whichStatus]} episodes


<% - if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED: + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): row_class = "good" else: - row_class = common.Overview.overviewStrings[int(whichStatus)] + row_class = Overview.overviewStrings[int(whichStatus)] %> Set checked shows/episodes to diff --git a/themes/light/templates/partials/showheader.mako b/themes/light/templates/partials/showheader.mako index 28a2352dd2..df55c6daa6 100644 --- a/themes/light/templates/partials/showheader.mako +++ b/themes/light/templates/partials/showheader.mako @@ -271,7 +271,7 @@ % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> % endif - % for cur_status in availableStatus + Quality.DOWNLOADED + Quality.ARCHIVED: + % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: % if cur_status not in [DOWNLOADED, ARCHIVED]: % endif From c61d582d8f3fa65b5a84d4a79041e410dee42fc5 Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 21:09:15 +0200 Subject: [PATCH 27/93] Missed one quality compare in series --- medusa/tv/series.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 770f0e1bc1..c9fa2b3d14 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -2268,7 +2268,7 @@ def set_all_episodes_archived(self, final_status_only=False): sql_list = [] for ep_obj in ep_list: with ep_obj.lock: - if ep_obj.status in Quality.DOWNLOADED: + if ep_obj.status == DOWNLOADED: if final_status_only and Quality.should_search(ep_obj.status, ep_obj.quality, self, ep_obj.manually_searched)[0]: continue From 8b990d69c80859792c63aafc55abd269809e9a32 Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 21:44:52 +0200 Subject: [PATCH 28/93] Fix status check in subtitles --- medusa/subtitles.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/medusa/subtitles.py b/medusa/subtitles.py index bf9be0ddf1..444df78050 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -38,7 +38,7 @@ from medusa import app, db, helpers, history from medusa.cache import cache, memory_cache -from medusa.common import Quality, cpu_presets +from medusa.common import cpu_presets, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.helper.common import dateTimeFormat, episode_num, remove_extension, subtitle_extensions from medusa.helper.exceptions import ex from medusa.helpers import is_media_file, is_rar_file @@ -867,7 +867,7 @@ def subtitles_download_in_pp(): # pylint: disable=too-many-locals, too-many-bra logger.debug(u'%s cannot be parsed to an episode', filename) continue - if tv_episode.status not in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST: + if tv_episode.status not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): continue if not tv_episode.series.subtitles: From 602f05d057986ef8523d703ff8536a2650829076 Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 23 May 2018 21:48:04 +0200 Subject: [PATCH 29/93] Reorder subtitles imports --- medusa/subtitles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/subtitles.py b/medusa/subtitles.py index 444df78050..f2e8f469a0 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -38,7 +38,7 @@ from medusa import app, db, helpers, history from medusa.cache import cache, memory_cache -from medusa.common import cpu_presets, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST +from medusa.common import SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, cpu_presets from medusa.helper.common import dateTimeFormat, episode_num, remove_extension, subtitle_extensions from medusa.helper.exceptions import ex from medusa.helpers import is_media_file, is_rar_file From 8a31599d7a365bc7f64c0148ee9dcb6682ed700b Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 24 May 2018 16:34:42 +0200 Subject: [PATCH 30/93] Make UNKNOWN the lowest possible quality --- 
medusa/common.py | 149 ++++-------------- medusa/databases/failed_db.py | 2 +- medusa/databases/main_db.py | 85 ++++++++-- medusa/post_processor.py | 2 - medusa/server/web/home/handler.py | 3 +- medusa/tv/episode.py | 31 +--- medusa/tv/series.py | 2 +- .../slim/views/partials/showheader.mako | 16 +- .../views/vue-components/quality-chooser.mako | 4 +- .../dark/templates/partials/showheader.mako | 16 +- .../vue-components/quality-chooser.mako | 4 +- .../light/templates/partials/showheader.mako | 16 +- .../vue-components/quality-chooser.mako | 4 +- 13 files changed, 145 insertions(+), 189 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index 47f32fb796..f5d5d9ff9f 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -148,34 +148,30 @@ class Quality(object): - """Determine quality and set status codes.""" - - NONE = 0 # 0 - SDTV = 1 # 1 - SDDVD = 1 << 1 # 2 - HDTV = 1 << 2 # 4 - RAWHDTV = 1 << 3 # 8 -- 720p/1080i mpeg2 (trollhd releases) - FULLHDTV = 1 << 4 # 16 -- 1080p HDTV (QCF releases) - HDWEBDL = 1 << 5 # 32 - FULLHDWEBDL = 1 << 6 # 64 -- 1080p web-dl - HDBLURAY = 1 << 7 # 128 - FULLHDBLURAY = 1 << 8 # 256 - UHD_4K_TV = 1 << 9 # 512 -- 2160p aka 4K UHD aka UHD-1 - UHD_4K_WEBDL = 1 << 10 # 1024 - UHD_4K_BLURAY = 1 << 11 # 2048 - UHD_8K_TV = 1 << 12 # 4096 -- 4320p aka 8K UHD aka UHD-2 - UHD_8K_WEBDL = 1 << 13 # 8192 - UHD_8K_BLURAY = 1 << 14 # 16384 - ANYHDTV = HDTV | FULLHDTV # 20 - ANYWEBDL = HDWEBDL | FULLHDWEBDL # 96 - ANYBLURAY = HDBLURAY | FULLHDBLURAY # 384 - - # put these bits at the other end of the spectrum, - # far enough out that they shouldn't interfere - UNKNOWN = 1 << 15 # 32768 + + NA = 0 # 0 + UNKNOWN = 1 # 1 + SDTV = 1 << 1 # 2 + SDDVD = 1 << 2 # 4 + HDTV = 1 << 3 # 8 -- 720p/1080i mpeg2 (trollhd releases) + RAWHDTV = 1 << 4 # 16 -- 1080p HDTV (QCF releases) + FULLHDTV = 1 << 5 # 32 + HDWEBDL = 1 << 6 # 64 -- 1080p web-dl + FULLHDWEBDL = 1 << 7 # 128 + HDBLURAY = 1 << 8 # 256 + FULLHDBLURAY = 1 << 9 # 512 -- 2160p aka 4K UHD aka 
UHD-1 + UHD_4K_TV = 1 << 10 # 1024 + UHD_4K_WEBDL = 1 << 11 # 2048 + UHD_4K_BLURAY = 1 << 12 # 4096 -- 4320p aka 8K UHD aka UHD-2 + UHD_8K_TV = 1 << 13 # 8192 + UHD_8K_WEBDL = 1 << 14 # 16384 + UHD_8K_BLURAY = 1 << 15 # 32768 + ANYHDTV = HDTV | FULLHDTV # 40 + ANYWEBDL = HDWEBDL | FULLHDWEBDL # 192 + ANYBLURAY = HDBLURAY | FULLHDBLURAY # 768 qualityStrings = { - NONE: 'N/A', + NA: 'N/A', UNKNOWN: 'Unknown', SDTV: 'SDTV', SDDVD: 'SD DVD', @@ -195,7 +191,7 @@ class Quality(object): } sceneQualityStrings = { - NONE: 'N/A', + NA: 'N/A', UNKNOWN: 'Unknown', SDTV: '', SDDVD: '', @@ -221,7 +217,7 @@ class Quality(object): } cssClassStrings = { - NONE: 'N/A', + NA: 'na', UNKNOWN: 'Unknown', SDTV: 'SDTV', SDDVD: 'SDDVD', @@ -243,23 +239,6 @@ class Quality(object): ANYBLURAY: 'any-hd' } - @staticmethod - def _get_status_strings(status): - """ - Return string values associated with Status prefix. - - :param status: Status prefix to resolve - :return: Human readable status value - """ - to_return = {} - for quality in Quality.qualityStrings: - if quality is not None: - stat = Quality.statusPrefixes[status] - qual = Quality.qualityStrings[quality] - comp = Quality.composite_status(status, quality) - to_return[comp] = '%s (%s)' % (stat, qual) - return to_return - @staticmethod def combine_qualities(allowed_qualities, preferred_qualities): any_quality = 0 @@ -272,13 +251,9 @@ def combine_qualities(allowed_qualities, preferred_qualities): @staticmethod def split_quality(quality): - if quality is None: - quality = Quality.NONE allowed_qualities = [] preferred_qualities = [] for cur_qual in Quality.qualityStrings: - if cur_qual is None: - cur_qual = Quality.NONE if cur_qual & quality: allowed_qualities.append(cur_qual) if cur_qual << 16 & quality: @@ -289,14 +264,14 @@ def split_quality(quality): @staticmethod def name_quality(name, anime=False, extend=True): """ - Return The quality from an episode File renamed by the application. + Return the quality from an episode filename. 
- If no quality is achieved it will try scene_quality regex + If no quality is achieved it will try scene_quality regex. :param name: to parse :param anime: Boolean to indicate if the show we're resolving is Anime :param extend: boolean to extend methods to try - :return: Quality prefix + :return: Quality """ # Try Scene names first quality = Quality.scene_quality(name, anime) @@ -312,7 +287,7 @@ def name_quality(name, anime=False, extend=True): @staticmethod def scene_quality(name, anime=False): """ - Return The quality from the Scene episode File. + Return the quality from the episode filename with the regex. :param name: Episode filename to analyse :param anime: Boolean to indicate if the show we're resolving is Anime @@ -467,16 +442,6 @@ def quality_from_file_meta(file_path): composite_status_quality = namedtuple('composite_status', ['status', 'quality']) - @staticmethod - def composite_status(status, quality): - if quality is None: - quality = Quality.NONE - return status + 100 * quality - - @staticmethod - def quality_downloaded(status): - return (status - DOWNLOADED) // 100 - @staticmethod def split_composite_status(status): """ @@ -493,7 +458,7 @@ def split_composite_status(status): if status > q * 100: return Quality.composite_status_quality(status - q * 100, q) - return Quality.composite_status_quality(status, Quality.NONE) + return Quality.composite_status_quality(status, Quality.UNKNOWN) @staticmethod def scene_quality_from_name(name, quality): @@ -560,18 +525,6 @@ def scene_quality_from_name(name, quality): return rel_type + codec - @staticmethod - def status_from_name(name, anime=False): - """ - Get a status object from filename. 
- - :param name: Filename to check - :param anime: boolean to enable anime parsing - :return: Composite status/quality object - """ - quality = Quality.name_quality(name, anime) - return Quality.composite_status(DOWNLOADED, quality) - guessit_map = { '720p': { 'HDTV': HDTV, @@ -609,7 +562,6 @@ def status_from_name(name, anime=False): def should_search(cur_status, cur_quality, show_obj, manually_searched): """Return true if that episodes should be search for a better quality. - If cur_quality is Quality.NONE, it will return True as its a invalid quality If cur_quality is Quality.UNKNOWN it will return True only if is not in Allowed (Unknown can be in Allowed) :param cur_status: current status of the episode @@ -654,7 +606,6 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe if preferred quality, then new quality should be higher than existing one AND not be in preferred If new quality is already in preferred then is already final quality. Force (forced search) bypass episode status only or unknown quality - If old quality is Quality.NONE, it will be replaced :param ep_status: current status of the episode :param old_quality: current quality of the episode @@ -667,15 +618,10 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe :param search_type: The search type, that started this method :return: True if the old quality should be replaced with new quality. """ - if ep_status and ep_status not in [DOWNLOADED, SNATCHED, SNATCHED_PROPER]: + if ep_status and ep_status not in (DOWNLOADED, SNATCHED, SNATCHED_PROPER): if not force: return False, 'Episode status is not DOWNLOADED|SNATCHED|SNATCHED PROPER. Ignoring new quality' - # If existing quality is UNKNOWN but Preferred is set, UNKNOWN should be replaced. - if old_quality == Quality.UNKNOWN: - if not (force or preferred_qualities): - return False, 'Existing quality is UNKNOWN. 
Ignoring new quality' - if manually_searched: if not force: # We only allow replace a manual searched episode if is a forced search @@ -694,17 +640,15 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe # If old quality is no longer wanted quality and new quality is wanted, we should replace. return True, 'Existing quality is no longer in any wanted quality lists. Accepting new quality' - if force and download_current_quality: + if download_current_quality and force and new_quality == old_quality: # If we already downloaded quality, just redownload it as long is still part of the wanted qualities - return new_quality == old_quality, 'Redownloading same quality' + return True, 'Re-downloading same quality' if preferred_qualities: # Don't replace because old quality is already best quality. if old_quality in preferred_qualities: return False, 'Existing quality is already a preferred quality. Ignoring new quality' - # Old quality is not final. Check if we should replace: - # Replace if preferred quality if new_quality in preferred_qualities: return True, 'New quality is preferred. Accepting new quality' @@ -761,11 +705,11 @@ def from_guessit(guess): @staticmethod def to_guessit(quality): - """Return a guessit dict containing 'screen_size and format' from a Quality (status). + """Return a guessit dict containing 'screen_size and format' from a Quality. This was previously a composite status. But status/quality have been separated into their own attributes. 
- :param quality: a quality status + :param quality: a quality :type quality: int :return: dict {'screen_size': , 'format': } :rtype: dict (str, str) @@ -807,28 +751,6 @@ def to_guessit_screen_size(quality): if quality & key: return value - DOWNLOADED = None - SNATCHED = None - SNATCHED_PROPER = None - FAILED = None - SNATCHED_BEST = None - ARCHIVED = None - - -Quality.DOWNLOADED = [Quality.composite_status(DOWNLOADED, x) for x in Quality.qualityStrings if x is not None] -Quality.SNATCHED = [Quality.composite_status(SNATCHED, x) for x in Quality.qualityStrings if x is not None] -Quality.SNATCHED_BEST = [Quality.composite_status(SNATCHED_BEST, x) for x in Quality.qualityStrings if x is not None] -Quality.SNATCHED_PROPER = [Quality.composite_status(SNATCHED_PROPER, x) for x in Quality.qualityStrings if x is not None] -Quality.FAILED = [Quality.composite_status(FAILED, x) for x in Quality.qualityStrings if x is not None] -Quality.ARCHIVED = [Quality.composite_status(ARCHIVED, x) for x in Quality.qualityStrings if x is not None] -Quality.WANTED = [Quality.composite_status(WANTED, x) for x in Quality.qualityStrings if x is not None] - -Quality.DOWNLOADED.sort() -Quality.SNATCHED.sort() -Quality.SNATCHED_BEST.sort() -Quality.SNATCHED_PROPER.sort() -Quality.FAILED.sort() -Quality.ARCHIVED.sort() HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], []) HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], []) @@ -840,9 +762,6 @@ def to_guessit_screen_size(quality): UHD = Quality.combine_qualities([UHD_4K, UHD_8K], []) ANY = Quality.combine_qualities([SD, HD, UHD], []) -# legacy template, cant remove due to reference in main_db upgrade? 
-BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV]) - qualityPresets = ( ANY, SD, diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 32c4140697..92b4721580 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -62,7 +62,7 @@ def test(self): return self.hasColumn('history', 'old_status') def execute(self): - self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE) + self.addColumn('history', 'old_status', 'NUMERIC', Quality.NA) self.addColumn('history', 'showid', 'NUMERIC', '-1') self.addColumn('history', 'season', 'NUMERIC', '-1') self.addColumn('history', 'episode', 'NUMERIC', '-1') diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 10b6c36218..b1a085c953 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -22,7 +22,7 @@ MAX_DB_VERSION = 44 # Used to check when checking for updates -CURRENT_MINOR_DB_VERSION = 10 +CURRENT_MINOR_DB_VERSION = 11 class MainSanityCheck(db.DBSanityCheck): @@ -39,8 +39,8 @@ def check(self): # self.convert_archived_to_compound() self.fix_subtitle_reference() self.clean_null_indexer_mappings() - self.fix_remove_status_unknown() self.fix_status_qualities() + self.update_status_unknown() def clean_null_indexer_mappings(self): log.debug(u'Checking for null indexer mappings') @@ -264,23 +264,24 @@ def fix_status_qualities(self): """ Check for a status bigger than 12, and translate to a status + quality, as these are old composite statuses. + This can be removed when all code that creates composite statuses has been migrated. Until then this can be used to keep the DB sane. 
""" - log.info(u'Convert composite statussus in tv_episodes to status + quality.') + log.info(u'Convert composite statuses in tv_episodes to status + quality.') sql_results = self.connection.select("SELECT status FROM tv_episodes where status > 12 GROUP BY status") for status in sql_results: - log.info(u'Split composite status in to ep_status and ep_quality for %s', status[b'status']) + log.info(u'Split composite status into status and quality for %s', status[b'status']) split = common.Quality.split_composite_status(status[b'status']) self.connection.select( "UPDATE tv_episodes SET status = ?, quality = ? WHERE status = ?", [split.status, split.quality, status[b'status']] ) - def fix_remove_status_unknown(self): - """Changes any `UNKNOWN` quality to 0.""" - log.info(u'Remove status UNKONWN from tv_episodes') - self.connection.select("UPDATE tv_episodes SET quality = 0 WHERE quality = 32768") + def update_status_unknown(self): + """Changes any `UNKNOWN` quality to 1.""" + log.info(u'Update status UNKONWN from tv_episodes') + self.connection.select("UPDATE tv_episodes SET quality = 1 WHERE quality = 65536") def backupDatabase(version): @@ -779,9 +780,8 @@ def execute(self): self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") - log.info(u'Split composite status in to ep_status and ep_quality') + log.info(u'Split composite status into ep_status and ep_quality') sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status") - for status in sql_results: split = common.Quality.split_composite_status(status[b'status']) self.connection.action("UPDATE tv_episodes SET ep_status = ?, ep_quality = ? 
WHERE status = ?", @@ -819,10 +819,73 @@ def execute(self): log.info(u'Remove the quality from the action field, as this is a composite status') sql_results = self.connection.select("SELECT action FROM history GROUP BY action") - for status in sql_results: split = common.Quality.split_composite_status(status[b'action']) self.connection.action("UPDATE history SET action = ? WHERE action = ?", [split.status, status[b'action']]) self.inc_minor_version() + + +class ShiftQualities(AddSeparatedStatusQualityFields): + """Shift all qualities one place to the left.""" + + def test(self): + """Test if the version is at least 44.11""" + return self.connection.version >= (44, 11) + + def execute(self): + backupDatabase(self.connection.version) + + self.shift_tv_qualities() + self.shift_episode_qualities() + self.shift_history_qualities() + self.inc_minor_version() + + def shift_tv_qualities(self): + """ + Shift all qualities << 1. + + This makes it possible to set UNKNOWN as 1, making it the lowest quality. + """ + log.info('Shift qualities in tv_shows one place to the left.') + sql_results = self.connection.select("SELECT quality FROM tv_shows") + for result in sql_results: + quality = result[b'quality'] + new_quality = quality << 1 + self.connection.select( + "UPDATE tv_shows SET quality = ? WHERE quality = ?", + [new_quality, quality] + ) + + def shift_episode_qualities(self): + """ + Shift all qualities << 1. + + This makes it possible to set UNKNOWN as 1, making it the lowest quality. + """ + log.info('Shift qualities in tv_episodes one place to the left.') + sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0") + for result in sql_results: + quality = result[b'quality'] + new_quality = quality << 1 + self.connection.select( + "UPDATE tv_episodes SET quality = ? WHERE quality = ?", + [new_quality, quality] + ) + + def shift_history_qualities(self): + """ + Shift all qualities << 1. 
+ + This makes it possible to set UNKNOWN as 1, making it the lowest quality. + """ + log.info('Shift qualities in history one place to the left.') + sql_results = self.connection.select("SELECT quality FROM history") + for result in sql_results: + quality = result[b'quality'] + new_quality = quality << 1 + self.connection.select( + "UPDATE history SET quality = ? WHERE quality = ?", + [new_quality, quality] + ) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 66cdca7a1e..372854a6ed 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -890,8 +890,6 @@ def _should_process(current_quality, new_quality, allowed, preferred): :param preferred: Qualities that are preferred :return: Tuple with Boolean if the quality should be processed and String with reason if should process or not """ - if current_quality is common.Quality.NONE: - return False, 'There is no current quality. Skipping as we can only replace existing qualities' if new_quality in preferred: if current_quality in preferred: if new_quality > current_quality: diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 9e275bec91..6c31d9396a 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1916,6 +1916,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire else: return self._genericMessage('Error', error_message) + status = int(status) if status not in statusStrings: error_message = 'Invalid status' if direct: @@ -1981,7 +1982,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire ep_obj.status in snatched_qualities + [DOWNLOADED] and os.path.isfile(ep_obj.location)): logger.log('Refusing to change status of {series} {episode} to DOWNLOADED' - ' because it\'s not SNATCHED/DOWNLOADED'.format( + ' because it\'s not SNATCHED/DOWNLOADED or the file is missing'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) continue diff --git 
a/medusa/tv/episode.py b/medusa/tv/episode.py index 9807ac0d4c..f5df1d69d7 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -245,7 +245,7 @@ def __init__(self, series, season, episode, filepath=''): self.episode = episode self.absolute_number = 0 self.description = '' - self.subtitles = list() + self.subtitles = [] self.subtitles_searchcount = 0 self.subtitles_lastsearch = str(datetime.min) self.airdate = date.fromordinal(1) @@ -914,21 +914,10 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season 'status': statusStrings[self.status], } ) - # We only change the episode's status if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED + # We only change the episode's state if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): if self.status not in [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED]: - old_status = self.status - self.status = Quality.status_from_name(self.location, anime=self.series.is_anime) - log.debug( - '{id}: {series} {ep} status changed from {old_status} to {new_status}' - ' as current status is not SNATCHED|DOWNLOADED|ARCHIVED', { - 'id': self.series.series_id, - 'series': self.series.name, - 'ep': episode_num(season, episode), - 'old_status': statusStrings[old_status], - 'new_status': statusStrings[self.status], - } - ) + self.update_state(self.location) else: log.debug( '{id}: {series} {ep} status untouched: {status}', { @@ -965,15 +954,7 @@ def __load_from_nfo(self, location): if self.location != '': if self.status == UNSET and helpers.is_media_file(self.location): - self.status = Quality.status_from_name(self.location, anime=self.series.is_anime) - log.debug( - '{id}: {series} {ep} status changed from UNSET to {new_status}', { - 'id': self.series.series_id, - 'series': self.series.name, - 'ep': episode_num(self.season, self.episode), - 'new_status': statusStrings[self.status], - } - ) + 
self.update_state(self.location) nfo_file = replace_extension(self.location, 'nfo') log.debug('{id}: Using NFO name {nfo}', @@ -1986,8 +1967,8 @@ def airdate_modify_stamp(self): } ) - def update_status(self, filepath): - """Update the episode status according to the file information. + def update_state(self, filepath): + """Update the episode state (status and quality) according to the file information. The status should only be changed if either the size or the filename changed. :param filepath: Path to the new episode file. diff --git a/medusa/tv/series.py b/medusa/tv/series.py index c9fa2b3d14..a36336bcc1 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1327,7 +1327,7 @@ def make_ep_from_file(self, filepath): continue else: - cur_ep.update_status(filepath) + cur_ep.update_state(filepath) with cur_ep.lock: cur_ep.check_for_meta_files() diff --git a/themes-default/slim/views/partials/showheader.mako b/themes-default/slim/views/partials/showheader.mako index df55c6daa6..1d3c55b421 100644 --- a/themes-default/slim/views/partials/showheader.mako +++ b/themes-default/slim/views/partials/showheader.mako @@ -173,12 +173,12 @@ % if show.quality in qualityPresets: ${renderQualityPill(show.quality)} % else: - % if allowed_qualities: - Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} - % endif - % if preferred_qualities: - Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} - % endif + % if allowed_qualities: + Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} + % endif + % if preferred_qualities: + Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} + % endif % endif % if show.network and show.airs: Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network} @@ -269,12 +269,10 @@ <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: - <% availableStatus.remove(FAILED) %> + <% availableStatus.remove(FAILED) %> % endif % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: - % if cur_status not in [DOWNLOADED, ARCHIVED]: - % endif % endfor diff --git a/themes-default/slim/views/vue-components/quality-chooser.mako b/themes-default/slim/views/vue-components/quality-chooser.mako index cff809612e..809d4fa2fb 100644 --- a/themes-default/slim/views/vue-components/quality-chooser.mako +++ b/themes-default/slim/views/vue-components/quality-chooser.mako @@ -128,11 +128,11 @@ Vue.component('quality-chooser', { }, allowedQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE}); + .filter(val => val > ${Quality.NA}); }, preferredQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN}); + .filter(val => val > ${Quality.NA}); } }, asyncComputed: { diff --git a/themes/dark/templates/partials/showheader.mako b/themes/dark/templates/partials/showheader.mako index df55c6daa6..1d3c55b421 100644 --- a/themes/dark/templates/partials/showheader.mako +++ b/themes/dark/templates/partials/showheader.mako @@ -173,12 +173,12 @@ % if show.quality in qualityPresets: ${renderQualityPill(show.quality)} % else: - % if allowed_qualities: - Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} - % endif - % if preferred_qualities: - Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} - % endif + % if allowed_qualities: + Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} + % endif + % if preferred_qualities: + Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} + % endif % endif % if show.network and show.airs: Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network} @@ -269,12 +269,10 @@ <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: - <% availableStatus.remove(FAILED) %> + <% availableStatus.remove(FAILED) %> % endif % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: - % if cur_status not in [DOWNLOADED, ARCHIVED]: - % endif % endfor diff --git a/themes/dark/templates/vue-components/quality-chooser.mako b/themes/dark/templates/vue-components/quality-chooser.mako index cff809612e..809d4fa2fb 100644 --- a/themes/dark/templates/vue-components/quality-chooser.mako +++ b/themes/dark/templates/vue-components/quality-chooser.mako @@ -128,11 +128,11 @@ Vue.component('quality-chooser', { }, allowedQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE}); + .filter(val => val > ${Quality.NA}); }, preferredQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN}); + .filter(val => val > ${Quality.NA}); } }, asyncComputed: { diff --git a/themes/light/templates/partials/showheader.mako b/themes/light/templates/partials/showheader.mako index df55c6daa6..1d3c55b421 100644 --- a/themes/light/templates/partials/showheader.mako +++ b/themes/light/templates/partials/showheader.mako @@ -173,12 +173,12 @@ % if show.quality in qualityPresets: ${renderQualityPill(show.quality)} % else: - % if allowed_qualities: - Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} - % endif - % if preferred_qualities: - Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} - % endif + % if allowed_qualities: + Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'
' if preferred_qualities else ''} + % endif + % if preferred_qualities: + Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])} + % endif % endif % if show.network and show.airs: Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network} @@ -269,12 +269,10 @@ <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: - <% availableStatus.remove(FAILED) %> + <% availableStatus.remove(FAILED) %> % endif % for cur_status in availableStatus + [DOWNLOADED, ARCHIVED]: - % if cur_status not in [DOWNLOADED, ARCHIVED]: - % endif % endfor diff --git a/themes/light/templates/vue-components/quality-chooser.mako b/themes/light/templates/vue-components/quality-chooser.mako index cff809612e..809d4fa2fb 100644 --- a/themes/light/templates/vue-components/quality-chooser.mako +++ b/themes/light/templates/vue-components/quality-chooser.mako @@ -128,11 +128,11 @@ Vue.component('quality-chooser', { }, allowedQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE}); + .filter(val => val > ${Quality.NA}); }, preferredQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN}); + .filter(val => val > ${Quality.NA}); } }, asyncComputed: { From 9fc29b025d6faac2226de7a4f2550bcc6203dd2e Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 24 May 2018 17:12:25 +0200 Subject: [PATCH 31/93] Hide N/A from displayshow and backlog, fix quality comments --- medusa/common.py | 18 +++++++++--------- themes-default/slim/views/displayShow.mako | 6 +++++- .../slim/views/manage_backlogOverview.mako | 6 +++++- themes/dark/templates/displayShow.mako | 6 +++++- .../dark/templates/manage_backlogOverview.mako | 6 +++++- themes/light/templates/displayShow.mako | 6 +++++- .../templates/manage_backlogOverview.mako | 6 +++++- 7 files changed, 39 insertions(+), 15 
deletions(-) diff --git a/medusa/common.py b/medusa/common.py index f5d5d9ff9f..cc4dd0aaa4 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -153,17 +153,17 @@ class Quality(object): UNKNOWN = 1 # 1 SDTV = 1 << 1 # 2 SDDVD = 1 << 2 # 4 - HDTV = 1 << 3 # 8 -- 720p/1080i mpeg2 (trollhd releases) - RAWHDTV = 1 << 4 # 16 -- 1080p HDTV (QCF releases) - FULLHDTV = 1 << 5 # 32 - HDWEBDL = 1 << 6 # 64 -- 1080p web-dl - FULLHDWEBDL = 1 << 7 # 128 + HDTV = 1 << 3 # 8 + RAWHDTV = 1 << 4 # 16 -- 720p/1080i mpeg2 + FULLHDTV = 1 << 5 # 32 -- 1080p HDTV + HDWEBDL = 1 << 6 # 64 + FULLHDWEBDL = 1 << 7 # 128 -- 1080p web-dl HDBLURAY = 1 << 8 # 256 - FULLHDBLURAY = 1 << 9 # 512 -- 2160p aka 4K UHD aka UHD-1 - UHD_4K_TV = 1 << 10 # 1024 + FULLHDBLURAY = 1 << 9 # 512 + UHD_4K_TV = 1 << 10 # 1024 -- 2160p aka 4K UHD aka UHD-1 UHD_4K_WEBDL = 1 << 11 # 2048 - UHD_4K_BLURAY = 1 << 12 # 4096 -- 4320p aka 8K UHD aka UHD-2 - UHD_8K_TV = 1 << 13 # 8192 + UHD_4K_BLURAY = 1 << 12 # 4096 + UHD_8K_TV = 1 << 13 # 8192 -- 4320p aka 8K UHD aka UHD-2 UHD_8K_WEBDL = 1 << 14 # 16384 UHD_8K_BLURAY = 1 << 15 # 32768 ANYHDTV = HDTV | FULLHDTV # 40 diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index b4829dc9c3..787035eec7 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -284,7 +284,11 @@ const startVue = () => { cur_status = int(epResult['status']) cur_quality = int(epResult['quality']) %> - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % if cur_quality != Quality.NA: + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % else: + ${statusStrings[cur_status]} + % endif % if int(epResult["season"]) != 0: % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): diff --git a/themes-default/slim/views/manage_backlogOverview.mako b/themes-default/slim/views/manage_backlogOverview.mako index 
36d2aa5b6c..3f89da76fb 100644 --- a/themes-default/slim/views/manage_backlogOverview.mako +++ b/themes-default/slim/views/manage_backlogOverview.mako @@ -151,7 +151,11 @@ const startVue = () => { ${cur_result['episode_string']} - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % if old_quality != Quality.NA: + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % else: + ${statusStrings[old_status]} + % endif ${cur_result["name"]} diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index b4829dc9c3..787035eec7 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -284,7 +284,11 @@ const startVue = () => { cur_status = int(epResult['status']) cur_quality = int(epResult['quality']) %> - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % if cur_quality != Quality.NA: + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % else: + ${statusStrings[cur_status]} + % endif % if int(epResult["season"]) != 0: % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): diff --git a/themes/dark/templates/manage_backlogOverview.mako b/themes/dark/templates/manage_backlogOverview.mako index 36d2aa5b6c..3f89da76fb 100644 --- a/themes/dark/templates/manage_backlogOverview.mako +++ b/themes/dark/templates/manage_backlogOverview.mako @@ -151,7 +151,11 @@ const startVue = () => { ${cur_result['episode_string']} - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % if old_quality != Quality.NA: + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % else: + ${statusStrings[old_status]} + % endif ${cur_result["name"]} diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index b4829dc9c3..787035eec7 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -284,7 +284,11 @@ 
const startVue = () => { cur_status = int(epResult['status']) cur_quality = int(epResult['quality']) %> - ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % if cur_quality != Quality.NA: + ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} + % else: + ${statusStrings[cur_status]} + % endif % if int(epResult["season"]) != 0: % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): diff --git a/themes/light/templates/manage_backlogOverview.mako b/themes/light/templates/manage_backlogOverview.mako index 36d2aa5b6c..3f89da76fb 100644 --- a/themes/light/templates/manage_backlogOverview.mako +++ b/themes/light/templates/manage_backlogOverview.mako @@ -151,7 +151,11 @@ const startVue = () => { ${cur_result['episode_string']} - ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % if old_quality != Quality.NA: + ${statusStrings[old_status]} ${renderQualityPill(old_quality)} + % else: + ${statusStrings[old_status]} + % endif ${cur_result["name"]} From b874a78be345a5729500e83fe4a2ec5df32f26e1 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 24 May 2018 18:13:54 +0200 Subject: [PATCH 32/93] Make default quality explicit --- medusa/tv/episode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index f5df1d69d7..511def30d9 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -252,7 +252,7 @@ def __init__(self, series, season, episode, filepath=''): self.hasnfo = False self.hastbn = False self.status = UNSET - self.quality = 0 + self.quality = Quality.NA self.file_size = 0 self.release_name = '' self.is_proper = False From 98aa5e6952a918b8c75746a92f7628116926d92b Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 24 May 2018 20:27:16 +0200 Subject: [PATCH 33/93] Rename update_state to update_status_quality --- medusa/tv/episode.py | 8 ++++---- medusa/tv/series.py | 2 +- 2 files changed, 5 insertions(+), 5 
deletions(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 511def30d9..ddea713194 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -917,7 +917,7 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season # We only change the episode's state if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): if self.status not in [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED]: - self.update_state(self.location) + self.update_status_quality(self.location) else: log.debug( '{id}: {series} {ep} status untouched: {status}', { @@ -954,7 +954,7 @@ def __load_from_nfo(self, location): if self.location != '': if self.status == UNSET and helpers.is_media_file(self.location): - self.update_state(self.location) + self.update_status_quality(self.location) nfo_file = replace_extension(self.location, 'nfo') log.debug('{id}: Using NFO name {nfo}', @@ -1967,8 +1967,8 @@ def airdate_modify_stamp(self): } ) - def update_state(self, filepath): - """Update the episode state (status and quality) according to the file information. + def update_status_quality(self, filepath): + """Update the episode status and quality according to the file information. The status should only be changed if either the size or the filename changed. :param filepath: Path to the new episode file. 
diff --git a/medusa/tv/series.py b/medusa/tv/series.py index a36336bcc1..8eb0fc1544 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1327,7 +1327,7 @@ def make_ep_from_file(self, filepath): continue else: - cur_ep.update_state(filepath) + cur_ep.update_status_quality(filepath) with cur_ep.lock: cur_ep.check_for_meta_files() From 3fabd8d75230ec682926445bbfd3b851191a7529 Mon Sep 17 00:00:00 2001 From: Dario Date: Fri, 25 May 2018 12:58:20 +0200 Subject: [PATCH 34/93] Fix old statusPrefixes reference --- medusa/server/api/v1/core.py | 2 +- medusa/tv/episode.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index 43cc4aac1f..77d952867a 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -2721,7 +2721,7 @@ def run(self): if status == 'total': episodes_stats['total'] = episode_status_counts_total[status] continue - status_string = Quality.statusPrefixes[status].lower().replace(' ', '_').replace('(', '').replace(')', '') + status_string = statusStrings[status].lower().replace(' ', '_').replace('(', '').replace(')', '') episodes_stats[status_string] = episode_status_counts_total[status] return _responds(RESULT_SUCCESS, episodes_stats) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index ddea713194..2f1fe7258d 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -2014,8 +2014,8 @@ def update_status_quality(self, filepath): "{name}: Setting the status from '{status_old}' to '{status_new}' and" " quality '{quality_old}' to '{quality_new}' based on file: {filepath}", { 'name': self.series.name, - 'status_old': Quality.statusPrefixes[old_status], - 'status_new': Quality.statusPrefixes[new_status], + 'status_old': statusStrings[old_status], + 'status_new': statusStrings[new_status], 'quality_old': Quality.qualityStrings[old_quality], 'quality_new': Quality.qualityStrings[new_quality], 'filepath': filepath, @@ -2026,7 +2026,7 @@ def 
update_status_quality(self, filepath): "{name}: Not changing current status '{status_old}' or" " quality '{quality_old}' based on file: {filepath}", { 'name': self.series.name, - 'status_old': Quality.statusPrefixes[old_status], + 'status_old': statusStrings[old_status], 'quality_old': Quality.qualityStrings[old_quality], 'filepath': filepath, } From d514320a0afba93b466ed61016bd2aeed6959836 Mon Sep 17 00:00:00 2001 From: Dario Date: Fri, 25 May 2018 13:34:12 +0200 Subject: [PATCH 35/93] Update comment --- medusa/tv/episode.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 2f1fe7258d..4e43b6af90 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -914,7 +914,7 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season 'status': statusStrings[self.status], } ) - # We only change the episode's state if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED + # Update the episode's status/quality if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): if self.status not in [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED]: self.update_status_quality(self.location) From ca577f29efef4562538875e1d0d8aafe382d40bb Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 27 May 2018 00:20:05 +0200 Subject: [PATCH 36/93] Add GROUP BY, to make the transition queries a little bit more efficient. --- medusa/databases/main_db.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index b1a085c953..b58ca69aa0 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -849,7 +849,7 @@ def shift_tv_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. 
""" log.info('Shift qualities in tv_shows one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_shows") + sql_results = self.connection.select("SELECT quality FROM tv_shows GROUP BY quality") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 @@ -865,7 +865,7 @@ def shift_episode_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in tv_episodes one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0") + sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 @@ -881,7 +881,7 @@ def shift_history_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in history one place to the left.') - sql_results = self.connection.select("SELECT quality FROM history") + sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 From 6aa5a9476b9fc3b99974d95797399afe028a6e11 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 27 May 2018 00:51:38 +0200 Subject: [PATCH 37/93] Transform failed.db/history: old_status to status + quality fields --- medusa/databases/failed_db.py | 36 +++++++++++++++++++++++++++++++++++ medusa/failed_history.py | 10 +++++----- 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 92b4721580..249cab23a0 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -111,3 +111,39 @@ def execute(self): self.connection.action( 'UPDATE history SET indexer_id = ? 
WHERE showid = ?', [indexer_id, series_id] ) + +class UpdateHistoryTableQuality(AddIndexerIds): + """ + Add the quality field and separate + """ + def test(self): + """Test if the table history already has the column quality.""" + return self.hasColumn('history', 'quality') + + def execute(self): + """ + Add columns status and quality. + + Translate composite status/quality from old_status to the new fields. + """ + log.info(u'Transforming old_status (composite) to separated fields status + quality.') + self.connection.action('DROP TABLE IF EXISTS new_history;') + + self.connection.action('CREATE TABLE new_history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT, ' + 'status NUMERIC DEFAULT -1, quality NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1, ' + 'season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1, indexer_id NUMERIC)') + + self.connection.action('INSERT INTO new_history (date, size, release, provider, ' + 'status, quality, showid, season, episode, indexer_id) ' + 'SELECT date, size, release, provider, ' + 'old_status, -1, showid, season, episode, indexer_id ' + 'FROM history;') + self.connection.action('DROP TABLE IF EXISTS history;') + self.connection.action('ALTER TABLE new_history RENAME TO history;') + self.connection.action('DROP TABLE IF EXISTS new_history;') + + sql_results = self.connection.select('SELECT status FROM history GROUP BY status') + for result in sql_results: + split = Quality.split_composite_status(result[b'status']) + self.connection.action('UPDATE history SET status = ?, quality = ? 
WHERE status = ?', + [split.status, split.quality, result[b'status']]) diff --git a/medusa/failed_history.py b/medusa/failed_history.py index ca85003421..5dac98c390 100644 --- a/medusa/failed_history.py +++ b/medusa/failed_history.py @@ -145,7 +145,7 @@ def revert_episode(ep_obj): """Restore the episodes of a failed download to their original state.""" failed_db_con = db.DBConnection('failed.db') sql_results = failed_db_con.select( - 'SELECT episode, old_status ' + 'SELECT episode, status, quality ' 'FROM history ' 'WHERE showid=?' ' AND indexer_id=?' @@ -160,7 +160,7 @@ def revert_episode(ep_obj): (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode))) with ep_obj.lock: if ep_obj.episode in history_eps: - ep_obj.status = history_eps[ep_obj.episode]['old_status'] + ep_obj.status = history_eps[ep_obj.episode]['status'] logger.log(u'Episode have a previous status to revert. Setting it back to {0}'.format (statusStrings[ep_obj.status]), logger.DEBUG) else: @@ -220,10 +220,10 @@ def log_snatch(search_result): failed_db_con.action( 'INSERT INTO history ' '(date, size, release, provider, showid,' - ' season, episode, old_status, indexer_id)' - 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', + ' season, episode, status, quality, indexer_id)' + 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [log_date, search_result.size, release, provider, show_obj.indexerid, - episode.season, episode.episode, episode.status, show_obj.indexer] + episode.season, episode.episode, episode.status, episode.quality, show_obj.indexer] ) From 0216f5892bf8ffeaab5cb35eb385bfe7c63c44de Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 27 May 2018 16:12:26 +0200 Subject: [PATCH 38/93] Fix bug in failed.db history migration. 
--- medusa/databases/failed_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 249cab23a0..bc68948aaa 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -59,7 +59,7 @@ class HistoryStatus(History): """Store episode status before snatch to revert to if necessary.""" def test(self): - return self.hasColumn('history', 'old_status') + return self.hasColumn('history', 'old_status') and self.hasColumn('history', 'showid') def execute(self): self.addColumn('history', 'old_status', 'NUMERIC', Quality.NA) From f71787f83ccb8cad464e7131ed5c9443c9fe8400 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 27 May 2018 18:44:09 +0200 Subject: [PATCH 39/93] Finish failed_db upgrade --- medusa/databases/failed_db.py | 57 ++++++++++++++++++++++++++++++++-- medusa/databases/main_db.py | 46 ++++++++++++--------------- medusa/databases/utils.py | 24 ++++++++++++++ medusa/db.py | 37 +++++++++++----------- medusa/server/api/v2/config.py | 2 +- 5 files changed, 117 insertions(+), 49 deletions(-) create mode 100644 medusa/databases/utils.py diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index bc68948aaa..fb10f9cf0d 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -6,6 +6,7 @@ from medusa import db from medusa.common import Quality +from medusa.databases import utils from medusa.logger.adapters.style import BraceAdapter @@ -59,11 +60,11 @@ class HistoryStatus(History): """Store episode status before snatch to revert to if necessary.""" def test(self): - return self.hasColumn('history', 'old_status') and self.hasColumn('history', 'showid') + return self.hasColumn('history', 'showid') def execute(self): - self.addColumn('history', 'old_status', 'NUMERIC', Quality.NA) self.addColumn('history', 'showid', 'NUMERIC', '-1') + self.addColumn('history', 'old_status', 'NUMERIC', Quality.NA) self.addColumn('history', 'season', 
'NUMERIC', '-1') self.addColumn('history', 'episode', 'NUMERIC', '-1') @@ -112,15 +113,23 @@ def execute(self): 'UPDATE history SET indexer_id = ? WHERE showid = ?', [indexer_id, series_id] ) + class UpdateHistoryTableQuality(AddIndexerIds): """ - Add the quality field and separate + Add the quality field and separate status from quality """ + def test(self): """Test if the table history already has the column quality.""" return self.hasColumn('history', 'quality') def execute(self): + utils.backup_database(self.connection.path, self.connection.version) + + self.translate_status() + self.inc_major_version() + + def translate_status(self): """ Add columns status and quality. @@ -147,3 +156,45 @@ def execute(self): split = Quality.split_composite_status(result[b'status']) self.connection.action('UPDATE history SET status = ?, quality = ? WHERE status = ?', [split.status, split.quality, result[b'status']]) + + def inc_major_version(self): + major_version, minor_version = self.connection.version + major_version += 1 + self.connection.action("UPDATE db_version SET db_version = ?", [major_version]) + return self.connection.version + + +class ShiftQualities(UpdateHistoryTableQuality): + """Shift all qualities one place to the left.""" + + def test(self): + """Test if the version is at least 3.""" + return self.connection.version >= (3, None) + + def execute(self): + utils.backup_database(self.connection.path, self.connection.version) + + self.shift_history_qualities() + self.update_status_unknown() + self.inc_major_version() + + def shift_history_qualities(self): + """ + Shift all qualities << 1. + + This makes it possible to set UNKNOWN as 1, making it the lowest quality. 
+ """ + log.info('Shift qualities in history one place to the left.') + sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality") + for result in sql_results: + quality = result[b'quality'] + new_quality = quality << 1 + self.connection.select( + "UPDATE history SET quality = ? WHERE quality = ?", + [new_quality, quality] + ) + + def update_status_unknown(self): + """Changes any `UNKNOWN` quality to 1.""" + log.info(u'Update status UNKONWN from tv_episodes') + self.connection.select("UPDATE history SET quality = 1 WHERE quality = 65536") diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index b58ca69aa0..7d7258c68f 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -7,7 +7,8 @@ import sys import warnings -from medusa import common, db, helpers, subtitles +from medusa import common, db, subtitles +from medusa.databases import utils from medusa.helper.common import dateTimeFormat from medusa.indexers.indexer_config import STATUS_MAP from medusa.logger.adapters.style import BraceAdapter @@ -185,11 +186,11 @@ def fix_missing_table_indexes(self): def fix_unaired_episodes(self): - curDate = datetime.date.today() + cur_date = datetime.date.today() sql_results = self.connection.select( "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND status in (?, ?) 
AND season > 0", - [curDate.toordinal(), common.SKIPPED, common.WANTED]) + [cur_date.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: log.info(u'Fixing unaired episode status for episode_id: {0!s}', @@ -284,15 +285,6 @@ def update_status_unknown(self): self.connection.select("UPDATE tv_episodes SET quality = 1 WHERE quality = 65536") -def backupDatabase(version): - log.info(u'Backing up database before upgrade') - if not helpers.backup_versioned_file(db.dbFilename(), version): - log.error(u'Database backup failed, abort upgrading database') - sys.exit(1) - else: - log.info(u'Proceeding with upgrade') - - # ====================== # = Main DB Migrations = # ====================== @@ -357,7 +349,7 @@ def test(self): return self.checkDBVersion() >= 40 def execute(self): - backupDatabase(self.checkDBVersion()) + utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Adding column version to tv_episodes and history') self.addColumn("tv_episodes", "version", "NUMERIC", "-1") @@ -372,7 +364,7 @@ def test(self): return self.checkDBVersion() >= 41 def execute(self): - backupDatabase(self.checkDBVersion()) + utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Adding column default_ep_status to tv_shows') self.addColumn("tv_shows", "default_ep_status", "NUMERIC", "-1") @@ -385,7 +377,7 @@ def test(self): return self.checkDBVersion() >= 42 def execute(self): - backupDatabase(self.checkDBVersion()) + utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Converting column indexer and default_ep_status field types to numeric') self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows") @@ -418,7 +410,7 @@ def inc_minor_version(self): return self.connection.version def execute(self): - backupDatabase(self.checkDBVersion()) + utils.backup_database(self.connection.path, self.checkDBVersion()) log.info(u'Add minor version numbers to database') self.addColumn(b'db_version', 
b'db_minor_version') @@ -445,7 +437,7 @@ def execute(self): """ Updates the version until 44.1 """ - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Test major and minor version updates database') self.inc_major_version() @@ -467,7 +459,7 @@ def execute(self): """ Updates the version until 44.2 and adds proper_tags column """ - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) if not self.hasColumn('history', 'proper_tags'): log.info(u'Adding column proper_tags to history') @@ -493,7 +485,7 @@ def execute(self): """ Updates the version until 44.3 and adds manually_searched columns """ - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) if not self.hasColumn('history', 'manually_searched'): log.info(u'Adding column manually_searched to history') @@ -519,7 +511,7 @@ def test(self): return self.connection.version >= (44, 4) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding column info_hash in history') if not self.hasColumn("history", "info_hash"): @@ -537,7 +529,7 @@ def test(self): return self.connection.version >= (44, 5) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding column plot in imdb_info') if not self.hasColumn('imdb_info', 'plot'): @@ -559,7 +551,7 @@ def test(self): return self.connection.version >= (44, 6) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u"Adding column size in history") if not self.hasColumn("history", "size"): @@ -576,7 +568,7 @@ def test(self): return self.connection.version >= (44, 7) def execute(self): - backupDatabase(self.connection.version) + 
utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding PK to mindexer column in indexer_mapping table') self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") @@ -598,7 +590,7 @@ def test(self): return self.connection.version >= (44, 8) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Make indexer and indexer_id as INTEGER in tv_episodes table') self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") @@ -632,7 +624,7 @@ def test(self): return self.connection.version >= (44, 9) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Adding column indexer_id in history') if not self.hasColumn('history', 'indexer_id'): @@ -740,7 +732,7 @@ def test(self): return self.connection.version >= (44, 10) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) log.info(u'Dropping the unique index on idx_sta_epi_air') self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_air') @@ -835,7 +827,7 @@ def test(self): return self.connection.version >= (44, 11) def execute(self): - backupDatabase(self.connection.version) + utils.backup_database(self.connection.path, self.connection.version) self.shift_tv_qualities() self.shift_episode_qualities() diff --git a/medusa/databases/utils.py b/medusa/databases/utils.py new file mode 100644 index 0000000000..e14bdadf44 --- /dev/null +++ b/medusa/databases/utils.py @@ -0,0 +1,24 @@ +# coding=utf-8 + +"""General database utility functions.""" +from __future__ import unicode_literals + +import logging +import sys + +from medusa import helpers +from medusa.logger.adapters.style import BraceAdapter + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) + + +def backup_database(path, version): + 
"""Back up the database.""" + log.info('Backing up database before upgrade') + if not helpers.backup_versioned_file(path, version): + log.error('Database backup failed, abort upgrading database') + sys.exit(1) + else: + log.info('Proceeding with upgrade') diff --git a/medusa/db.py b/medusa/db.py index 2cb928c2c4..1056a997f3 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -1,6 +1,5 @@ # coding=utf-8 # Author: Nic Wolfe - # # This file is part of Medusa. # @@ -38,20 +37,6 @@ db_locks = {} -def dbFilename(filename=None, suffix=None): - """ - @param filename: The sqlite database filename to use. If not specified, - will be made to be application db file - @param suffix: The suffix to append to the filename. A '.' will be added - automatically, i.e. suffix='v0' will make dbfile.db.v0 - @return: the correct location of the database file. - """ - filename = filename or app.APPLICATION_DB - if suffix: - filename = "%s.%s" % (filename, suffix) - return os.path.join(app.DATA_DIR, filename) - - class DBConnection(object): def __init__(self, filename=None, suffix=None, row_type=None): @@ -63,7 +48,7 @@ def __init__(self, filename=None, suffix=None, row_type=None): if self.filename not in db_cons or not db_cons[self.filename]: db_locks[self.filename] = threading.Lock() - self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20, check_same_thread=False) + self.connection = sqlite3.connect(self.path, 20, check_same_thread=False) self.connection.text_factory = DBConnection._unicode_text_factory db_cons[self.filename] = self.connection @@ -80,11 +65,27 @@ def __init__(self, filename=None, suffix=None, row_type=None): self._set_row_factory() except sqlite3.OperationalError: - logger.log(u'Please check your database owner/permissions: {}'.format(dbFilename(self.filename, self.suffix)), logger.WARNING) + logger.log(u'Please check your database owner/permissions: {}'.format( + self.path, logger.WARNING)) except Exception as e: logger.log(u"DB error: " + ex(e), 
logger.ERROR) raise + @property + def path(self): + """ + @param filename: The sqlite database filename to use. If not specified, + will be made to be application db file + @param suffix: The suffix to append to the filename. A '.' will be added + automatically, i.e. suffix='v0' will make dbfile.db.v0 + @return: the path to the database file. + """ + filename = self.filename + if self.suffix: + filename = '%s.%s' % (filename, self.suffix) + + return os.path.join(app.DATA_DIR, filename) + def _set_row_factory(self): """ once lock is aquired we can configure the connection for @@ -452,7 +453,7 @@ def restoreDatabase(version): """ from medusa import helpers logger.log(u"Restoring database before trying upgrade again") - if not helpers.restore_versioned_file(dbFilename(suffix='v' + str(version)), version): + if not helpers.restore_versioned_file(DBConnection.path(suffix='v' + str(version)), version): logger.log_error_and_exit(u"Database restore failed, abort upgrading database") return False else: diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 11fb884acb..77dad00afb 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -111,7 +111,7 @@ def get(self, identifier, path_param=None): config_data['localUser'] = app.OS_USER or 'Unknown' config_data['programDir'] = app.PROG_DIR config_data['configFile'] = app.CONFIG_FILE - config_data['dbFilename'] = db.dbFilename() + config_data['dbFilename'] = db.DBConnection.path config_data['cacheDir'] = app.CACHE_DIR config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS From 78e97d8298bde54347a54cae2c4accd530139312 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 27 May 2018 18:53:30 +0200 Subject: [PATCH 40/93] Small fix --- medusa/db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/db.py b/medusa/db.py index 1056a997f3..33c8934f06 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -453,7 +453,7 @@ def restoreDatabase(version): """ 
from medusa import helpers logger.log(u"Restoring database before trying upgrade again") - if not helpers.restore_versioned_file(DBConnection.path(suffix='v' + str(version)), version): + if not helpers.restore_versioned_file(DBConnection.path, version): logger.log_error_and_exit(u"Database restore failed, abort upgrading database") return False else: From 61ca57cc2c51510d5b3543ff1c4e43644ab5ca45 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 27 May 2018 21:41:32 +0200 Subject: [PATCH 41/93] Remove unused functions --- medusa/db.py | 16 ---------- medusa/helpers/__init__.py | 61 -------------------------------------- 2 files changed, 77 deletions(-) diff --git a/medusa/db.py b/medusa/db.py index 33c8934f06..220d754353 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -444,22 +444,6 @@ def prettyName(class_name): return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)]) -def restoreDatabase(version): - """ - Restores a database to a previous version (backup file of version must still exist) - - :param version: Version to restore to - :return: True if restore succeeds, False if it fails - """ - from medusa import helpers - logger.log(u"Restoring database before trying upgrade again") - if not helpers.restore_versioned_file(DBConnection.path, version): - logger.log_error_and_exit(u"Database restore failed, abort upgrading database") - return False - else: - return True - - def _processUpgrade(connection, upgradeClass): instance = upgradeClass(connection) logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG) diff --git a/medusa/helpers/__init__.py b/medusa/helpers/__init__.py index 2180f75acf..b38885b314 100644 --- a/medusa/helpers/__init__.py +++ b/medusa/helpers/__init__.py @@ -918,67 +918,6 @@ def backup_versioned_file(old_file, version): return True -def restore_versioned_file(backup_file, version): - """Restore a file version to original state. 
- - For example sickbeard.db.v41 passed with version int(41), will translate back to sickbeard.db. - sickbeard.db.v41. passed with version tuple(41,2), will translate back to sickbeard.db. - - :param backup_file: File to restore - :param version: Version of file to restore - :return: True on success, False on failure - """ - num_tries = 0 - - with suppress(TypeError): - version = '.'.join([str(i) for i in version]) if not isinstance(version, str) else version - - new_file, _ = backup_file[0:backup_file.find(u'v{version}'.format(version=version))] - restore_file = backup_file - - if not os.path.isfile(new_file): - log.debug(u"Not restoring, {file} doesn't exist", {'file': new_file}) - return False - - try: - log.debug(u'Trying to backup {file} to {file}.r{version} before ' - u'restoring backup', {'file': new_file, 'version': version}) - - shutil.move(new_file, new_file + '.' + 'r' + str(version)) - except OSError as error: - log.warning(u'Error while trying to backup DB file {name} before' - u' proceeding with restore: {error!r}', - {'name': restore_file, 'error': error}) - return False - - while not os.path.isfile(new_file): - if not os.path.isfile(restore_file): - log.debug(u'Not restoring, {file} does not exist', - {'file': restore_file}) - break - - try: - log.debug(u'Trying to restore file {old} to {new}', - {'old': restore_file, 'new': new_file}) - shutil.copy(restore_file, new_file) - log.debug(u"Restore done") - break - except OSError as error: - log.warning(u'Error while trying to restore file {name}.' - u' Error: {msg!r}', - {'name': restore_file, 'msg': error}) - num_tries += 1 - time.sleep(1) - log.debug(u'Trying again. 
Attempt #: {0}', num_tries) - - if num_tries >= 10: - log.warning(u'Unable to restore file {old} to {new}', - {'old': restore_file, 'new': new_file}) - return False - - return True - - def get_lan_ip(): """Return IP of system.""" try: From 087a8fd3b5b2851d4a0c98ff33f068ae5094ae13 Mon Sep 17 00:00:00 2001 From: Dario Date: Mon, 28 May 2018 18:51:21 +0200 Subject: [PATCH 42/93] Clear provider tables for cache.db, lint sql a bit, finish db upgrade --- medusa/databases/cache_db.py | 41 ++++++++++++++-- medusa/databases/failed_db.py | 20 ++++---- medusa/databases/main_db.py | 89 +++++++++++++++++++---------------- medusa/db.py | 1 + 4 files changed, 95 insertions(+), 56 deletions(-) diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 48d0dc0fce..5fdbb0da03 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -3,6 +3,7 @@ from __future__ import unicode_literals from medusa import db +from medusa.databases import utils # Add new migrations at the bottom of the list @@ -15,7 +16,8 @@ def execute(self): queries = [ ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",), ("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",), - ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",), + ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER," + " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",), ("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);",), ("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);",), ("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);",), @@ -94,8 +96,10 @@ def test(self): def execute(self): self.connection.action("DROP TABLE IF EXISTS tmp_scene_exceptions;") self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;") - 
self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);") - self.connection.action("INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name, season, custom FROM tmp_scene_exceptions;") + self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER," + " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);") + self.connection.action("INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name," + " season, custom FROM tmp_scene_exceptions;") self.connection.action("DROP TABLE tmp_scene_exceptions;") @@ -155,4 +159,33 @@ def execute(self): self.addColumn('scene_names', 'indexer', 'NUMERIC', -1) # clean up null values from the scene_exceptions_table - self.connection.action("DELETE FROM scene_exceptions WHERE indexer = '' or indexer is null;") + self.connection.action("DELETE FROM scene_exceptions WHERE indexer = '' OR indexer IS NULL;") + + +class ClearProviderTables(AddIndexerIds): + """Clear provider cache items by deleting their tables.""" + + def test(self): + """Test if the version is at least 2.""" + return self.connection.version >= (2, None) + + def execute(self): + utils.backup_database(self.connection.path, self.connection.version) + + self.clear_provider_tables() + self.inc_major_version() + + def clear_provider_tables(self): + providers = self.connection.select( + "SELECT name FROM sqlite_master WHERE type='table' AND name NOT IN ('lastUpdate'," + " 'lastSearch', 'scene_names', 'network_timezones', 'scene_exceptions_refresh'," + " 'db_version', 'scene_exceptions', 'last_update');") + + for provider in providers: + self.connection.action("DELETE FROM {name};".format(name=provider[b'name'])) + + def inc_major_version(self): + major_version, minor_version = self.connection.version + major_version += 1 + 
self.connection.action("UPDATE db_version SET db_version = ?;", [major_version]) + return self.connection.version diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index fb10f9cf0d..09683ae64d 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -94,7 +94,7 @@ def execute(self): if series[b'indexer_id'] not in series_dict: series_dict[series[b'indexer_id']] = series[b'indexer'] - query = 'SELECT showid FROM history WHERE indexer_id is null' + query = 'SELECT showid FROM history WHERE indexer_id IS NULL' results = self.connection.select(query) if not results: return @@ -115,9 +115,7 @@ def execute(self): class UpdateHistoryTableQuality(AddIndexerIds): - """ - Add the quality field and separate status from quality - """ + """Add the quality field and separate status from quality.""" def test(self): """Test if the table history already has the column quality.""" @@ -151,16 +149,16 @@ def translate_status(self): self.connection.action('ALTER TABLE new_history RENAME TO history;') self.connection.action('DROP TABLE IF EXISTS new_history;') - sql_results = self.connection.select('SELECT status FROM history GROUP BY status') + sql_results = self.connection.select('SELECT status FROM history GROUP BY status;') for result in sql_results: split = Quality.split_composite_status(result[b'status']) - self.connection.action('UPDATE history SET status = ?, quality = ? WHERE status = ?', + self.connection.action('UPDATE history SET status = ?, quality = ? 
WHERE status = ?;', [split.status, split.quality, result[b'status']]) def inc_major_version(self): major_version, minor_version = self.connection.version major_version += 1 - self.connection.action("UPDATE db_version SET db_version = ?", [major_version]) + self.connection.action("UPDATE db_version SET db_version = ?;", [major_version]) return self.connection.version @@ -185,16 +183,16 @@ def shift_history_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in history one place to the left.') - sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality") + sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 - self.connection.select( - "UPDATE history SET quality = ? WHERE quality = ?", + self.connection.action( + "UPDATE history SET quality = ? WHERE quality = ?;", [new_quality, quality] ) def update_status_unknown(self): """Changes any `UNKNOWN` quality to 1.""" log.info(u'Update status UNKONWN from tv_episodes') - self.connection.select("UPDATE history SET quality = 1 WHERE quality = 65536") + self.connection.action("UPDATE history SET quality = 1 WHERE quality = 65536;") diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 7d7258c68f..0c74181748 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -56,9 +56,9 @@ def clean_null_indexer_mappings(self): def update_old_propers(self): # This is called once when we create proper_tags columns log.debug(u'Checking for old propers without proper tags') - query = "SELECT resource FROM history WHERE (proper_tags is null or proper_tags is '') " + \ + query = "SELECT resource FROM history WHERE (proper_tags IS NULL OR proper_tags = '') " + \ "AND (action LIKE '%2' OR action LIKE '%9') AND " + \ - "(resource LIKE '%REPACK%' or resource LIKE '%PROPER%' 
or resource LIKE '%REAL%')" + "(resource LIKE '%REPACK%' OR resource LIKE '%PROPER%' OR resource LIKE '%REAL%')" sql_results = self.connection.select(query) if sql_results: for sql_result in sql_results: @@ -148,7 +148,9 @@ def fix_duplicate_episodes(self): def fix_orphan_episodes(self): sql_results = self.connection.select( - "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL") + "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes" + " LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id" + " WHERE tv_shows.indexer_id IS NULL;") for cur_orphan in sql_results: log.debug(u'Orphan episode detected! episode_id: {0!s}' @@ -160,28 +162,23 @@ def fix_orphan_episodes(self): def fix_missing_table_indexes(self): if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"): - log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes' - u' table detected!, fixing...') + log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes table detected, fixing...') self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);") if not self.connection.select("PRAGMA index_info('idx_showid')"): - log.info(u'Missing idx_showid for TV Episodes table detected!,' - u' fixing...') + log.info(u'Missing idx_showid for TV Episodes table detected, fixing...') self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);") if not self.connection.select("PRAGMA index_info('idx_status')"): - log.info(u'Missing idx_status for TV Episodes table detected!,' - u' fixing...') + log.info(u'Missing idx_status for TV Episodes table detected, fixing...') self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, quality, season, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"): - log.info(u'Missing idx_sta_epi_air for TV Episodes table' - 
u' detected!, fixing...') + log.info(u'Missing idx_sta_epi_air for TV Episodes table detected, fixing...') self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, quality, episode, airdate)") if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"): - log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table' - u' detected!, fixing...') + log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table detected, fixing...') self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, quality, airdate)") def fix_unaired_episodes(self): @@ -189,7 +186,7 @@ def fix_unaired_episodes(self): cur_date = datetime.date.today() sql_results = self.connection.select( - "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND status in (?, ?) AND season > 0", + "SELECT episode_id FROM tv_episodes WHERE (airdate > ? OR airdate = 1) AND status in (?, ?) AND season > 0", [cur_date.toordinal(), common.SKIPPED, common.WANTED]) for cur_unaired in sql_results: @@ -259,7 +256,7 @@ def fix_subtitles_codes(self): [','.join(langs), datetime.datetime.now().strftime(dateTimeFormat), sql_result[b'episode_id']]) def fix_show_nfo_lang(self): - self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 or lang = '0'") + self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 OR lang = '0';") def fix_status_qualities(self): """ @@ -270,19 +267,19 @@ def fix_status_qualities(self): Until then this can be used to keep the DB sane. 
""" log.info(u'Convert composite statuses in tv_episodes to status + quality.') - sql_results = self.connection.select("SELECT status FROM tv_episodes where status > 12 GROUP BY status") + sql_results = self.connection.select("SELECT status FROM tv_episodes where status > 12 GROUP BY status;") for status in sql_results: log.info(u'Split composite status into status and quality for %s', status[b'status']) split = common.Quality.split_composite_status(status[b'status']) - self.connection.select( - "UPDATE tv_episodes SET status = ?, quality = ? WHERE status = ?", + self.connection.action( + "UPDATE tv_episodes SET status = ?, quality = ? WHERE status = ?;", [split.status, split.quality, status[b'status']] ) def update_status_unknown(self): """Changes any `UNKNOWN` quality to 1.""" log.info(u'Update status UNKONWN from tv_episodes') - self.connection.select("UPDATE tv_episodes SET quality = 1 WHERE quality = 65536") + self.connection.action("UPDATE tv_episodes SET quality = 1 WHERE quality = 65536;") # ====================== @@ -382,7 +379,14 @@ def execute(self): log.info(u'Converting column indexer and default_ep_status field types to numeric') self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows") self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows") - self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC)") + self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC," + " indexer 
NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT," + " classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT," + " flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC," + " lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT," + " last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC," + " rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC," + " scene NUMERIC, default_ep_status NUMERIC)") self.connection.action("INSERT INTO tv_shows SELECT * FROM tmp_tv_shows") self.connection.action("DROP TABLE tmp_tv_shows") @@ -400,13 +404,15 @@ def inc_major_version(self): major_version, minor_version = self.connection.version major_version += 1 minor_version = 0 - self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version]) + self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;", + [major_version, minor_version]) return self.connection.version def inc_minor_version(self): major_version, minor_version = self.connection.version minor_version += 1 - self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version]) + self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;", + [major_version, minor_version]) return self.connection.version def execute(self): @@ -699,7 +705,7 @@ def create_series_dict(): migration_config[0], migration_config[1] ) - query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} is null'.format(config=migration_config) + query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} IS NULL'.format(config=migration_config) results = self.connection.select(query) if not results: continue @@ -735,13 +741,13 @@ def execute(self): utils.backup_database(self.connection.path, self.connection.version) log.info(u'Dropping the unique index on idx_sta_epi_air') - 
self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_air') + self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_air;') log.info(u'Dropping the unique index on idx_sta_epi_air') - self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_sta_air') + self.connection.action('DROP INDEX IF EXISTS idx_sta_epi_sta_air;') log.info(u'Dropping the unique index on idx_status') - self.connection.action('DROP INDEX IF EXISTS idx_status') + self.connection.action('DROP INDEX IF EXISTS idx_status;') log.info(u'Adding new ep_status and ep_quality fields in the tv_episodes table') self.connection.action('DROP TABLE IF EXISTS new_tv_episodes;') @@ -753,7 +759,7 @@ def execute(self): 'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, ' 'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, ' 'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC, ' - 'ep_status NUMERIC, ep_quality NUMERIC)') + 'ep_status NUMERIC, ep_quality NUMERIC);') self.connection.action('INSERT INTO new_tv_episodes (showid, indexerid, indexer, ' 'name, season, episode,description, airdate, hasnfo, ' @@ -773,10 +779,10 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") log.info(u'Split composite status into ep_status and ep_quality') - sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status") + sql_results = self.connection.select("SELECT status from tv_episodes GROUP BY status;") for status in sql_results: split = common.Quality.split_composite_status(status[b'status']) - self.connection.action("UPDATE tv_episodes SET ep_status = ?, ep_quality = ? WHERE status = ?", + self.connection.action("UPDATE tv_episodes SET ep_status = ?, ep_quality = ? WHERE status = ?;", [split.status, split.quality, status[b'status']]) # Remove ep_status and ep_quality and add quality field. 
@@ -790,7 +796,7 @@ def execute(self): 'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, ' 'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, ' 'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, ' - 'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC)') + 'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);') self.connection.action('INSERT INTO new_tv_episodes (showid, indexerid, indexer, ' 'name, season, episode, description, airdate, hasnfo, ' @@ -810,10 +816,10 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;") log.info(u'Remove the quality from the action field, as this is a composite status') - sql_results = self.connection.select("SELECT action FROM history GROUP BY action") + sql_results = self.connection.select("SELECT action FROM history GROUP BY action;") for status in sql_results: split = common.Quality.split_composite_status(status[b'action']) - self.connection.action("UPDATE history SET action = ? WHERE action = ?", + self.connection.action("UPDATE history SET action = ? WHERE action = ?;", [split.status, status[b'action']]) self.inc_minor_version() @@ -841,12 +847,12 @@ def shift_tv_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in tv_shows one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_shows GROUP BY quality") + sql_results = self.connection.select("SELECT quality FROM tv_shows GROUP BY quality ORDER BY quality DESC;") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 - self.connection.select( - "UPDATE tv_shows SET quality = ? WHERE quality = ?", + self.connection.action( + "UPDATE tv_shows SET quality = ? 
WHERE quality = ?;", [new_quality, quality] ) @@ -857,12 +863,13 @@ def shift_episode_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in tv_episodes one place to the left.') - sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality") + sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality" + " ORDER BY quality DESC;") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 - self.connection.select( - "UPDATE tv_episodes SET quality = ? WHERE quality = ?", + self.connection.action( + "UPDATE tv_episodes SET quality = ? WHERE quality = ?;", [new_quality, quality] ) @@ -873,11 +880,11 @@ def shift_history_qualities(self): This makes it possible to set UNKNOWN as 1, making it the lowest quality. """ log.info('Shift qualities in history one place to the left.') - sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality") + sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;") for result in sql_results: quality = result[b'quality'] new_quality = quality << 1 - self.connection.select( - "UPDATE history SET quality = ? WHERE quality = ?", + self.connection.action( + "UPDATE history SET quality = ? 
WHERE quality = ?;", [new_quality, quality] ) diff --git a/medusa/db.py b/medusa/db.py index 220d754353..ee1cff6cc4 100644 --- a/medusa/db.py +++ b/medusa/db.py @@ -33,6 +33,7 @@ from six import itervalues, text_type + db_cons = {} db_locks = {} From bfb6d322330c214670e9908155f134bc9b0634d1 Mon Sep 17 00:00:00 2001 From: Dario Date: Mon, 28 May 2018 19:16:20 +0200 Subject: [PATCH 43/93] UNSET status should always have NA quality --- medusa/common.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index cc4dd0aaa4..56dfc8db20 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -452,7 +452,7 @@ def split_composite_status(status): """ status = int(status) if status == UNSET: - return Quality.composite_status_quality(UNSET, Quality.UNKNOWN) + return Quality.composite_status_quality(UNSET, Quality.NA) for q in sorted(list(Quality.qualityStrings), reverse=True): if status > q * 100: @@ -562,8 +562,6 @@ def scene_quality_from_name(name, quality): def should_search(cur_status, cur_quality, show_obj, manually_searched): """Return true if that episodes should be search for a better quality. 
- If cur_quality is Quality.UNKNOWN it will return True only if is not in Allowed (Unknown can be in Allowed) - :param cur_status: current status of the episode :param cur_quality: current quality of the episode :param show_obj: Series object of the episode we will check if we should search or not From 2e19f7bd39ae7a6074d626c309d9fcea6cc268fc Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 03:36:23 +0200 Subject: [PATCH 44/93] Fix APIv1 show.stats --- medusa/server/api/v1/core.py | 52 ++++++++++++++++++++---------------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index 77d952867a..385837f4c2 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -37,8 +37,8 @@ process_tv, sbdatetime, subtitles, ui, ) from medusa.common import ( - ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_PROPER, - UNAIRED, UNSET, WANTED, statusStrings, + ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, UNSET, WANTED, statusStrings, ) from medusa.helper.common import ( dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, @@ -2558,7 +2558,7 @@ def run(self): else: sql_results = main_db_con.select( 'SELECT name, episode, airdate, status, quality, location, file_size, release_name, subtitles' - ' FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? ', + ' FROM tv_episodes WHERE indexer = ? AND showid = ? 
AND season = ?', [INDEXER_TVDBV2, self.indexerid, self.season]) if not sql_results: return _responds(RESULT_FAILURE, msg='Season not found') @@ -2658,19 +2658,18 @@ def run(self): # show stats episode_status_counts_total = {'total': 0} for status_code in statusStrings: - if status_code in [UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED]: - continue - episode_status_counts_total[status_code] = 0 + if status_code not in (UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED): + episode_status_counts_total[status_code] = 0 # add all the downloaded qualities episode_qualities_counts_download = {'total': 0} for status_code in (DOWNLOADED, ARCHIVED): - episode_qualities_counts_download[status_code] = 0 + episode_qualities_counts_download[status_code] = {} # add all snatched qualities episode_qualities_counts_snatch = {'total': 0} for status_code in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): - episode_qualities_counts_snatch[status_code] = 0 + episode_qualities_counts_snatch[status_code] = {} main_db_con = db.DBConnection(row_type='dict') sql_results = main_db_con.select('SELECT status, quality, season FROM tv_episodes ' @@ -2682,14 +2681,21 @@ def run(self): status, quality = int(row[b'status']), int(row[b'quality']) episode_status_counts_total['total'] += 1 - episode_status_counts_total[status][quality] += 1 if status in (DOWNLOADED, ARCHIVED): episode_qualities_counts_download['total'] += 1 - episode_qualities_counts_download[status][quality] += 1 + if quality not in episode_qualities_counts_download[status]: + episode_qualities_counts_download[status][quality] = 1 + else: + episode_qualities_counts_download[status][quality] += 1 elif status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): episode_qualities_counts_snatch['total'] += 1 - episode_qualities_counts_snatch[status][quality] += 1 + if quality not in episode_qualities_counts_snatch[status]: + episode_qualities_counts_snatch[status][quality] = 1 + else: + 
episode_qualities_counts_snatch[status][quality] += 1 + elif status not in (UNSET, ): + episode_status_counts_total[status] += 1 # the outgoing container episodes_stats = {'downloaded': {}} @@ -2698,25 +2704,25 @@ def run(self): if status == 'total': episodes_stats['downloaded']['total'] = episode_qualities_counts_download[status] continue - quality = episode_qualities_counts_download[status] - quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') - episodes_stats['downloaded'][quality_string] = episode_qualities_counts_download[status] + for quality in episode_qualities_counts_download[status]: + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') + if quality_string not in episodes_stats['downloaded']: + episodes_stats['downloaded'][quality_string] = episode_qualities_counts_download[status][quality] + else: + episodes_stats['downloaded'][quality_string] += episode_qualities_counts_download[status][quality] episodes_stats['snatched'] = {} - # turning codes into strings - # and combining proper and normal for status in episode_qualities_counts_snatch: if status == 'total': episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[status] continue - quality = episode_qualities_counts_download[status] - quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') - if Quality.qualityStrings[quality] in episodes_stats['snatched']: - episodes_stats['snatched'][quality_string] += episode_qualities_counts_snatch[status] - else: - episodes_stats['snatched'][quality_string] = episode_qualities_counts_snatch[status] + for quality in episode_qualities_counts_snatch[status]: + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') + if quality_string not in episodes_stats['snatched']: + episodes_stats['snatched'][quality_string] = episode_qualities_counts_snatch[status][quality] + else: + episodes_stats['snatched'][quality_string] += episode_qualities_counts_snatch[status][quality] - # 
episodes_stats["total"] = {} for status in episode_status_counts_total: if status == 'total': episodes_stats['total'] = episode_status_counts_total[status] From ada351f21435234771f46a65d481d2329e71a9c7 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 11:13:38 +0200 Subject: [PATCH 45/93] Fix most tests --- tests/apiv2/test_config.py | 2 +- tests/conftest.py | 2 +- tests/test_should_process.py | 7 +- tests/test_should_replace.py | 4 +- tests/test_should_search.py | 68 ++++++++---- tests/test_tvepisode_refiner.py | 2 +- ...tatus.py => test_update_status_quality.py} | 101 +++++++++++------- 7 files changed, 114 insertions(+), 72 deletions(-) rename tests/{test_update_status.py => test_update_status_quality.py} (58%) diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index ecbf9403e4..ac867b798a 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -52,7 +52,7 @@ def config(monkeypatch, app_config): config_data['localUser'] = os_user config_data['programDir'] = app.PROG_DIR config_data['configFile'] = app.CONFIG_FILE - config_data['dbFilename'] = db.dbFilename() + config_data['dbFilename'] = db.DBConnection.path config_data['cacheDir'] = app.CACHE_DIR config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS diff --git a/tests/conftest.py b/tests/conftest.py index 7241fd3864..577262dc83 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -98,7 +98,7 @@ def tvshow(create_tvshow): @pytest.fixture def tvepisode(tvshow, create_tvepisode): return create_tvepisode(series=tvshow, season=3, episode=4, indexer=34, file_size=1122334455, - name='Episode Title', status=Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY), + name='Episode Title', status=DOWNLOADED, quality=Quality.FULLHDBLURAY, release_group='SuperGroup') diff --git a/tests/test_should_process.py b/tests/test_should_process.py index 90a615454a..4ef0b6097d 100644 --- a/tests/test_should_process.py +++ b/tests/test_should_process.py @@ -2,6 
+2,7 @@ """Tests for medusa/test_should_process.py.""" from medusa.common import Quality from medusa.post_processor import PostProcessor + import pytest @@ -55,12 +56,12 @@ 'preferred_qualities': [Quality.HDTV], 'expected': True }, - { # p7: Current quality is NONE: no - 'cur_quality': Quality.NONE, + { # p7: Current quality is NA: yes + 'cur_quality': Quality.NA, 'new_quality': Quality.HDTV, 'allowed_qualities': [Quality.HDWEBDL], 'preferred_qualities': [Quality.HDTV], - 'expected': False + 'expected': True }, ]) def test_should_process(p): diff --git a/tests/test_should_replace.py b/tests/test_should_replace.py index f342690686..181a08e777 100644 --- a/tests/test_should_replace.py +++ b/tests/test_should_replace.py @@ -414,9 +414,9 @@ 'manually_searched': False, 'expected': False }, - { # p37: Current quality is NONE: yes + { # p37: Current quality is NA: yes 'ep_status': SNATCHED, - 'cur_quality': Quality.NONE, + 'cur_quality': Quality.NA, 'new_quality': Quality.HDTV, 'allowed_qualities': [Quality.SDTV], 'preferred_qualities': [Quality.HDTV], diff --git a/tests/test_should_search.py b/tests/test_should_search.py index 3e039b38bf..9d137ea593 100644 --- a/tests/test_should_search.py +++ b/tests/test_should_search.py @@ -21,7 +21,8 @@ def _load_from_db(self): @pytest.mark.parametrize('p', [ { # p0: Downloaded a quality not in quality system : yes - 'status': Quality.composite_status(DOWNLOADED, Quality.SDTV), + 'status': DOWNLOADED, + 'quality': Quality.SDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -29,7 +30,8 @@ def _load_from_db(self): 'expected': True }, { # p1: Current status is SKIPPED: no - 'status': Quality.composite_status(SKIPPED, None), + 'status': SKIPPED, + 'quality': Quality.NA, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities 
[Quality.HDWEBDL])), # Preferred Qualities @@ -37,7 +39,8 @@ def _load_from_db(self): 'expected': False }, { # p2: Current status is IGNORED: no - 'status': Quality.composite_status(IGNORED, None), + 'status': IGNORED, + 'quality': Quality.NA, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -45,7 +48,8 @@ def _load_from_db(self): 'expected': False }, { # p3: Current status is SNATCHED_BEST: no - 'status': Quality.composite_status(SNATCHED_BEST, Quality.HDWEBDL), + 'status': SNATCHED_BEST, + 'quality': Quality.HDWEBDL, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -53,7 +57,8 @@ def _load_from_db(self): 'expected': False }, { # p4: Current status is SNATCHED: yes - 'status': Quality.composite_status(SNATCHED, Quality.HDTV), + 'status': SNATCHED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -61,7 +66,8 @@ def _load_from_db(self): 'expected': True }, { # p5: Current status is SNATCHED_PROPER: yes - 'status': Quality.composite_status(SNATCHED_PROPER, Quality.HDTV), + 'status': SNATCHED_PROPER, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -69,7 +75,8 @@ def _load_from_db(self): 'expected': True }, { # p6: Status is DOWNLOADED: yes - 'status': Quality.composite_status(DOWNLOADED, Quality.HDTV), + 'status': DOWNLOADED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL], # Allowed Qualities [Quality.HDBLURAY])), # Preferred 
Qualities @@ -77,7 +84,8 @@ def _load_from_db(self): 'expected': True }, { # p7: Status is ARCHIVED: no - 'status': Quality.composite_status(ARCHIVED, Quality.HDTV), + 'status': ARCHIVED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -85,7 +93,8 @@ def _load_from_db(self): 'expected': False }, { # p8: Status WANTED: yes - 'status': Quality.composite_status(WANTED, None), + 'status': WANTED, + 'quality': Quality.NA, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDWEBDL], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -93,7 +102,8 @@ def _load_from_db(self): 'expected': True }, { # p9: Episode was manually searched by user: no - 'status': Quality.composite_status(DOWNLOADED, Quality.HDBLURAY), + 'status': DOWNLOADED, + 'quality': Quality.HDBLURAY, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -101,7 +111,8 @@ def _load_from_db(self): 'expected': False }, { # p10: Downloaded an Allowed quality. 
Preferred not set: no - 'status': Quality.composite_status(DOWNLOADED, Quality.HDTV), + 'status': DOWNLOADED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [])), # Preferred Qualities @@ -109,7 +120,8 @@ def _load_from_db(self): 'expected': False }, { # p11: Downloaded an Allowed quality but Preferred set: yes - 'status': Quality.composite_status(DOWNLOADED, Quality.HDTV), + 'status': DOWNLOADED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDWEBDL])), # Preferred Qualities @@ -117,7 +129,8 @@ def _load_from_db(self): 'expected': True }, { # p12: Downloaded an Preferred quality. Allowed not set: no - 'status': Quality.composite_status(DOWNLOADED, Quality.HDTV), + 'status': DOWNLOADED, + 'quality': Quality.HDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([], # Allowed Qualities [Quality.SDTV, Quality.HDTV])), # Preferred Qualities @@ -125,7 +138,8 @@ def _load_from_db(self): 'expected': False }, { # p13: Already have Preferred quality: no - 'status': Quality.composite_status(SNATCHED, Quality.HDBLURAY), + 'status': SNATCHED, + 'quality': Quality.HDBLURAY, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -133,7 +147,8 @@ def _load_from_db(self): 'expected': False }, { # p14: ´Downloaded UNKNOWN and its on Allowed. 
Preferred not set: no - 'status': Quality.composite_status(DOWNLOADED, Quality.UNKNOWN), + 'status': DOWNLOADED, + 'quality': Quality.UNKNOWN, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.UNKNOWN, Quality.HDTV], # Allowed Qualities [])), # Preferred Qualities @@ -141,15 +156,17 @@ def _load_from_db(self): 'expected': False }, { # p15: ´Downloaded UNKNOWN and its not on Allowed: yes - 'status': Quality.composite_status(DOWNLOADED, Quality.UNKNOWN), + 'status': DOWNLOADED, + 'quality': Quality.UNKNOWN, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities 'manually_searched': False, 'expected': True }, - { # p16: ´Downloaded NONE (invalid quality): yes - 'status': Quality.composite_status(DOWNLOADED, Quality.NONE), + { # p16: ´Downloaded NA (initial quality): yes + 'status': DOWNLOADED, + 'quality': Quality.NA, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -157,7 +174,8 @@ def _load_from_db(self): 'expected': True }, { # p17: ´SNATCHED BEST but this quality is no longer wanted: yes - 'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.SDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -165,7 +183,8 @@ def _load_from_db(self): 'expected': True }, { # p18: ´SNATCHED BEST but this quality is no longer in preferred but in allowed. 
Preferred set: yes - 'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.SDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV, Quality.SDTV], # Allowed Qualities [Quality.HDBLURAY])), # Preferred Qualities @@ -173,7 +192,8 @@ def _load_from_db(self): 'expected': True }, { # p19: ´SNATCHED BEST but this quality is no longer in preferred but in allowed. Preferred not set: no - 'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.SDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV, Quality.SDTV], # Allowed Qualities [])), # Preferred Qualities @@ -181,7 +201,8 @@ def _load_from_db(self): 'expected': False }, { # p20: ´SNATCHED BEST but this quality is no longer wanted. Preferred not set: yes - 'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.SDTV, 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities [])), # Preferred Qualities @@ -193,12 +214,13 @@ def test_should_search(p): """Run the test.""" # Given status = p['status'] + quality = p['quality'] show_obj = p['show_obj'] manually_searched = p['manually_searched'] expected = p['expected'] # When - replace, msg = Quality.should_search(status, show_obj, manually_searched) + replace, msg = Quality.should_search(status, quality, show_obj, manually_searched) actual = replace # Then diff --git a/tests/test_tvepisode_refiner.py b/tests/test_tvepisode_refiner.py index 9cda9908e7..b924b371b6 100644 --- a/tests/test_tvepisode_refiner.py +++ b/tests/test_tvepisode_refiner.py @@ -13,7 +13,7 @@ def data(create_tvshow, create_tvepisode): show_year = 2012 tvshow = create_tvshow(indexerid=12, name='{0} ({1})'.format(show_name, show_year), imdb_id='tt0000000') 
tvepisode = create_tvepisode(series=tvshow, indexer=34, season=3, episode=4, name='Episode Title', - file_size=1122334455, status=Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY), + file_size=1122334455, status=DOWNLOADED, quality=Quality.FULLHDBLURAY, release_group='SuperGroup') return { 'tvshow': tvshow, diff --git a/tests/test_update_status.py b/tests/test_update_status_quality.py similarity index 58% rename from tests/test_update_status.py rename to tests/test_update_status_quality.py index d43c39859f..90908d0d6f 100644 --- a/tests/test_update_status.py +++ b/tests/test_update_status_quality.py @@ -1,5 +1,5 @@ # coding=utf-8 -"""Tests for medusa/tv/episode.py:update_status""" +"""Tests for medusa/tv/episode.py:update_status_quality""" from medusa.common import (ARCHIVED, DOWNLOADED, IGNORED, Quality, SKIPPED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, UNAIRED, UNSET, WANTED, statusStrings) @@ -22,122 +22,141 @@ def create(filepath, status, size): @pytest.mark.parametrize('p', [ { # p0: File name and size are the same - 'status': Quality.composite_status(SNATCHED, Quality.SDTV), + 'status': SNATCHED, + 'quality': Quality.SDTV, 'filepath': 'Show.S01E01.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV) + 'expected': (DOWNLOADED, Quality.SDTV) }, { # p1: Not a valid media file - 'status': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV), + 'status': DOWNLOADED, + 'quality': Quality.FULLHDTV, 'location': 'Show.S01E02.1080p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E02.1080p.HDTV.X264-GROUP.srt', - 'expected': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV) + 'expected': (DOWNLOADED, Quality.FULLHDTV) }, { # p2: File name is the same, different size - 'status': Quality.composite_status(SNATCHED, Quality.SDTV), + 'status': SNATCHED, + 'quality': Quality.SDTV, 'location': 'Show.S01E03.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E03.HDTV.X264-GROUP.mkv', 'new_size': 53, - 'expected': 
Quality.composite_status(DOWNLOADED, Quality.SDTV) + 'expected': (DOWNLOADED, Quality.SDTV) }, { # p3: File name is different, same size - 'status': Quality.composite_status(DOWNLOADED, Quality.SDTV), + 'status': DOWNLOADED, + 'quality': Quality.SDTV, 'location': 'Show.S01E04.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E04.HDTV.X264-OTHERGROUP.mkv', - 'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV) + 'expected': (DOWNLOADED, Quality.SDTV) }, { # p4: File name and size are both different - 'status': Quality.composite_status(DOWNLOADED, Quality.HDTV), + 'status': DOWNLOADED, + 'quality': Quality.HDTV, 'location': 'Show.S01E05.720p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E05.720p.HDTV.X264-SOMEOTHERGROUP.mkv', 'new_size': 85, - 'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV) + 'expected': (DOWNLOADED, Quality.HDTV) }, { # p5: No previous file present (location) - 'status': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV), + 'status': DOWNLOADED, + 'quality': Quality.FULLHDTV, 'filepath': 'Show.S01E06.1080p.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(ARCHIVED, Quality.FULLHDTV) + 'expected': (ARCHIVED, Quality.FULLHDTV) }, { # p6: Default status and no previous file present (location) 'filepath': 'Show.S01E07.720p.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(ARCHIVED, Quality.HDTV) + 'expected': (ARCHIVED, Quality.HDTV) }, { # p7: Snatched and download not finished - 'status': Quality.composite_status(SNATCHED, Quality.FULLHDTV), + 'status': SNATCHED, + 'quality': Quality.FULLHDTV, 'location': 'Show.S01E08.1080p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E08.1080p.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(SNATCHED, Quality.FULLHDTV) + 'expected': (SNATCHED, Quality.FULLHDTV) }, { # p8: Previous status was Skipped - 'status': Quality.composite_status(SKIPPED, None), + 'status': SKIPPED, + 'quality': Quality.NA, 'filepath': 'Show.S01E09.1080p.HDTV.X264-GROUP.mkv', - 'expected': 
Quality.composite_status(ARCHIVED, Quality.FULLHDTV) + 'expected': (ARCHIVED, Quality.FULLHDTV) }, { # p9: Previous status was Unaired - 'status': Quality.composite_status(UNAIRED, None), + 'status': UNAIRED, + 'quality': Quality.NA, 'filepath': 'Show.S01E10.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(ARCHIVED, Quality.SDTV) + 'expected': (ARCHIVED, Quality.SDTV) }, { # p10: Previous status was Ignored - 'status': Quality.composite_status(IGNORED, None), + 'status': IGNORED, + 'quality': Quality.NA, 'filepath': 'Show.S01E11.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(ARCHIVED, Quality.SDTV) + 'expected': (ARCHIVED, Quality.SDTV) }, { # p11: Previous status was Unset - 'status': Quality.composite_status(UNSET, None), + 'status': UNSET, + 'quality': Quality.NA, 'filepath': 'Show.S01E11.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(ARCHIVED, Quality.SDTV) + 'expected': (ARCHIVED, Quality.SDTV) }, { # p12: Snatched and download is finished - 'status': Quality.composite_status(SNATCHED, Quality.HDTV), + 'status': SNATCHED, + 'quality': Quality.HDTV, 'location': 'Show.S01E12.720p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E12.720p.HDTV.X264-BETTERGROUP.mkv', 'new_size': 29, - 'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV) + 'expected': (DOWNLOADED, Quality.HDTV) }, { # p13: Snatched a Proper and download is finished - 'status': Quality.composite_status(SNATCHED_PROPER, Quality.FULLHDTV), + 'status': SNATCHED_PROPER, + 'quality': Quality.FULLHDTV, 'location': 'Show.S01E13.1080p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E13.PROPER.1080p.HDTV.X264-GROUP.mkv', 'new_size': 89, - 'expected': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV) + 'expected': (DOWNLOADED, Quality.FULLHDTV) }, { # p14: Snatched a Proper (Best) and download is finished (higher quality) - 'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.SDTV, 'location': 
'Show.S01E14.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E14.720p.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV) + 'expected': (DOWNLOADED, Quality.HDTV) }, { # p15: Snatched a Proper (Best) and download is finished (lower quality) - 'status': Quality.composite_status(SNATCHED_BEST, Quality.FULLHDTV), + 'status': SNATCHED_BEST, + 'quality': Quality.FULLHDTV, 'location': 'Show.S01E15.1080p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E15.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV) + 'expected': (DOWNLOADED, Quality.SDTV) }, { # p16: Previous status was Wanted and no previous file present (location) - 'status': Quality.composite_status(WANTED, None), + 'status': WANTED, + 'quality': Quality.NA, 'filepath': 'Show.S01E16.HDTV.X264-GROUP.mkv', - 'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV) + 'expected': (DOWNLOADED, Quality.SDTV) }, { # p17: Previous status was Wanted - 'status': Quality.composite_status(WANTED, Quality.FULLHDTV), + 'status': WANTED, + 'quality': Quality.FULLHDTV, 'location': 'Show.S01E17.1080p.HDTV.X264-GROUP.mkv', 'filepath': 'Show.S01E17.720p.HDTV.X264-GROUP.mkv', 'new_size': 38, - 'expected': Quality.composite_status(ARCHIVED, Quality.HDTV) + 'expected': (ARCHIVED, Quality.HDTV) }, ]) -def test_update_status(p, create_episode, create_file): +def test_update_status_quality(p, create_episode, create_file): """Run the test.""" # Given location = p.get('location') status = p.get('status') episode = create_episode(filepath=location, status=status, size=42) filepath = create_file(p['filepath'], size=p.get('new_size', 42)) - expected = p['expected'] + exp_status, exp_quality = p['expected'] # When - episode.update_status(filepath) - actual = episode.status + episode.update_status_quality(filepath) + actual_status = episode.status + actual_quality = episode.quality # Then - assert statusStrings[expected] == statusStrings[actual] + assert 
statusStrings[exp_status] == statusStrings[actual_status] + assert Quality.qualityStrings[exp_quality] == Quality.qualityStrings[actual_quality] From 6dc4feab3e9c1ed92da06946c0af54786849e969 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 17:42:34 +0200 Subject: [PATCH 46/93] Fix remaining tests --- dredd/api-description.yml | 2 +- medusa/common.py | 14 +++++++++----- medusa/server/api/v2/config.py | 2 +- tests/apiv2/test_config.py | 4 ++-- tests/test_should_replace.py | 4 ++-- tests/test_update_status_quality.py | 7 +++++-- themes-default/slim/views/config.mako | 6 +++--- themes/dark/templates/config.mako | 6 +++--- themes/light/templates/config.mako | 6 +++--- 9 files changed, 29 insertions(+), 22 deletions(-) diff --git a/dredd/api-description.yml b/dredd/api-description.yml index 14211fec2d..4c92436c91 100644 --- a/dredd/api-description.yml +++ b/dredd/api-description.yml @@ -1275,7 +1275,7 @@ definitions: properties: enabled: type: boolean - dbFilename: + dbPath: type: string pythonVersion: type: string diff --git a/medusa/common.py b/medusa/common.py index 56dfc8db20..1f247ea9fe 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -492,7 +492,8 @@ def scene_quality_from_name(name, quality): codec = ' DivX' # If any HDTV type or SDTV - if quality in (1, 4, 8, 16, 512, 4096): + if quality in (Quality.SDTV, Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV, + Quality.UHD_4K_TV, Quality.UHD_8K_TV): rel_type = ' HDTV' if 'ahdtv' in name: rel_type = ' AHDTV' @@ -507,8 +508,7 @@ def scene_quality_from_name(name, quality): elif 'uhdtv' in name: rel_type = ' UHDTV' - # If SDDVD - if quality == 2: + if quality == Quality.SDDVD: rel_type = ' BDRip' if re.search(r'br(-| |\.)?(rip|mux)', name): rel_type = ' BRRip' @@ -516,7 +516,8 @@ def scene_quality_from_name(name, quality): rel_type = ' DVDRip' # If any WEB type - if quality in (32, 64, 1024, 8192): + if quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD_4K_WEBDL, + Quality.UHD_8K_WEBDL): 
rel_type = ' WEB' if re.search(r'web(-| |\.)?dl', name): rel_type = ' WEB-DL' @@ -712,6 +713,9 @@ def to_guessit(quality): :return: dict {'screen_size': , 'format': } :rtype: dict (str, str) """ + if quality not in Quality.qualityStrings: + quality = Quality.UNKNOWN + screen_size = Quality.to_guessit_screen_size(quality) fmt = Quality.to_guessit_format(quality) result = dict() @@ -733,7 +737,7 @@ def to_guessit_format(quality): """ for q in Quality.to_guessit_format_list: if quality & q: - key = q & (512 - 1) # 4k formats are bigger than 384 and are not part of ANY* bit set + key = q & (1024 - 1) # 4k formats are bigger than 768 and are not part of ANY* bit set return Quality.combinedQualityStrings.get(key) @staticmethod diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 77dad00afb..4155698a05 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -111,7 +111,7 @@ def get(self, identifier, path_param=None): config_data['localUser'] = app.OS_USER or 'Unknown' config_data['programDir'] = app.PROG_DIR config_data['configFile'] = app.CONFIG_FILE - config_data['dbFilename'] = db.DBConnection.path + config_data['dbPath'] = db.DBConnection().path config_data['cacheDir'] = app.CACHE_DIR config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py index ac867b798a..c93e2546fa 100644 --- a/tests/apiv2/test_config.py +++ b/tests/apiv2/test_config.py @@ -52,7 +52,7 @@ def config(monkeypatch, app_config): config_data['localUser'] = os_user config_data['programDir'] = app.PROG_DIR config_data['configFile'] = app.CONFIG_FILE - config_data['dbFilename'] = db.DBConnection.path + config_data['dbPath'] = db.DBConnection().path config_data['cacheDir'] = app.CACHE_DIR config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS @@ -151,7 +151,7 @@ def test_config_get(http_client, create_url, auth_headers, config): 'locale', 
'localUser', 'githubUrl', - 'dbFilename', + 'dbPath', ]) def test_config_get_detailed(http_client, create_url, auth_headers, config, query): # given diff --git a/tests/test_should_replace.py b/tests/test_should_replace.py index 181a08e777..a85128a014 100644 --- a/tests/test_should_replace.py +++ b/tests/test_should_replace.py @@ -205,7 +205,7 @@ 'manually_searched': False, 'expected': False }, - { # p18: Downloaded Unknown found 720p HDBLURAY: no + { # p18: Downloaded Unknown found 720p HDBLURAY: yes 'ep_status': DOWNLOADED, 'cur_quality': Quality.UNKNOWN, 'new_quality': Quality.HDBLURAY, @@ -214,7 +214,7 @@ 'download_current_quality': False, 'force': False, 'manually_searched': False, - 'expected': False + 'expected': True }, { # p19: Downloaded SDTV (not in quality system) and found 720p HDTV: yes 'ep_status': DOWNLOADED, diff --git a/tests/test_update_status_quality.py b/tests/test_update_status_quality.py index 90908d0d6f..56ffd0c619 100644 --- a/tests/test_update_status_quality.py +++ b/tests/test_update_status_quality.py @@ -8,12 +8,14 @@ @pytest.fixture def create_episode(tvshow, create_tvepisode, create_file): - def create(filepath, status, size): + def create(filepath, status, size, quality): path = create_file(filepath, size=size) if filepath else '' episode = create_tvepisode(tvshow, 2, 14, filepath=path) episode.location = path if status: episode.status = status + if quality: + episode.quality = quality return episode @@ -148,7 +150,8 @@ def test_update_status_quality(p, create_episode, create_file): # Given location = p.get('location') status = p.get('status') - episode = create_episode(filepath=location, status=status, size=42) + quality = p.get('quality') + episode = create_episode(filepath=location, status=status, quality=quality, size=42) filepath = create_file(p['filepath'], size=p.get('new_size', 42)) exp_status, exp_quality = p['expected'] diff --git a/themes-default/slim/views/config.mako b/themes-default/slim/views/config.mako index 
8a95086122..59f9dc6688 100644 --- a/themes-default/slim/views/config.mako +++ b/themes-default/slim/views/config.mako @@ -21,7 +21,7 @@ User:{{localUser}} Program Folder:{{programDir}} Config File:{{configFile}} - Database File:{{dbFilename}} + Database File:{{dbPath}} Cache Folder:{{cacheDir}} Log Folder:{{logDir}} Arguments:
{{prettyPrintJSON(appArgs)}}
@@ -50,7 +50,7 @@ const component = { commitHash: undefined, configFile: undefined, databaseVersion: undefined, - dbFilename: undefined, + dbPath: undefined, githubUrl: undefined, locale: undefined, localUser: undefined, @@ -73,7 +73,7 @@ const component = { this.commitHash = data.commitHash; this.configFile = data.configFile; this.databaseVersion = data.databaseVersion; - this.dbFilename = data.dbFilename; + this.dbPath = data.dbPath; this.githubUrl = data.githubUrl; this.locale = data.locale; this.localUser = data.localUser; diff --git a/themes/dark/templates/config.mako b/themes/dark/templates/config.mako index 8a95086122..59f9dc6688 100644 --- a/themes/dark/templates/config.mako +++ b/themes/dark/templates/config.mako @@ -21,7 +21,7 @@ User:{{localUser}} Program Folder:{{programDir}} Config File:{{configFile}} - Database File:{{dbFilename}} + Database File:{{dbPath}} Cache Folder:{{cacheDir}} Log Folder:{{logDir}} Arguments:
{{prettyPrintJSON(appArgs)}}
@@ -50,7 +50,7 @@ const component = { commitHash: undefined, configFile: undefined, databaseVersion: undefined, - dbFilename: undefined, + dbPath: undefined, githubUrl: undefined, locale: undefined, localUser: undefined, @@ -73,7 +73,7 @@ const component = { this.commitHash = data.commitHash; this.configFile = data.configFile; this.databaseVersion = data.databaseVersion; - this.dbFilename = data.dbFilename; + this.dbPath = data.dbPath; this.githubUrl = data.githubUrl; this.locale = data.locale; this.localUser = data.localUser; diff --git a/themes/light/templates/config.mako b/themes/light/templates/config.mako index 8a95086122..59f9dc6688 100644 --- a/themes/light/templates/config.mako +++ b/themes/light/templates/config.mako @@ -21,7 +21,7 @@ User:{{localUser}} Program Folder:{{programDir}} Config File:{{configFile}} - Database File:{{dbFilename}} + Database File:{{dbPath}} Cache Folder:{{cacheDir}} Log Folder:{{logDir}} Arguments:
{{prettyPrintJSON(appArgs)}}
@@ -50,7 +50,7 @@ const component = { commitHash: undefined, configFile: undefined, databaseVersion: undefined, - dbFilename: undefined, + dbPath: undefined, githubUrl: undefined, locale: undefined, localUser: undefined, @@ -73,7 +73,7 @@ const component = { this.commitHash = data.commitHash; this.configFile = data.configFile; this.databaseVersion = data.databaseVersion; - this.dbFilename = data.dbFilename; + this.dbPath = data.dbPath; this.githubUrl = data.githubUrl; this.locale = data.locale; this.localUser = data.localUser; From 1eda022e78d8cf02a5e34a83a90cfb3b14ff6d9d Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 18:17:00 +0200 Subject: [PATCH 47/93] flake8 --- medusa/databases/failed_db.py | 2 +- medusa/databases/main_db.py | 3 +-- medusa/failed_history.py | 2 +- medusa/history.py | 3 +-- medusa/search/backlog.py | 2 +- medusa/search/proper.py | 2 +- medusa/server/web/manage/handler.py | 8 ++++---- medusa/show/history.py | 1 + medusa/tv/series.py | 2 +- 9 files changed, 12 insertions(+), 13 deletions(-) diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 09683ae64d..f1944f8aaa 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -193,6 +193,6 @@ def shift_history_qualities(self): ) def update_status_unknown(self): - """Changes any `UNKNOWN` quality to 1.""" + """Change any `UNKNOWN` quality to 1.""" log.info(u'Update status UNKONWN from tv_episodes') self.connection.action("UPDATE history SET quality = 1 WHERE quality = 65536;") diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 0c74181748..69d3f22382 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -260,8 +260,7 @@ def fix_show_nfo_lang(self): def fix_status_qualities(self): """ - Check for a status bigger than 12, and translate to a status + quality, - as these are old composite statuses. 
+ Check for a status bigger than 12, and translate to a status + quality, as these are old composite statuses. This can be removed when all code that creates composite statuses has been migrated. Until then this can be used to keep the DB sane. diff --git a/medusa/failed_history.py b/medusa/failed_history.py index 5dac98c390..5b5a04fd61 100644 --- a/medusa/failed_history.py +++ b/medusa/failed_history.py @@ -24,7 +24,7 @@ from datetime import datetime, timedelta from medusa import db, logger -from medusa.common import FAILED, Quality, WANTED, statusStrings +from medusa.common import FAILED, WANTED, statusStrings from medusa.helper.common import episode_num from medusa.helper.exceptions import EpisodeNotFoundException from medusa.show.history import History diff --git a/medusa/history.py b/medusa/history.py index 467b73299e..6843a1f64d 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -21,7 +21,7 @@ import datetime from medusa import db -from medusa.common import FAILED, Quality, SNATCHED, SUBTITLED +from medusa.common import FAILED, SNATCHED, SUBTITLED from medusa.helper.encoding import ss from medusa.show.history import History @@ -126,5 +126,4 @@ def log_failed(ep_obj, release, provider=None): :param release: Release group :param provider: Provider used for snatch """ - _log_history_item(FAILED, ep_obj, release, provider) diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index d63bc07c03..495aac1519 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -10,7 +10,7 @@ from builtins import str from medusa import app, db, scheduler, ui -from medusa.common import UNSET, Quality +from medusa.common import Quality, UNSET from medusa.helper.common import episode_num from medusa.logger.adapters.style import BraceAdapter from medusa.search.queue import BacklogQueueItem diff --git a/medusa/search/proper.py b/medusa/search/proper.py index e1a33847e1..ac64f81deb 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ 
-15,7 +15,7 @@ from builtins import str from medusa import app, db, helpers -from medusa.common import cpu_presets, DOWNLOADED, SUBTITLED +from medusa.common import DOWNLOADED, SUBTITLED, cpu_presets from medusa.helper.common import enabled_providers from medusa.helper.exceptions import AuthException, ex from medusa.logger.adapters.style import BraceAdapter diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index 4cbfa94e2b..4b72467eb7 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -20,11 +20,11 @@ ui, ) from medusa.common import ( - Overview, DOWNLOADED, + Overview, SNATCHED, - SNATCHED_PROPER, SNATCHED_BEST, + SNATCHED_PROPER, ) from medusa.helper.common import ( episode_num, @@ -286,7 +286,7 @@ def downloadSubtitleMissed(self, *args, **kwargs): all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' - b"WHERE status = '4' " + b"WHERE status = '4' " b'AND season != 0 ' b'AND indexer = ? ' b'AND showid = ? 
' @@ -406,7 +406,7 @@ def backlogOverview(self): sql_results = main_db_con.select( b""" - SELECT e.status, e.quality, e.season, + SELECT e.status, e.quality, e.season, e.episode, e.name, e.airdate, e.manually_searched FROM tv_episodes as e WHERE e.season IS NOT NULL AND diff --git a/medusa/show/history.py b/medusa/show/history.py index 7d59e380f8..3d5c4c9674 100644 --- a/medusa/show/history.py +++ b/medusa/show/history.py @@ -20,6 +20,7 @@ from builtins import object from collections import namedtuple from datetime import datetime, timedelta + from medusa.common import DOWNLOADED, SNATCHED from medusa.helper.common import try_int diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 65f3dca776..4f01ea8090 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -34,8 +34,8 @@ from medusa.common import ( ARCHIVED, DOWNLOADED, - IGNORED, FAILED, + IGNORED, Overview, Quality, SKIPPED, From 785eab867d933d9d8be89c3a49d4df5b3ddec1a3 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 18:19:19 +0200 Subject: [PATCH 48/93] Remove old Unknown Quality hacks --- medusa/providers/generic_provider.py | 9 --------- medusa/search/core.py | 4 ++-- medusa/search/manual.py | 3 +-- 3 files changed, 3 insertions(+), 13 deletions(-) diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index dfb3654e04..55428e7e92 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -294,15 +294,6 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False sorted_qualities = sorted(categorized_items, reverse=True) log.debug('Found qualities: {0}', sorted_qualities) - # move Quality.UNKNOWN to the end of the list - try: - sorted_qualities.remove(Quality.UNKNOWN) - except ValueError: - log.debug('No unknown qualities in results') - else: - sorted_qualities.append(Quality.UNKNOWN) - log.debug('Unknown qualities moved to end of results') - # chain items sorted by quality sorted_items = 
chain.from_iterable( categorized_items[quality] diff --git a/medusa/search/core.py b/medusa/search/core.py index 3605af8162..ebfd803881 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -420,7 +420,7 @@ def wanted_episodes(series_obj, from_date): } ) ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode']) - ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality and i != Quality.UNKNOWN] + ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality] wanted.append(ep_obj) return wanted @@ -686,7 +686,7 @@ def search_providers(series_obj, episodes, forced_search=False, down_cur_quality highest_quality_overall = 0 for cur_episode in found_results[cur_provider.name]: for cur_result in found_results[cur_provider.name][cur_episode]: - if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: + if cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality log.debug(u'The highest quality of any match is {0}', Quality.qualityStrings[highest_quality_overall]) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index 8b2ee98cdc..3f7b1d1686 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -264,8 +264,7 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search # Check if we have the combined sql strings if combined_sql_q: sql_prepend = b"SELECT * FROM (" - sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \ - b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN) + sql_append = b") ORDER BY CASE quality CAST(quality AS DECIMAL) END DESC, proper_tags DESC, seeders DESC" # Add all results sql_total += main_db_con.select(b'{0} {1} {2}'. 
From 098215762db4de521b86314ffcbbe9ef7fc3638f Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 18:20:08 +0200 Subject: [PATCH 49/93] Update tests and add cases --- tests/conftest.py | 2 +- tests/legacy/helper/quality_tests.py | 2 +- tests/test_should_replace.py | 11 +++++++++++ tests/test_should_search.py | 9 +++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 577262dc83..aa5c6ff1d9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -128,7 +128,7 @@ def create(language, **kwargs): @pytest.fixture def create_tvshow(monkeypatch): - def create(indexer=INDEXER_TVDBV2, indexerid=0, lang='', quality=Quality.UNKNOWN, season_folders=1, + def create(indexer=INDEXER_TVDBV2, indexerid=0, lang='', quality=Quality.NA, season_folders=1, enabled_subtitles=0, **kwargs): monkeypatch.setattr(Series, '_load_from_db', lambda method: None) target = Series(indexer=indexer, indexerid=indexerid, lang=lang, quality=quality, diff --git a/tests/legacy/helper/quality_tests.py b/tests/legacy/helper/quality_tests.py index 60f0b4fe3d..6729181b85 100644 --- a/tests/legacy/helper/quality_tests.py +++ b/tests/legacy/helper/quality_tests.py @@ -41,7 +41,7 @@ def test_get_quality_string(self): Quality.HDBLURAY: '720p BluRay', Quality.HDTV: '720p HDTV', Quality.HDWEBDL: '720p WEB-DL', - Quality.NONE: 'N/A', + Quality.NA: 'N/A', Quality.RAWHDTV: 'RawHD', Quality.SDDVD: 'SD DVD', Quality.SDTV: 'SDTV', diff --git a/tests/test_should_replace.py b/tests/test_should_replace.py index a85128a014..42dee9af64 100644 --- a/tests/test_should_replace.py +++ b/tests/test_should_replace.py @@ -505,6 +505,17 @@ 'search_type': PROPER_SEARCH, 'expected': False }, + { # p45: Downloaded UNKNOWN and it's Preferred: yes + 'ep_status': DOWNLOADED, + 'cur_quality': Quality.HDTV, + 'new_quality': Quality.UNKNOWN, + 'allowed_qualities': [Quality.HDTV], + 'preferred_qualities': [Quality.UNKNOWN], + 'download_current_quality': False, + 'force': 
False, + 'manually_searched': False, + 'expected': True + }, ]) def test_should_replace(p): """Run the test.""" diff --git a/tests/test_should_search.py b/tests/test_should_search.py index 9d137ea593..760ffaabf1 100644 --- a/tests/test_should_search.py +++ b/tests/test_should_search.py @@ -209,6 +209,15 @@ def _load_from_db(self): 'manually_searched': False, 'expected': True }, + { # p21: Downloaded HDTV and it's in Allowed. Preferred is set (UNKNOWN): yes + 'status': DOWNLOADED, + 'quality': Quality.HDTV, + 'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='', + quality=Quality.combine_qualities([Quality.HDTV], # Allowed Qualities + [Quality.UNKNOWN])), # Preferred Qualities + 'manually_searched': False, + 'expected': True + }, ]) def test_should_search(p): """Run the test.""" From 3fb57741ee6a2f2bf8a67072facb408ede20ea10 Mon Sep 17 00:00:00 2001 From: Dario Date: Tue, 29 May 2018 21:20:10 +0200 Subject: [PATCH 50/93] Fix SQL statement --- medusa/search/manual.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index 3f7b1d1686..92de2e9819 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -264,7 +264,7 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search # Check if we have the combined sql strings if combined_sql_q: sql_prepend = b"SELECT * FROM (" - sql_append = b") ORDER BY CASE quality CAST(quality AS DECIMAL) END DESC, proper_tags DESC, seeders DESC" + sql_append = b") ORDER BY quality DESC, proper_tags DESC, seeders DESC" # Add all results sql_total += main_db_con.select(b'{0} {1} {2}'. From f9e3863e547a057fe01755e5e73a26cf6e9adede Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 30 May 2018 01:09:56 +0200 Subject: [PATCH 51/93] Put provider name in quotes. 
Thanks @sharkykh --- medusa/databases/cache_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 5fdbb0da03..0325ab3909 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -182,7 +182,7 @@ def clear_provider_tables(self): " 'db_version', 'scene_exceptions', 'last_update');") for provider in providers: - self.connection.action("DELETE FROM {name};".format(name=provider[b'name'])) + self.connection.action("DELETE FROM '{name}';".format(name=provider[b'name'])) def inc_major_version(self): major_version, minor_version = self.connection.version From a3a769048a4a15aefd959f0c61a66711eb7a495d Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 30 May 2018 01:18:11 +0200 Subject: [PATCH 52/93] Add space in SQL query --- medusa/databases/main_db.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index 69d3f22382..8fb9a3db85 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -753,7 +753,7 @@ def execute(self): self.connection.action('CREATE TABLE IF NOT EXISTS new_tv_episodes ' '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, ' - 'name TEXT, season NUMERIC, episode NUMERIC,description TEXT, airdate NUMERIC, hasnfo NUMERIC, ' + 'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, ' 'hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, ' 'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, ' 'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, ' @@ -761,13 +761,13 @@ def execute(self): 'ep_status NUMERIC, ep_quality NUMERIC);') self.connection.action('INSERT INTO new_tv_episodes (showid, indexerid, indexer, ' - 'name, season, episode,description, airdate, hasnfo, ' + 'name, season, episode, 
description, airdate, hasnfo, ' 'hastbn, status, location, file_size, release_name, ' 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' 'is_proper, scene_season, scene_episode, absolute_number, ' 'scene_absolute_number, version, release_group, manually_searched, ' 'ep_status, ep_quality) SELECT showid, indexerid, indexer, ' - 'name, season, episode,description, airdate, hasnfo, ' + 'name, season, episode, description, airdate, hasnfo, ' 'hastbn, status, location, file_size, release_name, ' 'subtitles, subtitles_searchcount, subtitles_lastsearch, ' 'is_proper, scene_season, scene_episode, absolute_number, ' From 71265ea6b30478b6573a27f2f8d78197f52910cd Mon Sep 17 00:00:00 2001 From: Dario Date: Wed, 30 May 2018 20:11:40 +0200 Subject: [PATCH 53/93] Fix qualities not showing after a manual snatch --- medusa/search/manual.py | 3 ++- themes-default/slim/static/js/ajax-episode-search.js | 10 +--------- themes/dark/assets/js/ajax-episode-search.js | 10 +--------- themes/dark/assets/js/ajax-episode-search.js.map | 2 +- themes/light/assets/js/ajax-episode-search.js | 10 +--------- themes/light/assets/js/ajax-episode-search.js.map | 2 +- 6 files changed, 7 insertions(+), 30 deletions(-) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index 92de2e9819..7c130a872f 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -102,7 +102,8 @@ def get_episodes(search_thread, searchstatus): 'season': ep.season, 'searchstatus': searchstatus, 'status': statusStrings[ep.status], - 'quality': get_quality_class(ep), + 'quality_name': Quality.qualityStrings[ep.quality], + 'quality_style': get_quality_class(ep), 'overview': Overview.overviewStrings[series_obj.get_overview( ep.status, ep.quality, manually_searched=ep.manually_searched diff --git a/themes-default/slim/static/js/ajax-episode-search.js b/themes-default/slim/static/js/ajax-episode-search.js index 907fbc29c6..04d07b65d5 100644 --- a/themes-default/slim/static/js/ajax-episode-search.js +++ 
b/themes-default/slim/static/js/ajax-episode-search.js @@ -30,7 +30,6 @@ function updateImages(data) { const img = el.children('img[data-ep-search]'); const parent = el.parent(); if (el) { - let rSearchTerm = ''; if (ep.searchstatus.toLowerCase() === 'searching') { // El=$('td#' + ep.season + 'x' + ep.episode + '.search img'); img.prop('title', 'Searching'); @@ -54,8 +53,7 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - rSearchTerm = /(\w+(\s\((\bBest\b|\bProper\b)\))?)\s\((.+?)\)/; - htmlContent = ep.status.replace(rSearchTerm, "$1" + ' ' + "$4" + ''); // eslint-disable-line quotes, no-useless-concat + htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +145,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +183,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes/dark/assets/js/ajax-episode-search.js b/themes/dark/assets/js/ajax-episode-search.js index 907fbc29c6..04d07b65d5 100644 --- a/themes/dark/assets/js/ajax-episode-search.js +++ b/themes/dark/assets/js/ajax-episode-search.js @@ -30,7 +30,6 @@ function updateImages(data) { const img = el.children('img[data-ep-search]'); const parent = el.parent(); if (el) { - let rSearchTerm = ''; if (ep.searchstatus.toLowerCase() === 'searching') { // 
El=$('td#' + ep.season + 'x' + ep.episode + '.search img'); img.prop('title', 'Searching'); @@ -54,8 +53,7 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - rSearchTerm = /(\w+(\s\((\bBest\b|\bProper\b)\))?)\s\((.+?)\)/; - htmlContent = ep.status.replace(rSearchTerm, "$1" + ' ' + "$4" + ''); // eslint-disable-line quotes, no-useless-concat + htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +145,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +183,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes/dark/assets/js/ajax-episode-search.js.map b/themes/dark/assets/js/ajax-episode-search.js.map index 5c7c94a0da..1430a6ab10 100644 --- a/themes/dark/assets/js/ajax-episode-search.js.map +++ b/themes/dark/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return 
o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n let rSearchTerm = '';\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n rSearchTerm = /(\\w+(\\s\\((\\bBest\\b|\\bProper\\b)\\))?)\\s\\((.+?)\\)/;\n htmlContent = ep.status.replace(rSearchTerm, \"$1\" + ' ' + \"$4\" + ''); // eslint-disable-line quotes, no-useless-concat\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = 
elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n let htmlContent;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then 
apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Applying the quality class\n const rSearchTerm = /(\\w+)\\s\\((.+?)\\)/;\n htmlContent = data.result.replace(rSearchTerm, '$1 $2');\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n 
});\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + 
searchImage);\n enableLink(el);\n\n // Update Status and Quality\n htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // 
corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of 
file diff --git a/themes/light/assets/js/ajax-episode-search.js b/themes/light/assets/js/ajax-episode-search.js index 907fbc29c6..04d07b65d5 100644 --- a/themes/light/assets/js/ajax-episode-search.js +++ b/themes/light/assets/js/ajax-episode-search.js @@ -30,7 +30,6 @@ function updateImages(data) { const img = el.children('img[data-ep-search]'); const parent = el.parent(); if (el) { - let rSearchTerm = ''; if (ep.searchstatus.toLowerCase() === 'searching') { // El=$('td#' + ep.season + 'x' + ep.episode + '.search img'); img.prop('title', 'Searching'); @@ -54,8 +53,7 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - rSearchTerm = /(\w+(\s\((\bBest\b|\bProper\b)\))?)\s\((.+?)\)/; - htmlContent = ep.status.replace(rSearchTerm, "$1" + ' ' + "$4" + ''); // eslint-disable-line quotes, no-useless-concat + htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +145,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +183,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes/light/assets/js/ajax-episode-search.js.map b/themes/light/assets/js/ajax-episode-search.js.map index 5c7c94a0da..1430a6ab10 100644 --- a/themes/light/assets/js/ajax-episode-search.js.map +++ 
b/themes/light/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n let rSearchTerm = '';\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n rSearchTerm = /(\\w+(\\s\\((\\bBest\\b|\\bProper\\b)\\))?)\\s\\((.+?)\\)/;\n htmlContent = 
ep.status.replace(rSearchTerm, \"$1\" + ' ' + \"$4\" + ''); // eslint-disable-line quotes, no-useless-concat\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n let htmlContent;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then 
apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Applying the quality class\n const rSearchTerm = /(\\w+)\\s\\((.+?)\\)/;\n htmlContent = data.result.replace(rSearchTerm, '$1 $2');\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n 
});\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + 
searchImage);\n enableLink(el);\n\n // Update Status and Quality\n htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // 
corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of 
file From 942eb3095ace01860979f1f6caea4eae1928c925 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 01:55:49 +0200 Subject: [PATCH 54/93] Fix status compare. Thanks @sharkykh --- medusa/tv/series.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 4f01ea8090..f053c9deed 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1757,7 +1757,7 @@ def refresh_dir(self): with cur_ep.lock: # if it used to have a file associated with it and it doesn't anymore then # set it to app.EP_DEFAULT_DELETED_STATUS - if cur_ep.location and cur_ep.status in DOWNLOADED: + if cur_ep.location and cur_ep.status == DOWNLOADED: if app.EP_DEFAULT_DELETED_STATUS == ARCHIVED: new_status = ARCHIVED From 49ebf4ac0f3b6805a54efc8c7052eece2e65ac51 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 14:53:58 +0200 Subject: [PATCH 55/93] Use explicit status/quality instead of integers --- medusa/search/backlog.py | 2 +- medusa/search/core.py | 2 +- medusa/tv/episode.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index 495aac1519..12311bc9e9 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -169,7 +169,7 @@ def _get_segments(series_obj, from_date): # check through the list of statuses to see if we want any for episode in sql_results: - cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or 0) + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or Quality.NA) should_search, should_search_reason = Quality.should_search( cur_status, cur_quality, series_obj, episode[b'manually_searched'] ) diff --git a/medusa/search/core.py b/medusa/search/core.py index ebfd803881..63d571f826 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -405,7 +405,7 @@ def wanted_episodes(series_obj, from_date): # check through the list of statuses to see if we 
want any for episode in sql_results: - cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or 0) + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or Quality.NA) should_search, should_search_reason = Quality.should_search( cur_status, cur_quality, series_obj, episode[b'manually_searched'] ) diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 4e43b6af90..15bd0816e8 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -627,8 +627,8 @@ def load_from_db(self, season, episode): self.subtitles_searchcount = sql_results[0][b'subtitles_searchcount'] self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) - self.status = int(sql_results[0][b'status'] or -1) - self.quality = int(sql_results[0][b'quality'] or 0) + self.status = int(sql_results[0][b'status'] or UNSET) + self.quality = int(sql_results[0][b'quality'] or Quality.NA) # don't overwrite my location if sql_results[0][b'location']: From 3e3ec64d6ac182ca249cbb468652b3429e3cd6d6 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 15:00:02 +0200 Subject: [PATCH 56/93] Improve log message and doc --- medusa/common.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index 1f247ea9fe..9c4a9fe5ba 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -602,24 +602,24 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe """Return true if the old quality should be replaced with new quality. If not preferred qualities, then any downloaded quality is final - if preferred quality, then new quality should be higher than existing one AND not be in preferred - If new quality is already in preferred then is already final quality. 
+ If preferred quality, then new quality should be higher than existing one AND not be in preferred + If new quality is already in preferred then is already final quality Force (forced search) bypass episode status only or unknown quality :param ep_status: current status of the episode :param old_quality: current quality of the episode - :param new_quality: quality of the episode we found it and check if we should snatch it + :param new_quality: quality of the episode we found :param allowed_qualities: List of selected allowed qualities of the show we are checking :param preferred_qualities: List of selected preferred qualities of the show we are checking :param download_current_quality: True if user wants the same existing quality to be snatched :param force: True if user did a forced search for that episode - :param manually_searched: True if episode was manually searched by user + :param manually_searched: True if episode was manually searched :param search_type: The search type, that started this method - :return: True if the old quality should be replaced with new quality. + :return: True if the old quality should be replaced with new quality """ if ep_status and ep_status not in (DOWNLOADED, SNATCHED, SNATCHED_PROPER): if not force: - return False, 'Episode status is not DOWNLOADED|SNATCHED|SNATCHED PROPER. Ignoring new quality' + return False, 'Episode status is not Downloaded, Snatched or Snatched Proper. 
Ignoring new quality' if manually_searched: if not force: From b4b27ca1ee4a61d5fdd762637c24003c50602ecf Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 15:11:36 +0200 Subject: [PATCH 57/93] Improve trakt_checker.py coding style --- medusa/trakt_checker.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/medusa/trakt_checker.py b/medusa/trakt_checker.py index 19505885b7..2528c6d112 100644 --- a/medusa/trakt_checker.py +++ b/medusa/trakt_checker.py @@ -223,17 +223,17 @@ def remove_episode_trakt_collection(self, filter_show=None): params = [] main_db_con = db.DBConnection() - status = [DOWNLOADED, ARCHIVED] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name,' \ b'e.season, e.episode, e.status ' \ b'FROM tv_episodes AS e, tv_shows AS s WHERE e.indexer = s.indexer AND ' \ b's.indexer_id = e.showid and e.location = "" ' \ - b'AND e.status in ({0})'.format(','.join(['?']*len(status))) + b'AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) if filter_show: sql_selection += b' AND s.indexer_id = ? AND e.indexer = ?' 
params = [filter_show.series_id, filter_show.indexer] - sql_result = main_db_con.select(sql_selection, status + params) + sql_result = main_db_con.select(sql_selection, statuses + params) episodes = [dict(e) for e in sql_result] if episodes: @@ -274,13 +274,13 @@ def add_episode_trakt_collection(self): if app.TRAKT_SYNC and app.USE_TRAKT: main_db_con = db.DBConnection() - status = [DOWNLOADED, ARCHIVED] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \ - b"AND e.status in ({0}) AND e.location <> ''".format(','.join(['?']*len(status))) + b"AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses))) - sql_result = main_db_con.select(sql_selection, status) + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(e) for e in sql_result] if episodes: @@ -337,12 +337,13 @@ def remove_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = [DOWNLOADED, ARCHIVED] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer ' \ - b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(b','.join([b'?']*len(status))) - sql_result = main_db_con.select(sql_selection, status) + b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) + + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(i) for i in sql_result] if episodes: @@ -381,12 +382,13 @@ def add_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = [SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED] + statuses = [SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED] 
sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid AND s.paused = 0 ' \ - b'AND e.status in ({0})'.format(b','.join([b'?']*len(status))) - sql_result = main_db_con.select(sql_selection, status) + b'AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) + + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(i) for i in sql_result] if episodes: From cfdc8aa4566ac91dd9306b047c60d0eec9f545d1 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 16:01:18 +0200 Subject: [PATCH 58/93] Compare quality value instead of name --- medusa/post_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 372854a6ed..706e1cfc11 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -1013,7 +1013,7 @@ def process(self): old_ep_quality = ep_obj.quality # get the quality of the episode we're processing - if quality and common.Quality.qualityStrings[quality] != 'Unknown': + if quality and quality != common.Quality.UNKNOWN: self.log(u'The episode file has a quality in it, using that: {0}'.format (common.Quality.qualityStrings[quality]), logger.DEBUG) new_ep_quality = quality From aa18bfd6a1bef67d15a89b9cf4818c3c0cb97feb Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 16:04:49 +0200 Subject: [PATCH 59/93] Removed extra space from SQL query --- medusa/server/web/manage/handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index 4b72467eb7..bc3d728c38 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -229,7 +229,7 @@ def subtitleMissed(self, whichSubs=None): b'WHERE tv_shows.subtitles = 1 ' b"AND tv_episodes.status = '4' " b'AND tv_episodes.season != 0 ' - b"AND 
tv_episodes.location != '' " + b"AND tv_episodes.location != '' " b'AND tv_episodes.showid = tv_shows.indexer_id ' b'AND tv_episodes.indexer = tv_shows.indexer ' b'ORDER BY show_name' From 144b51ddafcb749eadf7055772a1a9b2ad5aee3d Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 16:24:49 +0200 Subject: [PATCH 60/93] Fix doc in series, remove try_int and or --- medusa/tv/series.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index f053c9deed..80c38630bb 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -2197,7 +2197,7 @@ def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searc :param ep_status: an Episode status :type ep_status: int - :param ep_quality: an Episode status + :param ep_quality: an Episode quality :type ep_quality: int :param backlog_mode: if we should return overview for backlogOverview :type backlog_mode: boolean @@ -2206,7 +2206,8 @@ def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searc :return: an Overview status :rtype: int """ - ep_status = try_int(ep_status) or UNSET + ep_status = int(ep_status) + ep_quality = int(ep_quality) if backlog_mode: if ep_status == WANTED: From 831922341802e39e7a8d475f8c4cd2b376755bfc Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 16:26:00 +0200 Subject: [PATCH 61/93] Renamed logSubtitle to log_subtitle --- medusa/history.py | 2 +- medusa/subtitles.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/history.py b/medusa/history.py index 6843a1f64d..fe9ddf78f3 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -102,7 +102,7 @@ def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=- _log_history_item(action, ep_obj, filename, provider, version, size=size) -def logSubtitle(ep_obj, subtitle_result): +def log_subtitle(ep_obj, subtitle_result): """ Log download of subtitle diff --git a/medusa/subtitles.py 
b/medusa/subtitles.py index f2e8f469a0..08cb80324b 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -492,7 +492,7 @@ def save_subs(tv_episode, video, found_subtitles, video_path=None): if app.SUBTITLES_HISTORY: logger.debug(u'Logging to history downloaded subtitle from provider %s and language %s', subtitle.provider_name, subtitle.language.opensubtitles) - history.logSubtitle(tv_episode, subtitle) + history.log_subtitle(tv_episode, subtitle) # Refresh the subtitles property if tv_episode.location: From ac3bd5883b6875495a1f2eead2ce8ff416793933 Mon Sep 17 00:00:00 2001 From: Dario Date: Thu, 31 May 2018 16:34:01 +0200 Subject: [PATCH 62/93] Remove superfluous parenthesis and variable --- themes-default/slim/views/displayShow.mako | 4 ++-- themes-default/slim/views/manage_backlogOverview.mako | 3 +-- themes-default/slim/views/manage_episodeStatuses.mako | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index 787035eec7..2d495b7a58 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -291,7 +291,7 @@ const startVue = () => { % endif % if int(epResult["season"]) != 0: - % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): + % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED): retry % else: search @@ -300,7 +300,7 @@ const startVue = () => { % else: search % endif - % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): search subtitles % endif diff --git a/themes-default/slim/views/manage_backlogOverview.mako 
b/themes-default/slim/views/manage_backlogOverview.mako index 3f89da76fb..50322d6940 100644 --- a/themes-default/slim/views/manage_backlogOverview.mako +++ b/themes-default/slim/views/manage_backlogOverview.mako @@ -146,7 +146,6 @@ const startVue = () => { <% old_status = cur_result['status'] old_quality = cur_result['quality'] - archived_status = ARCHIVED %> ${cur_result['episode_string']} @@ -172,7 +171,7 @@ const startVue = () => { search search % if old_status == DOWNLOADED: - search + search % endif diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index 986e320175..e94e9b2601 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -59,7 +59,7 @@ Set checked shows/episodes to - <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index 787035eec7..2d495b7a58 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -291,7 +291,7 @@ const startVue = () => { % endif % if int(epResult["season"]) != 0: - % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): + % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED): retry % else: search @@ -300,7 +300,7 @@ const startVue = () => { % else: search % endif - % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): search subtitles % endif diff --git a/themes/dark/templates/manage_backlogOverview.mako 
b/themes/dark/templates/manage_backlogOverview.mako index 3f89da76fb..50322d6940 100644 --- a/themes/dark/templates/manage_backlogOverview.mako +++ b/themes/dark/templates/manage_backlogOverview.mako @@ -146,7 +146,6 @@ const startVue = () => { <% old_status = cur_result['status'] old_quality = cur_result['quality'] - archived_status = ARCHIVED %> ${cur_result['episode_string']} @@ -172,7 +171,7 @@ const startVue = () => { search search % if old_status == DOWNLOADED: - search + search % endif diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index 986e320175..e94e9b2601 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -59,7 +59,7 @@ Set checked shows/episodes to - <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index 787035eec7..2d495b7a58 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -291,7 +291,7 @@ const startVue = () => { % endif % if int(epResult["season"]) != 0: - % if app.USE_FAILED_DOWNLOADS and (int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED)): + % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED): retry % else: search @@ -300,7 +300,7 @@ const startVue = () => { % else: search % endif - % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and (int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST)): + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): search subtitles % endif diff --git a/themes/light/templates/manage_backlogOverview.mako 
b/themes/light/templates/manage_backlogOverview.mako index 3f89da76fb..50322d6940 100644 --- a/themes/light/templates/manage_backlogOverview.mako +++ b/themes/light/templates/manage_backlogOverview.mako @@ -146,7 +146,6 @@ const startVue = () => { <% old_status = cur_result['status'] old_quality = cur_result['quality'] - archived_status = ARCHIVED %> ${cur_result['episode_string']} @@ -172,7 +171,7 @@ const startVue = () => { search search % if old_status == DOWNLOADED: - search + search % endif diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako index 986e320175..e94e9b2601 100644 --- a/themes/light/templates/manage_episodeStatuses.mako +++ b/themes/light/templates/manage_episodeStatuses.mako @@ -59,7 +59,7 @@ Set checked shows/episodes to - <% availableStatus = [WANTED, SKIPPED, IGNORED, FAILED] %> % if not app.USE_FAILED_DOWNLOADS: <% availableStatus.remove(FAILED) %> From 68779e62c5887cb4d1448acdb860839d2bba6eb6 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Fri, 1 Jun 2018 03:18:58 +0300 Subject: [PATCH 75/93] Fix default status on failed.db history table schema --- medusa/databases/failed_db.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index 167507d768..e5fa345b6a 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -24,7 +24,7 @@ def execute(self): queries = [ ('CREATE TABLE failed (release TEXT, size NUMERIC, provider TEXT);',), ('CREATE TABLE history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT,' - ' status NUMERIC DEFAULT 0, quality NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1,' + ' status NUMERIC DEFAULT -1, quality NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1,' ' season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1);',), ('CREATE TABLE db_version (db_version INTEGER);',), ('INSERT INTO db_version (db_version) VALUES (1);',), From 
a9a1f3a3a6797eb20ad6c85921a6bb5e49bd5470 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Fri, 1 Jun 2018 04:40:51 +0300 Subject: [PATCH 76/93] Fix composite_status splitting --- medusa/databases/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/medusa/databases/utils.py b/medusa/databases/utils.py index 1accfbbf98..37ba0a5c26 100644 --- a/medusa/databases/utils.py +++ b/medusa/databases/utils.py @@ -62,9 +62,9 @@ def split_composite_status(status): status = int(status) if status == status_unset: - return (status_unset, qualities['NA']) + return (status_unset, qualities['NONE']) - for q in itervalues(qualities): + for q in sorted(itervalues(qualities), reverse=True): if status > q * 100: return (status - q * 100, q) From 7eb05f8235fc3aaae0aab035b5e0db92fcf59e06 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 00:11:10 +0300 Subject: [PATCH 77/93] Update to_guessit_format code and explaination --- medusa/common.py | 79 ++++++++++++++++++++++++++---------------------- 1 file changed, 43 insertions(+), 36 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index 9552c9d5e0..c1c52c3ad9 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -505,39 +505,6 @@ def scene_quality_from_name(name, quality): return rel_type + codec - guessit_map = { - '720p': { - 'HDTV': HDTV, - 'WEB-DL': HDWEBDL, - 'WEBRip': HDWEBDL, - 'BluRay': HDBLURAY, - }, - '1080i': RAWHDTV, - '1080p': { - 'HDTV': FULLHDTV, - 'WEB-DL': FULLHDWEBDL, - 'WEBRip': FULLHDWEBDL, - 'BluRay': FULLHDBLURAY - }, - '4K': { - 'HDTV': UHD_4K_TV, - 'WEB-DL': UHD_4K_WEBDL, - 'WEBRip': UHD_4K_WEBDL, - 'BluRay': UHD_4K_BLURAY - } - } - - to_guessit_format_list = [ - ANYHDTV, ANYWEBDL, ANYBLURAY, ANYHDTV | UHD_4K_TV, ANYWEBDL | UHD_4K_WEBDL, ANYBLURAY | UHD_4K_BLURAY - ] - - to_guessit_screen_size_map = { - HDTV | HDWEBDL | HDBLURAY: '720p', - RAWHDTV: '1080i', - FULLHDTV | FULLHDWEBDL | FULLHDBLURAY: '1080p', - UHD_4K_TV | UHD_4K_WEBDL | UHD_4K_BLURAY: '4K', - } - 
@staticmethod def should_search(cur_status, cur_quality, show_obj, manually_searched): """Return true if that episodes should be search for a better quality. @@ -652,6 +619,44 @@ def wanted_quality(new_quality, allowed_qualities, preferred_qualities): """Check if new quality is wanted.""" return new_quality in allowed_qualities + preferred_qualities + # Map guessit screen sizes and formats to our Quality values + guessit_map = { + '720p': { + 'HDTV': HDTV, + 'WEB-DL': HDWEBDL, + 'WEBRip': HDWEBDL, + 'BluRay': HDBLURAY, + }, + '1080i': RAWHDTV, + '1080p': { + 'HDTV': FULLHDTV, + 'WEB-DL': FULLHDWEBDL, + 'WEBRip': FULLHDWEBDL, + 'BluRay': FULLHDBLURAY + }, + '4K': { + 'HDTV': UHD_4K_TV, + 'WEB-DL': UHD_4K_WEBDL, + 'WEBRip': UHD_4K_WEBDL, + 'BluRay': UHD_4K_BLURAY + } + } + + # Consolidate the guessit-supported screen sizes of each format + to_guessit_format_list = [ + ANYHDTV | UHD_4K_TV, + ANYWEBDL | UHD_4K_WEBDL, + ANYBLURAY | UHD_4K_BLURAY + ] + + # Consolidate the formats of each guessit-supported screen size + to_guessit_screen_size_map = { + HDTV | HDWEBDL | HDBLURAY: '720p', + RAWHDTV: '1080i', + FULLHDTV | FULLHDWEBDL | FULLHDBLURAY: '1080p', + UHD_4K_TV | UHD_4K_WEBDL | UHD_4K_BLURAY: '4K', + } + @staticmethod def from_guessit(guess): """ @@ -714,9 +719,11 @@ def to_guessit_format(quality): :return: guessit format :rtype: str """ - for q in Quality.to_guessit_format_list: - if quality & q: - key = q & (1024 - 1) # 4k formats are bigger than 768 and are not part of ANY* bit set + for quality_set in Quality.to_guessit_format_list: + if quality_set & quality: # If quality_set contains quality + # Remove all 4K (and above) formats as they are bigger than Quality.ANYBLURAY, + # and they are not part of an "ANY*" bit set + key = quality_set & (Quality.UHD_4K_TV - 1) return Quality.combinedQualityStrings.get(key) @staticmethod From 6e6b3741037183e6a0bfc5a0c5b30ee308dac572 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 00:39:12 +0300 Subject: [PATCH 
78/93] Update Quality.to_guessit docstring --- medusa/common.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/medusa/common.py b/medusa/common.py index c1c52c3ad9..abdee6f63c 100644 --- a/medusa/common.py +++ b/medusa/common.py @@ -690,8 +690,6 @@ def from_guessit(guess): def to_guessit(quality): """Return a guessit dict containing 'screen_size and format' from a Quality. - This was previously a composite status. But status/quality have been separated into their own attributes. - :param quality: a quality :type quality: int :return: dict {'screen_size': , 'format': } From 72c7912b4567e82e3edbdc66d001b511d829cbe8 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 01:01:25 +0300 Subject: [PATCH 79/93] Use status constants in SQL queries --- medusa/helpers/__init__.py | 10 +++++----- medusa/post_processor.py | 18 ++++++++++++------ medusa/process_tv.py | 5 +++-- medusa/search/proper.py | 9 +++++---- medusa/server/web/home/handler.py | 5 +++-- medusa/server/web/manage/handler.py | 13 +++++++------ medusa/subtitles.py | 7 ++++--- 7 files changed, 39 insertions(+), 28 deletions(-) diff --git a/medusa/helpers/__init__.py b/medusa/helpers/__init__.py index b38885b314..7f3b6c74a1 100644 --- a/medusa/helpers/__init__.py +++ b/medusa/helpers/__init__.py @@ -43,7 +43,7 @@ from imdbpie import imdbpie from medusa import app, db -from medusa.common import USER_AGENT +from medusa.common import DOWNLOADED, USER_AGENT from medusa.helper.common import (episode_num, http_code_description, media_extensions, pretty_file_size, subtitle_extensions) from medusa.helpers.utils import generate @@ -1710,9 +1710,9 @@ def is_already_processed_media(full_filename): """Check if resource was already processed.""" main_db_con = db.DBConnection() history_result = main_db_con.select('SELECT action FROM history ' - "WHERE action = '4' " + 'WHERE action = ? 
' 'AND resource LIKE ?', - ['%' + full_filename]) + [DOWNLOADED, '%' + full_filename]) return bool(history_result) @@ -1735,8 +1735,8 @@ def is_info_hash_processed(info_hash): 'd.season = s.season AND ' 'd.episode = s.episode AND ' 'd.quality = s.quality ' - 'WHERE d.action = "4"', - [info_hash]) + 'WHERE d.action = ?', + [info_hash, DOWNLOADED]) return bool(history_result) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 706e1cfc11..5f34353b2a 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -789,6 +789,7 @@ def _get_quality(self, ep_obj): def _priority_from_history(self, series_obj, season, episodes, quality): """Evaluate if the file should be marked as priority.""" main_db_con = db.DBConnection() + snatched_statuses = [common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] for episode in episodes: # First: check if the episode status is snatched tv_episodes_result = main_db_con.select( @@ -798,8 +799,9 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND status IN ('2', '9', '12') ", - [series_obj.indexer, series_obj.series_id, season, episode] + 'AND status IN (?, ?, ?) ', + [series_obj.indexer, series_obj.series_id, + season, episode] + snatched_statuses ) if tv_episodes_result: @@ -812,9 +814,11 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND action IN ('2', '9', '12') " + 'AND action IN (?, ?, ?) ' 'ORDER BY date DESC', - [series_obj.indexer, series_obj.series_id, season, episode]) + [series_obj.indexer, series_obj.series_id, + season, episode] + snatched_statuses + ) if history_result and history_result[0][b'quality'] == quality: # Third: make sure the file we are post-processing hasn't been @@ -834,9 +838,11 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND season = ? ' 'AND episode = ? 
' 'AND quality = ? ' - "AND action = '4' " + 'AND action = ? ' 'ORDER BY date DESC', - [series_obj.indexer, series_obj.series_id, season, episode, quality]) + [series_obj.indexer, series_obj.series_id, + season, episode, quality, common.DOWNLOADED] + ) if download_result: download_name = os.path.basename(download_result[0][b'resource']) diff --git a/medusa/process_tv.py b/medusa/process_tv.py index 2aee1c35a2..8be0e3e850 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -10,6 +10,7 @@ from builtins import object from medusa import app, db, failed_processor, helpers, logger, notifiers, post_processor +from medusa.common import DOWNLOADED from medusa.clients import torrent from medusa.helper.common import is_sync_file from medusa.helper.exceptions import EpisodePostProcessingFailedException, FailedPostProcessingFailedException, ex @@ -498,9 +499,9 @@ def already_postprocessed(self, video_file): main_db_con = db.DBConnection() history_result = main_db_con.select( 'SELECT * FROM history ' - "WHERE action = '4' " # DOWNLOADED + 'WHERE action = ? 
' 'AND resource LIKE ?', - ['%' + video_file]) + [DOWNLOADED, '%' + video_file]) if history_result: self.log("You're trying to post-process a file that has already " diff --git a/medusa/search/proper.py b/medusa/search/proper.py index ac64f81deb..e5c1500639 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -15,7 +15,7 @@ from builtins import str from medusa import app, db, helpers -from medusa.common import DOWNLOADED, SUBTITLED, cpu_presets +from medusa.common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, SUBTITLED, cpu_presets from medusa.helper.common import enabled_providers from medusa.helper.exceptions import AuthException, ex from medusa.logger.adapters.style import BraceAdapter @@ -108,7 +108,7 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # Episode status becomes downloaded only after found subtitles last_subtitled = search_date.strftime(History.date_format) recently_aired = main_db_con.select(b'SELECT indexer_id AS indexer, showid, season, episode FROM history ' - b"WHERE date >= ? AND action = ?", [last_subtitled, SUBTITLED]) + b'WHERE date >= ? AND action = ?', [last_subtitled, SUBTITLED]) if not recently_aired: log.info('No recently aired new episodes, nothing to search for') @@ -339,9 +339,10 @@ def _download_propers(self, proper_list): b'AND episode = ? ' b'AND quality = ? ' b'AND date >= ? 
' - b"AND action in ('2', '4', '9', '12')", + b'AND action in (?, ?, ?, ?)', [cur_proper.indexerid, cur_proper.actual_season, cur_proper.actual_episode, cur_proper.quality, - history_limit.strftime(History.date_format)]) + history_limit.strftime(History.date_format), + DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]) # make sure that none of the existing history downloads are the same proper we're trying to download # if the result exists in history already we need to skip it diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index 6058c7dc38..002d420b51 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -1283,9 +1283,10 @@ def titler(x): b'AND showid = ? ' b'AND season = ? ' b'AND episode = ? ' - b"AND action in ('2', '4', '9', '11', '12') " # SNATCHED, DOWN, SNATCH_PROP, FAILED, SNATCH_BEST + b'AND action in (?, ?, ?, ?, ?) ' b'ORDER BY date DESC', - [indexer_id, series_id, season, episode] + [indexer_id, series_id, season, episode, + DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED] ) episode_history = [dict(row) for row in episode_status_result] for i in episode_history: diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index bc3d728c38..f860d17892 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -186,9 +186,9 @@ def showSubtitleMissed(indexer, seriesid, whichSubs): b'WHERE indexer = ? ' b'AND showid = ? ' b'AND season != 0 ' - b"AND status = '4' " + b'AND status = ? ' b"AND location != ''", - [int(indexer), int(seriesid)] + [int(indexer), int(seriesid), DOWNLOADED] ) result = {} @@ -227,12 +227,13 @@ def subtitleMissed(self, whichSubs=None): b'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles ' b'FROM tv_episodes, tv_shows ' b'WHERE tv_shows.subtitles = 1 ' - b"AND tv_episodes.status = '4' " + b'AND tv_episodes.status = ? 
' b'AND tv_episodes.season != 0 ' b"AND tv_episodes.location != '' " b'AND tv_episodes.showid = tv_shows.indexer_id ' b'AND tv_episodes.indexer = tv_shows.indexer ' - b'ORDER BY show_name' + b'ORDER BY show_name', + [DOWNLOADED] ) ep_counts = {} @@ -286,12 +287,12 @@ def downloadSubtitleMissed(self, *args, **kwargs): all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' - b"WHERE status = '4' " + b'WHERE status = ? ' b'AND season != 0 ' b'AND indexer = ? ' b'AND showid = ? ' b"AND location != ''", - [cur_indexer_id, cur_series_id] + [DOWNLOADED, cur_indexer_id, cur_series_id] ) to_download[(cur_indexer_id, cur_series_id)] = [str(x[b'season']) + 'x' + str(x[b'episode']) for x in all_eps_results] diff --git a/medusa/subtitles.py b/medusa/subtitles.py index 08cb80324b..09d374e425 100644 --- a/medusa/subtitles.py +++ b/medusa/subtitles.py @@ -38,7 +38,7 @@ from medusa import app, db, helpers, history from medusa.cache import cache, memory_cache -from medusa.common import SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, cpu_presets +from medusa.common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, cpu_presets from medusa.helper.common import dateTimeFormat, episode_num, remove_extension, subtitle_extensions from medusa.helper.exceptions import ex from medusa.helpers import is_media_file, is_rar_file @@ -992,7 +992,7 @@ def dhm(td): "WHERE " "s.subtitles = 1 " "AND s.paused = 0 " - "AND e.status = '4' " + "AND e.status = ? 
" "AND e.season > 0 " "AND e.location != '' " "AND age {} 30 " @@ -1000,7 +1000,8 @@ def dhm(td): "ORDER BY " "lastsearch ASC " "LIMIT {}".format - (args['age_comparison'], args['limit']), [datetime.datetime.now().toordinal(), sql_like_languages] + (args['age_comparison'], args['limit']), + [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages] ) if not sql_results: From bfbe112c48a23c963be213508bb1a72ecb451e4a Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 01:49:08 +0300 Subject: [PATCH 80/93] fix import order --- medusa/process_tv.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/medusa/process_tv.py b/medusa/process_tv.py index 8be0e3e850..bde6c56b70 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -10,8 +10,8 @@ from builtins import object from medusa import app, db, failed_processor, helpers, logger, notifiers, post_processor -from medusa.common import DOWNLOADED from medusa.clients import torrent +from medusa.common import DOWNLOADED from medusa.helper.common import is_sync_file from medusa.helper.exceptions import EpisodePostProcessingFailedException, FailedPostProcessingFailedException, ex from medusa.name_parser.parser import InvalidNameException, InvalidShowException, NameParser From 665c5e8073e67d57912786b8f4add73aa5567e2a Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 02:56:50 +0300 Subject: [PATCH 81/93] Fix inconsistency with "file name" column and episode number tooltip --- themes-default/slim/views/displayShow.mako | 2 +- themes/dark/templates/displayShow.mako | 2 +- themes/light/templates/displayShow.mako | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index 2d495b7a58..b6eb55712a 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -236,7 +236,7 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if 
int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc or ''} % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index 2d495b7a58..b6eb55712a 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -236,7 +236,7 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc or ''} % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index 2d495b7a58..b6eb55712a 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -236,7 +236,7 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if int(epResult['status']) in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc or ''} % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: ${pretty_file_size(epResult["file_size"])} From fe1c9e8b5e0728332776e71a68a59d33716d5837 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 03:39:51 +0300 Subject: [PATCH 82/93] Update Series.refresh_dir * Set status to app.EP_DEFAULT_DELETED_STATUS if episode file was delete and old status was ARCHIVED or DOWNLOADED or IGNORED or SKIPPED * Add some episode attributes that should be reset if a file is deleted --- medusa/tv/series.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/medusa/tv/series.py b/medusa/tv/series.py index 65d66cc915..843f56bbb8 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -1757,12 +1757,9 @@ def refresh_dir(self): with cur_ep.lock: # if it used to have a file associated with it and it doesn't anymore then # set it to app.EP_DEFAULT_DELETED_STATUS - if 
cur_ep.location and cur_ep.status == DOWNLOADED: + if cur_ep.location and cur_ep.status in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: - if app.EP_DEFAULT_DELETED_STATUS == ARCHIVED: - new_status = ARCHIVED - else: - new_status = app.EP_DEFAULT_DELETED_STATUS + new_status = app.EP_DEFAULT_DELETED_STATUS log.debug( u"{id}: Location for '{show}' {ep} doesn't exist and current status is '{old_status}'," @@ -1783,6 +1780,10 @@ def refresh_dir(self): cur_ep.hasnfo = False cur_ep.hastbn = False cur_ep.release_name = '' + cur_ep.release_group = '' + cur_ep.is_proper = False + cur_ep.version = 0 + cur_ep.manually_searched = False sql_l.append(cur_ep.get_sql()) From 25346d9cd223eed6a6c960462e0bb5517c5eddbd Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 03:59:33 +0300 Subject: [PATCH 83/93] More inconsistencies in displayShow --- themes-default/slim/views/displayShow.mako | 6 +++--- themes/dark/templates/displayShow.mako | 6 +++--- themes/light/templates/displayShow.mako | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index b6eb55712a..2eacef92b0 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -238,7 +238,7 @@ const startVue = () => { ${epLoc or ''} - % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"]: ${pretty_file_size(epResult["file_size"])} % endif @@ -269,9 +269,9 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: % if flag != 'und': - + ${flag} % else: diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index b6eb55712a..2eacef92b0 100644 --- a/themes/dark/templates/displayShow.mako +++ 
b/themes/dark/templates/displayShow.mako @@ -238,7 +238,7 @@ const startVue = () => { ${epLoc or ''} - % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"]: ${pretty_file_size(epResult["file_size"])} % endif @@ -269,9 +269,9 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: % if flag != 'und': - + ${flag} % else: diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako index b6eb55712a..2eacef92b0 100644 --- a/themes/light/templates/displayShow.mako +++ b/themes/light/templates/displayShow.mako @@ -238,7 +238,7 @@ const startVue = () => { ${epLoc or ''} - % if epResult["file_size"] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"]: ${pretty_file_size(epResult["file_size"])} % endif @@ -269,9 +269,9 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: % if flag != 'und': - + ${flag} % else: From 3ab82f70acc7782cf1da33120c705cecfbff4ae9 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 07:00:01 +0300 Subject: [PATCH 84/93] Restore compact history functionality + fix quality sorting --- medusa/show/history.py | 22 +++++++++++++--------- themes-default/slim/views/history.mako | 18 +++++++++--------- themes/dark/templates/history.mako | 18 +++++++++--------- themes/light/templates/history.mako | 18 +++++++++--------- 4 files changed, 40 insertions(+), 36 deletions(-) diff --git a/medusa/show/history.py b/medusa/show/history.py index 3d5c4c9674..6906b700d7 100644 --- a/medusa/show/history.py +++ b/medusa/show/history.py @@ -60,10 +60,12 
@@ def get(self, limit=100, action=None): actions = History._get_actions(action) limit = max(try_int(limit), 0) - common_sql = 'SELECT show_name, h.indexer_id, showid, season, episode, action, h.quality, ' \ - ' provider, resource, date, h.proper_tags, h.manually_searched ' \ - 'FROM history h, tv_shows s ' \ - 'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer ' + common_sql = ( + 'SELECT show_name, h.indexer_id, showid AS show_id, season, episode, action, h.quality, ' + 'provider, resource, date, h.proper_tags, h.manually_searched ' + 'FROM history h, tv_shows s ' + 'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer ' + ) filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') ' order_sql = 'ORDER BY date DESC ' @@ -79,7 +81,7 @@ def get(self, limit=100, action=None): # TODO: Convert to a defaultdict and compact items as needed # TODO: Convert to using operators to combine items for row in sql_results: - row = History.Item(*row) + row = History.Item(**row) if not limit or len(detailed) < limit: detailed.append(row) if row.index in compact: @@ -121,17 +123,17 @@ def _get_actions(action): Action = namedtuple('Action', action_fields) Action.width = len(action_fields) - index_fields = ('indexer_id', 'show_id', 'season', 'episode') + index_fields = ('indexer_id', 'show_id', 'season', 'episode', 'quality') # An index for an item or compact item from history Index = namedtuple('Index', index_fields) Index.width = len(index_fields) - compact_fields = ('show_name', 'index', 'actions') + compact_fields = ('show_name', 'index', 'actions', 'quality') # Related items compacted with a list of actions from history CompactItem = namedtuple('CompactItem', compact_fields) item_fields = tuple( # make it a tuple so its immutable - ['show_name'] + list(index_fields) + list(action_fields) + set(('show_name',) + index_fields + action_fields) # unique only ) class Item(namedtuple('Item', item_fields)): @@ -152,7 +154,8 @@ def index(self): self.indexer_id, 
self.show_id, self.season, - self.episode + self.episode, + self.quality, ) @property @@ -180,6 +183,7 @@ def compacted(self): self.show_name, self.index, [self.cur_action], # actions + self.quality, ) return result diff --git a/themes-default/slim/views/history.mako b/themes-default/slim/views/history.mako index d3ac56976a..dca24bfd3a 100644 --- a/themes-default/slim/views/history.mako +++ b/themes-default/slim/views/history.mako @@ -36,7 +36,8 @@ const startVue = () => { return { // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality 0: node => $(node).find('time').attr('datetime'), - 1: node => $(node).find('a').text() + 1: node => $(node).find('a').text(), + 4: node => $(node).attr('quality') }; } // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded @@ -49,10 +50,10 @@ const startVue = () => { if ($.isMeta({ subtitles: 'enabled' }, [true])) { // 4: Subtitled, 5: Quality compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'), - compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[5] = node => $(node).attr('quality') } else { // 4: Quality - compactExtract[4] = node => $(node).find("span").text() === undefined ? 
'' : $(node).find("span").text() + compactExtract[4] = node => $(node).attr('quality') } return compactExtract; })(), @@ -191,8 +192,9 @@ const startVue = () => { % endif % endif - ${hItem.quality} - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.quality)} + % endfor @@ -272,10 +274,8 @@ const startVue = () => { % endfor % endif - - % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} - % endfor + + ${renderQualityPill(hItem.quality)} % endfor diff --git a/themes/dark/templates/history.mako b/themes/dark/templates/history.mako index d3ac56976a..dca24bfd3a 100644 --- a/themes/dark/templates/history.mako +++ b/themes/dark/templates/history.mako @@ -36,7 +36,8 @@ const startVue = () => { return { // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality 0: node => $(node).find('time').attr('datetime'), - 1: node => $(node).find('a').text() + 1: node => $(node).find('a').text(), + 4: node => $(node).attr('quality') }; } // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded @@ -49,10 +50,10 @@ const startVue = () => { if ($.isMeta({ subtitles: 'enabled' }, [true])) { // 4: Subtitled, 5: Quality compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'), - compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[5] = node => $(node).attr('quality') } else { // 4: Quality - compactExtract[4] = node => $(node).find("span").text() === undefined ? 
'' : $(node).find("span").text() + compactExtract[4] = node => $(node).attr('quality') } return compactExtract; })(), @@ -191,8 +192,9 @@ const startVue = () => { % endif % endif - ${hItem.quality} - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.quality)} + % endfor @@ -272,10 +274,8 @@ const startVue = () => { % endfor % endif - - % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} - % endfor + + ${renderQualityPill(hItem.quality)} % endfor diff --git a/themes/light/templates/history.mako b/themes/light/templates/history.mako index d3ac56976a..dca24bfd3a 100644 --- a/themes/light/templates/history.mako +++ b/themes/light/templates/history.mako @@ -36,7 +36,8 @@ const startVue = () => { return { // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality 0: node => $(node).find('time').attr('datetime'), - 1: node => $(node).find('a').text() + 1: node => $(node).find('a').text(), + 4: node => $(node).attr('quality') }; } // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded @@ -49,10 +50,10 @@ const startVue = () => { if ($.isMeta({ subtitles: 'enabled' }, [true])) { // 4: Subtitled, 5: Quality compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'), - compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[5] = node => $(node).attr('quality') } else { // 4: Quality - compactExtract[4] = node => $(node).find("span").text() === undefined ? 
'' : $(node).find("span").text() + compactExtract[4] = node => $(node).attr('quality') } return compactExtract; })(), @@ -191,8 +192,9 @@ const startVue = () => { % endif % endif - ${hItem.quality} - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.quality)} + % endfor @@ -272,10 +274,8 @@ const startVue = () => { % endfor % endif - - % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - ${renderQualityPill(cur_action.quality, customTitle=statusStrings[cur_action.action])} - % endfor + + ${renderQualityPill(hItem.quality)} % endfor From aa8b4312029d820a78430b6e5d05177953565caa Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sat, 2 Jun 2018 07:05:56 +0300 Subject: [PATCH 85/93] Remove redundant quality from CompactItem --- medusa/show/history.py | 3 +-- themes-default/slim/views/history.mako | 4 ++-- themes/dark/templates/history.mako | 4 ++-- themes/light/templates/history.mako | 4 ++-- 4 files changed, 7 insertions(+), 8 deletions(-) diff --git a/medusa/show/history.py b/medusa/show/history.py index 6906b700d7..4532eec2db 100644 --- a/medusa/show/history.py +++ b/medusa/show/history.py @@ -128,7 +128,7 @@ def _get_actions(action): Index = namedtuple('Index', index_fields) Index.width = len(index_fields) - compact_fields = ('show_name', 'index', 'actions', 'quality') + compact_fields = ('show_name', 'index', 'actions') # Related items compacted with a list of actions from history CompactItem = namedtuple('CompactItem', compact_fields) @@ -183,7 +183,6 @@ def compacted(self): self.show_name, self.index, [self.cur_action], # actions - self.quality, ) return result diff --git a/themes-default/slim/views/history.mako b/themes-default/slim/views/history.mako index dca24bfd3a..f0ce4fd056 100644 --- a/themes-default/slim/views/history.mako +++ b/themes-default/slim/views/history.mako @@ -274,8 +274,8 @@ const startVue = () => { % endfor % endif - - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.index.quality)} % endfor diff 
--git a/themes/dark/templates/history.mako b/themes/dark/templates/history.mako index dca24bfd3a..f0ce4fd056 100644 --- a/themes/dark/templates/history.mako +++ b/themes/dark/templates/history.mako @@ -274,8 +274,8 @@ const startVue = () => { % endfor % endif - - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.index.quality)} % endfor diff --git a/themes/light/templates/history.mako b/themes/light/templates/history.mako index dca24bfd3a..f0ce4fd056 100644 --- a/themes/light/templates/history.mako +++ b/themes/light/templates/history.mako @@ -274,8 +274,8 @@ const startVue = () => { % endfor % endif - - ${renderQualityPill(hItem.quality)} + + ${renderQualityPill(hItem.index.quality)} % endfor From 3f089aece6500c073c33dbf23ee7b3b7f33a05b7 Mon Sep 17 00:00:00 2001 From: sharkykh Date: Sun, 3 Jun 2018 03:22:24 +0300 Subject: [PATCH 86/93] Should fix N/A quality in displayShow, thanks @medariox --- themes-default/slim/static/js/ajax-episode-search.js | 6 +++++- themes/dark/assets/js/ajax-episode-search.js | 6 +++++- themes/dark/assets/js/ajax-episode-search.js.map | 2 +- themes/light/assets/js/ajax-episode-search.js | 6 +++++- themes/light/assets/js/ajax-episode-search.js.map | 2 +- 5 files changed, 17 insertions(+), 5 deletions(-) diff --git a/themes-default/slim/static/js/ajax-episode-search.js b/themes-default/slim/static/js/ajax-episode-search.js index 04d07b65d5..d671a31133 100644 --- a/themes-default/slim/static/js/ajax-episode-search.js +++ b/themes-default/slim/static/js/ajax-episode-search.js @@ -53,7 +53,11 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes + let qualityPill = ''; + if (ep.quality_style && ep.quality_style !== 'na') { + qualityPill = ' ' + ep.quality_name + ''; + } + htmlContent = ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the 
status column if it exists diff --git a/themes/dark/assets/js/ajax-episode-search.js b/themes/dark/assets/js/ajax-episode-search.js index 04d07b65d5..d671a31133 100644 --- a/themes/dark/assets/js/ajax-episode-search.js +++ b/themes/dark/assets/js/ajax-episode-search.js @@ -53,7 +53,11 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes + let qualityPill = ''; + if (ep.quality_style && ep.quality_style !== 'na') { + qualityPill = ' ' + ep.quality_name + ''; + } + htmlContent = ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists diff --git a/themes/dark/assets/js/ajax-episode-search.js.map b/themes/dark/assets/js/ajax-episode-search.js.map index 1430a6ab10..603fb96220 100644 --- a/themes/dark/assets/js/ajax-episode-search.js.map +++ b/themes/dark/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 
'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n 
imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + 
options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 
0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n 
img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style && ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer 
name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => 
{\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n 
$('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file diff --git a/themes/light/assets/js/ajax-episode-search.js b/themes/light/assets/js/ajax-episode-search.js index 04d07b65d5..d671a31133 100644 --- a/themes/light/assets/js/ajax-episode-search.js +++ b/themes/light/assets/js/ajax-episode-search.js @@ -53,7 +53,11 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes + let qualityPill = ''; + if (ep.quality_style && ep.quality_style !== 'na') { + qualityPill = ' ' + ep.quality_name + ''; + } + htmlContent = ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists diff --git a/themes/light/assets/js/ajax-episode-search.js.map b/themes/light/assets/js/ajax-episode-search.js.map index 1430a6ab10..603fb96220 100644 --- a/themes/light/assets/js/ajax-episode-search.js.map +++ b/themes/light/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + 
ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n htmlContent = ep.status + ' ' + ep.quality_name + ''; // eslint-disable-line quotes\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 
'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for 
img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the 
manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if 
(ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style && ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n 
elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass 
the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n 
$('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file From ed0da4670eb0b576203579183617e375e79f0979 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 3 Jun 2018 08:45:39 +0200 Subject: [PATCH 87/93] Remove superfluous check --- themes-default/slim/static/js/ajax-episode-search.js | 2 +- themes/dark/assets/js/ajax-episode-search.js | 2 +- themes/dark/assets/js/ajax-episode-search.js.map | 2 +- themes/light/assets/js/ajax-episode-search.js | 2 +- themes/light/assets/js/ajax-episode-search.js.map | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/themes-default/slim/static/js/ajax-episode-search.js b/themes-default/slim/static/js/ajax-episode-search.js index d671a31133..81ceaebb38 100644 --- a/themes-default/slim/static/js/ajax-episode-search.js +++ b/themes-default/slim/static/js/ajax-episode-search.js @@ -54,7 +54,7 @@ function updateImages(data) { // Update Status and Quality let qualityPill = ''; - if (ep.quality_style && ep.quality_style !== 'na') { + if (ep.quality_style !== 'na') { qualityPill = ' ' + ep.quality_name + ''; } htmlContent = ep.status + qualityPill; diff --git a/themes/dark/assets/js/ajax-episode-search.js b/themes/dark/assets/js/ajax-episode-search.js index d671a31133..81ceaebb38 100644 --- a/themes/dark/assets/js/ajax-episode-search.js +++ b/themes/dark/assets/js/ajax-episode-search.js @@ -54,7 +54,7 @@ function updateImages(data) { // Update Status and Quality let qualityPill = ''; - if (ep.quality_style && ep.quality_style !== 'na') { + if (ep.quality_style !== 'na') { qualityPill = ' ' + ep.quality_name + ''; } htmlContent = ep.status + qualityPill; diff --git 
a/themes/dark/assets/js/ajax-episode-search.js.map b/themes/dark/assets/js/ajax-episode-search.js.map index 603fb96220..45a524866d 100644 --- a/themes/dark/assets/js/ajax-episode-search.js.map +++ b/themes/dark/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 
'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style && ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // 
corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of 
file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n 
parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // 
corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of 
file diff --git a/themes/light/assets/js/ajax-episode-search.js b/themes/light/assets/js/ajax-episode-search.js index d671a31133..81ceaebb38 100644 --- a/themes/light/assets/js/ajax-episode-search.js +++ b/themes/light/assets/js/ajax-episode-search.js @@ -54,7 +54,7 @@ function updateImages(data) { // Update Status and Quality let qualityPill = ''; - if (ep.quality_style && ep.quality_style !== 'na') { + if (ep.quality_style !== 'na') { qualityPill = ' ' + ep.quality_name + ''; } htmlContent = ep.status + qualityPill; diff --git a/themes/light/assets/js/ajax-episode-search.js.map b/themes/light/assets/js/ajax-episode-search.js.map index 603fb96220..45a524866d 100644 --- a/themes/light/assets/js/ajax-episode-search.js.map +++ b/themes/light/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = 
ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style && ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n 
elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass 
the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n 
$('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + 
ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. 
If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed 
then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality 
.btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file From 6e27c39982b9956549a8fc77e2edf9175004eaa0 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 3 Jun 2018 11:51:55 +0200 Subject: [PATCH 88/93] Fix Overview referenced but not imported. --- themes-default/slim/views/manage_episodeStatuses.mako | 2 +- themes/dark/templates/manage_episodeStatuses.mako | 2 +- themes/light/templates/manage_episodeStatuses.mako | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index e94e9b2601..9e81397745 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST + from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index e94e9b2601..9e81397745 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! 
- from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST + from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako index e94e9b2601..9e81397745 100644 --- a/themes/light/templates/manage_episodeStatuses.mako +++ b/themes/light/templates/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa.common import statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST + from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> From ef10b06a45133e7372c003dd2f36381fcdb2cd60 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 3 Jun 2018 11:53:01 +0200 Subject: [PATCH 89/93] Fix allowed_qualities referenced before assigned. 
--- themes-default/slim/views/inc_defs.mako | 2 +- themes/dark/templates/inc_defs.mako | 2 +- themes/light/templates/inc_defs.mako | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/themes-default/slim/views/inc_defs.mako b/themes-default/slim/views/inc_defs.mako index 81d1fdbd1a..6383aae5b5 100644 --- a/themes-default/slim/views/inc_defs.mako +++ b/themes-default/slim/views/inc_defs.mako @@ -4,8 +4,8 @@ %> <%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute + allowed_qualities, preferred_qualities = Quality.split_quality(quality) if showTitle: - allowed_qualities, preferred_qualities = Quality.split_quality(quality) title = 'Allowed Quality:\n' if allowed_qualities: for curQual in allowed_qualities: diff --git a/themes/dark/templates/inc_defs.mako b/themes/dark/templates/inc_defs.mako index 81d1fdbd1a..6383aae5b5 100644 --- a/themes/dark/templates/inc_defs.mako +++ b/themes/dark/templates/inc_defs.mako @@ -4,8 +4,8 @@ %> <%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute + allowed_qualities, preferred_qualities = Quality.split_quality(quality) if showTitle: - allowed_qualities, preferred_qualities = Quality.split_quality(quality) title = 'Allowed Quality:\n' if allowed_qualities: for curQual in allowed_qualities: diff --git a/themes/light/templates/inc_defs.mako b/themes/light/templates/inc_defs.mako index 81d1fdbd1a..6383aae5b5 100644 --- a/themes/light/templates/inc_defs.mako +++ b/themes/light/templates/inc_defs.mako @@ -4,8 +4,8 @@ %> <%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute + allowed_qualities, preferred_qualities = Quality.split_quality(quality) if showTitle: - allowed_qualities, preferred_qualities = 
Quality.split_quality(quality) title = 'Allowed Quality:\n' if allowed_qualities: for curQual in allowed_qualities: From 2e1e7da77aaa0908149b72c6386dfc3152872c84 Mon Sep 17 00:00:00 2001 From: P0psicles Date: Sun, 3 Jun 2018 12:05:37 +0200 Subject: [PATCH 90/93] Cast whichStatus to int. --- themes-default/slim/views/manage_episodeStatuses.mako | 6 +++--- themes/dark/templates/manage_episodeStatuses.mako | 6 +++--- themes/light/templates/manage_episodeStatuses.mako | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index 9e81397745..047abd5d6a 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[int(whichStatus)]}


% endif Manage episodes with status @@ -42,7 +42,7 @@ Manage episodes with status -

Shows containing ${statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[int(whichStatus)]} episodes


<% if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index 9e81397745..047abd5d6a 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[int(whichStatus)]}


% endif Manage episodes with status @@ -42,7 +42,7 @@ Manage episodes with status -

Shows containing ${statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[int(whichStatus)]} episodes


<% if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako index 9e81397745..047abd5d6a 100644 --- a/themes/light/templates/manage_episodeStatuses.mako +++ b/themes/light/templates/manage_episodeStatuses.mako @@ -26,14 +26,14 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): % if whichStatus: -

None of your episodes have status ${statusStrings[whichStatus]}

+

None of your episodes have status ${statusStrings[int(whichStatus)]}


% endif Manage episodes with status @@ -42,7 +42,7 @@ Manage episodes with status -

Shows containing ${statusStrings[whichStatus]} episodes

+

Shows containing ${statusStrings[int(whichStatus)]} episodes


<% if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): From b519468f33ab2018ad6273e421c6973c038ee1e5 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 3 Jun 2018 12:32:30 +0200 Subject: [PATCH 91/93] Fix DB upgrade with older DBs, improve upgrade logging --- medusa/databases/cache_db.py | 9 +++++++++ medusa/databases/failed_db.py | 2 ++ medusa/databases/main_db.py | 28 +++++++++++++++++++++++++--- 3 files changed, 36 insertions(+), 3 deletions(-) diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py index 0325ab3909..2048495dd7 100644 --- a/medusa/databases/cache_db.py +++ b/medusa/databases/cache_db.py @@ -2,8 +2,15 @@ from __future__ import unicode_literals +import logging + from medusa import db from medusa.databases import utils +from medusa.logger.adapters.style import BraceAdapter + + +log = BraceAdapter(logging.getLogger(__name__)) +log.logger.addHandler(logging.NullHandler()) # Add new migrations at the bottom of the list @@ -175,6 +182,8 @@ def execute(self): self.clear_provider_tables() self.inc_major_version() + log.info('Updated to: {}.{}', *self.connection.version) + def clear_provider_tables(self): providers = self.connection.select( "SELECT name FROM sqlite_master WHERE type='table' AND name NOT IN ('lastUpdate'," diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py index e5fa345b6a..b4df13d77a 100644 --- a/medusa/databases/failed_db.py +++ b/medusa/databases/failed_db.py @@ -176,6 +176,8 @@ def execute(self): self.update_status_unknown() self.inc_major_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + def shift_history_qualities(self): """ Shift all qualities << 1. 
diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py index cdf0369619..f6c410b455 100644 --- a/medusa/databases/main_db.py +++ b/medusa/databases/main_db.py @@ -240,7 +240,7 @@ def execute(self): "CREATE TABLE info(last_backlog NUMERIC, last_indexer NUMERIC, last_proper_search NUMERIC);", "CREATE TABLE scene_numbering(indexer TEXT, indexer_id INTEGER, season INTEGER, episode INTEGER, scene_season INTEGER, scene_episode INTEGER, absolute_number NUMERIC, scene_absolute_number NUMERIC, PRIMARY KEY(indexer_id, season, episode));", "CREATE TABLE tv_shows(show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC DEFAULT -1);", - "CREATE TABLE tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);", + "CREATE TABLE tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, 
subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT);", "CREATE TABLE blacklist (show_id INTEGER, range TEXT, keyword TEXT);", "CREATE TABLE whitelist (show_id INTEGER, range TEXT, keyword TEXT);", "CREATE TABLE xem_refresh (indexer TEXT, indexer_id INTEGER PRIMARY KEY, last_refreshed INTEGER);", @@ -335,7 +335,7 @@ def execute(self): class AddMinorVersion(AlterTVShowsFieldTypes): def test(self): - return self.checkDBVersion() >= 42 and self.hasColumn(b'db_version', b'db_minor_version') + return self.checkDBVersion() >= 43 and self.hasColumn(b'db_version', b'db_minor_version') def incDBVersion(self): warnings.warn("Deprecated: Use inc_major_version or inc_minor_version instead", DeprecationWarning) @@ -361,6 +361,7 @@ def execute(self): log.info(u'Add minor version numbers to database') self.addColumn(b'db_version', b'db_minor_version') + self.inc_major_version() self.inc_minor_version() log.info(u'Updated to: {}.{}', *self.connection.version) @@ -462,8 +463,11 @@ def execute(self): log.info(u'Adding column info_hash in history') if not self.hasColumn("history", "info_hash"): self.addColumn("history", "info_hash", 'TEXT', None) + self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class AddPlot(AddInfoHash): """Adds column plot to imdb_info table.""" @@ -484,8 +488,11 @@ def execute(self): log.info(u'Adding column plot in tv_show') if not self.hasColumn('tv_shows', 'plot'): self.addColumn('tv_shows', 'plot', 'TEXT', None) + self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class AddResourceSize(AddPlot): """Adds column size to history table.""" @@ -505,6 +512,8 @@ def execute(self): self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class AddPKIndexerMapping(AddResourceSize): """Add PK 
to mindexer column in indexer_mapping table.""" @@ -525,8 +534,11 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS indexer_mapping;") self.connection.action("ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;") self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;") + self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class AddIndexerInteger(AddPKIndexerMapping): """Make indexer as INTEGER in tv_episodes table.""" @@ -544,7 +556,7 @@ def execute(self): "CREATE TABLE new_tv_episodes " "(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, " "season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, " - "status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, " + "status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, " "subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, " "scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, " "version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);") @@ -552,8 +564,11 @@ def execute(self): self.connection.action("DROP TABLE IF EXISTS tv_episodes;") self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;") self.connection.action("DROP TABLE IF EXISTS new_tv_episodoes;") + self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class AddIndexerIds(AddIndexerInteger): """ @@ -665,6 +680,9 @@ def create_series_dict(): [indexer_id, series_id]) self.inc_minor_version() + + log.info(u'Updated to: {}.{}', *self.connection.version) + # Flag the image migration. 
from medusa import app app.MIGRATE_IMAGES = True @@ -735,6 +753,8 @@ def execute(self): self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + class ShiftQualities(AddSeparatedStatusQualityFields): """Shift all qualities one place to the left.""" @@ -751,6 +771,8 @@ def execute(self): self.shift_history_qualities() self.inc_minor_version() + log.info(u'Updated to: {}.{}', *self.connection.version) + def shift_tv_qualities(self): """ Shift all qualities << 1. From 2f39b8714e9c735f4632850715c03aae21064bc3 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 3 Jun 2018 12:49:51 +0200 Subject: [PATCH 92/93] Indent manage_episodeStatuses.mako --- .../slim/views/manage_episodeStatuses.mako | 114 +++++++++--------- 1 file changed, 57 insertions(+), 57 deletions(-) diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index 047abd5d6a..02bddbcfa0 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -25,65 +25,65 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): -% if whichStatus: -

None of your episodes have status ${statusStrings[int(whichStatus)]}

-
-% endif - -Manage episodes with status - - + % if whichStatus: +

None of your episodes have status ${statusStrings[int(whichStatus)]}

+
+ % endif +
+ Manage episodes with status + +
% else: -
- -

Shows containing ${statusStrings[int(whichStatus)]} episodes

-
-<% - if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): - row_class = "good" - else: - row_class = Overview.overviewStrings[int(whichStatus)] -%> - -Set checked shows/episodes to - -
- - -
-
- - % for cur_series in sorted_show_ids: - <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> - - - - + + +

Shows containing ${statusStrings[int(whichStatus)]} episodes

+
+ <% + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): + row_class = "good" + else: + row_class = Overview.overviewStrings[int(whichStatus)] + %> + + Set checked shows/episodes to -
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) -
-
+ + +
+ + +
+
+ + % for cur_series in sorted_show_ids: + <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> + + + + + % endfor + +
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) +
+ % endif
From ad44aefae896f034230086129b7c526d5b054740 Mon Sep 17 00:00:00 2001 From: Dario Date: Sun, 3 Jun 2018 12:53:58 +0200 Subject: [PATCH 93/93] Gulp sync --- .../templates/manage_episodeStatuses.mako | 114 +++++++++--------- .../templates/manage_episodeStatuses.mako | 114 +++++++++--------- 2 files changed, 114 insertions(+), 114 deletions(-) diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index 047abd5d6a..02bddbcfa0 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -25,65 +25,65 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): -% if whichStatus: -

None of your episodes have status ${statusStrings[int(whichStatus)]}

-
-% endif -
-Manage episodes with status - -
+ % if whichStatus: +

None of your episodes have status ${statusStrings[int(whichStatus)]}

+
+ % endif +
+ Manage episodes with status + +
% else: -
- -

Shows containing ${statusStrings[int(whichStatus)]} episodes

-
-<% - if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): - row_class = "good" - else: - row_class = Overview.overviewStrings[int(whichStatus)] -%> - -Set checked shows/episodes to - -
- - -
-
- - % for cur_series in sorted_show_ids: - <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> - - - - + + +

Shows containing ${statusStrings[int(whichStatus)]} episodes

+
+ <% + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): + row_class = "good" + else: + row_class = Overview.overviewStrings[int(whichStatus)] + %> + + Set checked shows/episodes to -
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) -
-
+ + +
+ + +
+
+ + % for cur_series in sorted_show_ids: + <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> + + + + + % endfor + +
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) +
+ % endif
diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako index 047abd5d6a..02bddbcfa0 100644 --- a/themes/light/templates/manage_episodeStatuses.mako +++ b/themes/light/templates/manage_episodeStatuses.mako @@ -25,65 +25,65 @@ const startVue = () => {

{{header}}

% if not whichStatus or (whichStatus and not ep_counts): -% if whichStatus: -

None of your episodes have status ${statusStrings[int(whichStatus)]}

-
-% endif -
-Manage episodes with status - -
+ % if whichStatus: +

None of your episodes have status ${statusStrings[int(whichStatus)]}

+
+ % endif +
+ Manage episodes with status + +
% else: -
- -

Shows containing ${statusStrings[int(whichStatus)]} episodes

-
-<% - if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): - row_class = "good" - else: - row_class = Overview.overviewStrings[int(whichStatus)] -%> - -Set checked shows/episodes to - -
- - -
-
- - % for cur_series in sorted_show_ids: - <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> - - - - + + +

Shows containing ${statusStrings[int(whichStatus)]} episodes

+
+ <% + if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED): + row_class = "good" + else: + row_class = Overview.overviewStrings[int(whichStatus)] + %> + + Set checked shows/episodes to -
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) -
-
+ + +
+ + +
+
+ + % for cur_series in sorted_show_ids: + <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %> + + + + + % endfor + +
${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]}) +
+ % endif