diff --git a/dredd/api-description.yml b/dredd/api-description.yml
index 14211fec2d..4c92436c91 100644
--- a/dredd/api-description.yml
+++ b/dredd/api-description.yml
@@ -1275,7 +1275,7 @@ definitions:
     properties:
       enabled:
         type: boolean
-      dbFilename:
+      dbPath:
         type: string
       pythonVersion:
         type: string
diff --git a/medusa/common.py b/medusa/common.py
index a3aa3abe97..abdee6f63c 100644
--- a/medusa/common.py
+++ b/medusa/common.py
@@ -26,7 +26,6 @@ import uuid
 from builtins import object
 from builtins import str
-from collections import namedtuple
 from functools import reduce

 from fake_useragent import UserAgent, settings as ua_settings
@@ -90,18 +89,18 @@ NOTIFY_SNATCH_PROPER = 8

 notifyStrings = {
-    NOTIFY_SNATCH: "Started Download",
-    NOTIFY_DOWNLOAD: "Download Finished",
-    NOTIFY_SUBTITLE_DOWNLOAD: "Subtitle Download Finished",
-    NOTIFY_GIT_UPDATE: "Medusa Updated",
-    NOTIFY_GIT_UPDATE_TEXT: "Medusa Updated To Commit#: ",
-    NOTIFY_LOGIN: "Medusa new login",
-    NOTIFY_LOGIN_TEXT: "New login from IP: {0}. http://geomaplookup.net/?ip={0}",
-    NOTIFY_SNATCH_PROPER: "Started PROPER Download"
+    NOTIFY_SNATCH: 'Started Download',
+    NOTIFY_DOWNLOAD: 'Download Finished',
+    NOTIFY_SUBTITLE_DOWNLOAD: 'Subtitle Download Finished',
+    NOTIFY_GIT_UPDATE: 'Medusa Updated',
+    NOTIFY_GIT_UPDATE_TEXT: 'Medusa Updated To Commit#: ',
+    NOTIFY_LOGIN: 'Medusa new login',
+    NOTIFY_LOGIN_TEXT: 'New login from IP: {0}. http://geomaplookup.net/?ip={0}',
+    NOTIFY_SNATCH_PROPER: 'Started Proper Download'
 }

 # Episode statuses
-UNSET = -1  # should never happen
+UNSET = -1  # default episode status
 UNAIRED = 1  # episodes that haven't aired yet
 SNATCHED = 2  # qualified with quality
 WANTED = 3  # episodes we don't have but want to get
@@ -122,143 +121,123 @@ NAMING_LIMITED_EXTEND_E_PREFIXED = 32

 MULTI_EP_STRINGS = {
-    NAMING_REPEAT: "Repeat",
-    NAMING_SEPARATED_REPEAT: "Repeat (Separated)",
-    NAMING_DUPLICATE: "Duplicate",
-    NAMING_EXTEND: "Extend",
-    NAMING_LIMITED_EXTEND: "Extend (Limited)",
-    NAMING_LIMITED_EXTEND_E_PREFIXED: "Extend (Limited, E-prefixed)"
+    NAMING_REPEAT: 'Repeat',
+    NAMING_SEPARATED_REPEAT: 'Repeat (Separated)',
+    NAMING_DUPLICATE: 'Duplicate',
+    NAMING_EXTEND: 'Extend',
+    NAMING_LIMITED_EXTEND: 'Extend (Limited)',
+    NAMING_LIMITED_EXTEND_E_PREFIXED: 'Extend (Limited, E-prefixed)'
+}
+
+
+statusStrings = {
+    ARCHIVED: 'Archived',
+    DOWNLOADED: 'Downloaded',
+    FAILED: 'Failed',
+    IGNORED: 'Ignored',
+    SKIPPED: 'Skipped',
+    SNATCHED: 'Snatched',
+    SNATCHED_BEST: 'Snatched (Best)',
+    SNATCHED_PROPER: 'Snatched (Proper)',
+    SUBTITLED: 'Subtitled',
+    UNAIRED: 'Unaired',
+    UNSET: 'Unset',
+    WANTED: 'Wanted'
 }


 class Quality(object):
-    """Determine quality and set status codes."""
-
-    NONE = 0  # 0
-    SDTV = 1  # 1
-    SDDVD = 1 << 1  # 2
-    HDTV = 1 << 2  # 4
-    RAWHDTV = 1 << 3  # 8 -- 720p/1080i mpeg2 (trollhd releases)
-    FULLHDTV = 1 << 4  # 16 -- 1080p HDTV (QCF releases)
-    HDWEBDL = 1 << 5  # 32
-    FULLHDWEBDL = 1 << 6  # 64 -- 1080p web-dl
-    HDBLURAY = 1 << 7  # 128
-    FULLHDBLURAY = 1 << 8  # 256
-    UHD_4K_TV = 1 << 9  # 512 -- 2160p aka 4K UHD aka UHD-1
-    UHD_4K_WEBDL = 1 << 10  # 1024
-    UHD_4K_BLURAY = 1 << 11  # 2048
-    UHD_8K_TV = 1 << 12  # 4096 -- 4320p aka 8K UHD aka UHD-2
-    UHD_8K_WEBDL = 1 << 13  # 8192
-    UHD_8K_BLURAY = 1 << 14  # 16384
-    ANYHDTV = HDTV | FULLHDTV  # 20
-    ANYWEBDL = HDWEBDL | FULLHDWEBDL  # 96
-    ANYBLURAY = HDBLURAY | FULLHDBLURAY  # 384
-
-    # put these bits at the other end of the spectrum,
-    # far enough out that they shouldn't interfere
-    UNKNOWN = 1 << 15  # 32768
+
+    NA = 0  # 0
+    UNKNOWN = 1  # 1
+    SDTV = 1 << 1  # 2
+    SDDVD = 1 << 2  # 4
+    HDTV = 1 << 3  # 8
+    RAWHDTV = 1 << 4  # 16 -- 720p/1080i mpeg2
+    FULLHDTV = 1 << 5  # 32 -- 1080p HDTV
+    HDWEBDL = 1 << 6  # 64
+    FULLHDWEBDL = 1 << 7  # 128 -- 1080p web-dl
+    HDBLURAY = 1 << 8  # 256
+    FULLHDBLURAY = 1 << 9  # 512
+    UHD_4K_TV = 1 << 10  # 1024 -- 2160p aka 4K UHD aka UHD-1
+    UHD_4K_WEBDL = 1 << 11  # 2048
+    UHD_4K_BLURAY = 1 << 12  # 4096
+    UHD_8K_TV = 1 << 13  # 8192 -- 4320p aka 8K UHD aka UHD-2
+    UHD_8K_WEBDL = 1 << 14  # 16384
+    UHD_8K_BLURAY = 1 << 15  # 32768
+    ANYHDTV = HDTV | FULLHDTV  # 40
+    ANYWEBDL = HDWEBDL | FULLHDWEBDL  # 192
+    ANYBLURAY = HDBLURAY | FULLHDBLURAY  # 768

     qualityStrings = {
-        NONE: "N/A",
-        UNKNOWN: "Unknown",
-        SDTV: "SDTV",
-        SDDVD: "SD DVD",
-        HDTV: "720p HDTV",
-        RAWHDTV: "RawHD",
-        FULLHDTV: "1080p HDTV",
-        HDWEBDL: "720p WEB-DL",
-        FULLHDWEBDL: "1080p WEB-DL",
-        HDBLURAY: "720p BluRay",
-        FULLHDBLURAY: "1080p BluRay",
-        UHD_4K_TV: "4K UHD TV",
-        UHD_8K_TV: "8K UHD TV",
-        UHD_4K_WEBDL: "4K UHD WEB-DL",
-        UHD_8K_WEBDL: "8K UHD WEB-DL",
-        UHD_4K_BLURAY: "4K UHD BluRay",
-        UHD_8K_BLURAY: "8K UHD BluRay",
+        NA: 'N/A',
+        UNKNOWN: 'Unknown',
+        SDTV: 'SDTV',
+        SDDVD: 'SD DVD',
+        HDTV: '720p HDTV',
+        RAWHDTV: 'RawHD',
+        FULLHDTV: '1080p HDTV',
+        HDWEBDL: '720p WEB-DL',
+        FULLHDWEBDL: '1080p WEB-DL',
+        HDBLURAY: '720p BluRay',
+        FULLHDBLURAY: '1080p BluRay',
+        UHD_4K_TV: '4K UHD TV',
+        UHD_8K_TV: '8K UHD TV',
+        UHD_4K_WEBDL: '4K UHD WEB-DL',
+        UHD_8K_WEBDL: '8K UHD WEB-DL',
+        UHD_4K_BLURAY: '4K UHD BluRay',
+        UHD_8K_BLURAY: '8K UHD BluRay',
     }

     sceneQualityStrings = {
-        NONE: "N/A",
-        UNKNOWN: "Unknown",
-        SDTV: "",
-        SDDVD: "",
-        HDTV: "720p",
-        RAWHDTV: "1080i",
-        FULLHDTV: "1080p",
-        HDWEBDL: "720p",
-        FULLHDWEBDL: "1080p",
-        HDBLURAY: "720p BluRay",
-        FULLHDBLURAY: "1080p BluRay",
-        UHD_4K_TV: "2160p",
-        UHD_8K_TV: "4320p",
-        UHD_4K_WEBDL: "2160p",
-        UHD_8K_WEBDL: "4320p",
-        UHD_4K_BLURAY: "2160p BluRay",
-        UHD_8K_BLURAY: "4320p BluRay",
+        NA: 'N/A',
+        UNKNOWN: 'Unknown',
+        SDTV: '',
+        SDDVD: '',
+        HDTV: '720p',
+        RAWHDTV: '1080i',
+        FULLHDTV: '1080p',
+        HDWEBDL: '720p',
+        FULLHDWEBDL: '1080p',
+        HDBLURAY: '720p BluRay',
+        FULLHDBLURAY: '1080p BluRay',
+        UHD_4K_TV: '2160p',
+        UHD_8K_TV: '4320p',
+        UHD_4K_WEBDL: '2160p',
+        UHD_8K_WEBDL: '4320p',
+        UHD_4K_BLURAY: '2160p BluRay',
+        UHD_8K_BLURAY: '4320p BluRay',
     }

     combinedQualityStrings = {
-        ANYHDTV: "HDTV",
-        ANYWEBDL: "WEB-DL",
-        ANYBLURAY: "BluRay"
+        ANYHDTV: 'HDTV',
+        ANYWEBDL: 'WEB-DL',
+        ANYBLURAY: 'BluRay'
     }

     cssClassStrings = {
-        NONE: "N/A",
-        UNKNOWN: "Unknown",
-        SDTV: "SDTV",
-        SDDVD: "SDDVD",
-        HDTV: "HD720p",
-        RAWHDTV: "RawHD",
-        FULLHDTV: "HD1080p",
-        HDWEBDL: "HD720p",
-        FULLHDWEBDL: "HD1080p",
-        HDBLURAY: "HD720p",
-        FULLHDBLURAY: "HD1080p",
-        UHD_4K_TV: "UHD-4K",
-        UHD_8K_TV: "UHD-8K",
-        UHD_4K_WEBDL: "UHD-4K",
-        UHD_8K_WEBDL: "UHD-8K",
-        UHD_4K_BLURAY: "UHD-4K",
-        UHD_8K_BLURAY: "UHD-8K",
-        ANYHDTV: "any-hd",
-        ANYWEBDL: "any-hd",
-        ANYBLURAY: "any-hd"
-    }
-
-    statusPrefixes = {
-        UNSET: "Unset",
-        UNAIRED: "Unaired",
-        WANTED: "Wanted",
-        SKIPPED: "Skipped",
-        IGNORED: "Ignored",
-        SUBTITLED: "Subtitled",
-        DOWNLOADED: "Downloaded",
-        SNATCHED: "Snatched",
-        SNATCHED_PROPER: "Snatched (Proper)",
-        FAILED: "Failed",
-        SNATCHED_BEST: "Snatched (Best)",
-        ARCHIVED: "Archived"
+        NA: 'na',
+        UNKNOWN: 'Unknown',
+        SDTV: 'SDTV',
+        SDDVD: 'SDDVD',
+        HDTV: 'HD720p',
+        RAWHDTV: 'RawHD',
+        FULLHDTV: 'HD1080p',
+        HDWEBDL: 'HD720p',
+        FULLHDWEBDL: 'HD1080p',
+        HDBLURAY: 'HD720p',
+        FULLHDBLURAY: 'HD1080p',
+        UHD_4K_TV: 'UHD-4K',
+        UHD_8K_TV: 'UHD-8K',
+        UHD_4K_WEBDL: 'UHD-4K',
+        UHD_8K_WEBDL: 'UHD-8K',
+        UHD_4K_BLURAY: 'UHD-4K',
+        UHD_8K_BLURAY: 'UHD-8K',
+        ANYHDTV: 'any-hd',
+        ANYWEBDL: 'any-hd',
+        ANYBLURAY: 'any-hd'
     }

-    @staticmethod
-    def _get_status_strings(status):
-        """
-        Return string values associated with Status prefix.
-
-        :param status: Status prefix to resolve
-        :return: Human readable status value
-        """
-        to_return = {}
-        for quality in Quality.qualityStrings:
-            if quality is not None:
-                stat = Quality.statusPrefixes[status]
-                qual = Quality.qualityStrings[quality]
-                comp = Quality.composite_status(status, quality)
-                to_return[comp] = '%s (%s)' % (stat, qual)
-        return to_return
-
     @staticmethod
     def combine_qualities(allowed_qualities, preferred_qualities):
         any_quality = 0
@@ -271,13 +250,9 @@ def combine_qualities(allowed_qualities, preferred_qualities):

     @staticmethod
     def split_quality(quality):
-        if quality is None:
-            quality = Quality.NONE
         allowed_qualities = []
         preferred_qualities = []
         for cur_qual in Quality.qualityStrings:
-            if cur_qual is None:
-                cur_qual = Quality.NONE
             if cur_qual & quality:
                 allowed_qualities.append(cur_qual)
             if cur_qual << 16 & quality:
@@ -288,14 +263,14 @@ def split_quality(quality):
     @staticmethod
     def name_quality(name, anime=False, extend=True):
         """
-        Return The quality from an episode File renamed by the application.
+        Return the quality from an episode filename.

-        If no quality is achieved it will try scene_quality regex
+        If no quality is found, it will try the scene_quality regex.

         :param name: to parse
         :param anime: Boolean to indicate if the show we're resolving is Anime
         :param extend: boolean to extend methods to try
-        :return: Quality prefix
+        :return: Quality
        """
         # Try Scene names first
         quality = Quality.scene_quality(name, anime)
@@ -311,7 +286,7 @@ def name_quality(name, anime=False, extend=True):
     @staticmethod
     def scene_quality(name, anime=False):
         """
-        Return The quality from the Scene episode File.
+        Return the quality from the episode filename, using the scene regexes.

         :param name: Episode filename to analyse
         :param anime: Boolean to indicate if the show we're resolving is Anime
@@ -447,8 +422,8 @@ def quality_from_file_meta(file_path):
         # TODO: Use knowledge information like 'resolution'
         base_filename = os.path.basename(file_path)
-        bluray = re.search(r"blue?-?ray|hddvd|b[rd](rip|mux)", base_filename, re.I) is not None
-        webdl = re.search(r"web.?dl|web(rip|mux|hd)", base_filename, re.I) is not None
+        bluray = re.search(r'blue?-?ray|hddvd|b[rd](rip|mux)', base_filename, re.I) is not None
+        webdl = re.search(r'web.?dl|web(rip|mux|hd)', base_filename, re.I) is not None

         ret = Quality.UNKNOWN
         if 3240 < height:
@@ -464,36 +439,6 @@ def quality_from_file_meta(file_path):

         return ret

-    composite_status_quality = namedtuple('composite_status', ['status', 'quality'])
-
-    @staticmethod
-    def composite_status(status, quality):
-        if quality is None:
-            quality = Quality.NONE
-        return status + 100 * quality
-
-    @staticmethod
-    def quality_downloaded(status):
-        return (status - DOWNLOADED) // 100
-
-    @staticmethod
-    def split_composite_status(status):
-        """
-        Split a composite status code into a status and quality.
-
-        :param status: to split
-        :returns: a namedtuple containing (status, quality)
-        """
-        status = int(status)
-        if status == UNSET:
-            return Quality.composite_status_quality(UNSET, Quality.UNKNOWN)
-
-        for q in sorted(list(Quality.qualityStrings), reverse=True):
-            if status > q * 100:
-                return Quality.composite_status_quality(status - q * 100, q)
-
-        return Quality.composite_status_quality(status, Quality.NONE)
-
     @staticmethod
     def scene_quality_from_name(name, quality):
         """
@@ -526,7 +471,8 @@ def scene_quality_from_name(name, quality):
             codec = ' DivX'

         # If any HDTV type or SDTV
-        if quality in (1, 4, 8, 16, 512, 4096):
+        if quality in (Quality.SDTV, Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV,
+                       Quality.UHD_4K_TV, Quality.UHD_8K_TV):
             rel_type = ' HDTV'
             if 'ahdtv' in name:
                 rel_type = ' AHDTV'
@@ -541,8 +487,7 @@ def scene_quality_from_name(name, quality):
             elif 'uhdtv' in name:
                 rel_type = ' UHDTV'

-        # If SDDVD
-        if quality == 2:
+        if quality == Quality.SDDVD:
             rel_type = ' BDRip'
             if re.search(r'br(-| |\.)?(rip|mux)', name):
                 rel_type = ' BRRip'
@@ -550,7 +495,8 @@ def scene_quality_from_name(name, quality):
                 rel_type = ' DVDRip'

         # If any WEB type
-        if quality in (32, 64, 1024, 8192):
+        if quality in (Quality.HDWEBDL, Quality.FULLHDWEBDL, Quality.UHD_4K_WEBDL,
+                       Quality.UHD_8K_WEBDL):
             rel_type = ' WEB'
             if re.search(r'web(-| |\.)?dl', name):
                 rel_type = ' WEB-DL'
@@ -560,63 +506,15 @@ def scene_quality_from_name(name, quality):
         return rel_type + codec

     @staticmethod
-    def status_from_name(name, anime=False):
-        """
-        Get a status object from filename.
-
-        :param name: Filename to check
-        :param anime: boolean to enable anime parsing
-        :return: Composite status/quality object
-        """
-        quality = Quality.name_quality(name, anime)
-        return Quality.composite_status(DOWNLOADED, quality)
-
-    guessit_map = {
-        '720p': {
-            'HDTV': HDTV,
-            'WEB-DL': HDWEBDL,
-            'WEBRip': HDWEBDL,
-            'BluRay': HDBLURAY,
-        },
-        '1080i': RAWHDTV,
-        '1080p': {
-            'HDTV': FULLHDTV,
-            'WEB-DL': FULLHDWEBDL,
-            'WEBRip': FULLHDWEBDL,
-            'BluRay': FULLHDBLURAY
-        },
-        '4K': {
-            'HDTV': UHD_4K_TV,
-            'WEB-DL': UHD_4K_WEBDL,
-            'WEBRip': UHD_4K_WEBDL,
-            'BluRay': UHD_4K_BLURAY
-        }
-    }
-
-    to_guessit_format_list = [
-        ANYHDTV, ANYWEBDL, ANYBLURAY, ANYHDTV | UHD_4K_TV, ANYWEBDL | UHD_4K_WEBDL, ANYBLURAY | UHD_4K_BLURAY
-    ]
-
-    to_guessit_screen_size_map = {
-        HDTV | HDWEBDL | HDBLURAY: '720p',
-        RAWHDTV: '1080i',
-        FULLHDTV | FULLHDWEBDL | FULLHDBLURAY: '1080p',
-        UHD_4K_TV | UHD_4K_WEBDL | UHD_4K_BLURAY: '4K',
-    }
-
-    @staticmethod
-    def should_search(status, show_obj, manually_searched):
+    def should_search(cur_status, cur_quality, show_obj, manually_searched):
         """Return True if the episode should be searched for a better quality.

-        If cur_quality is Quality.NONE, it will return True as its a invalid quality
-        If cur_quality is Quality.UNKNOWN it will return True only if is not in Allowed (Unknown can be in Allowed)
-
-        :param status: current status of the episode
+        :param cur_status: current status of the episode
+        :param cur_quality: current quality of the episode
         :param show_obj: Series object of the episode we will check if we should search or not
         :param manually_searched: if episode was manually searched by user
         :return: True if we need to run a search for the given episode
         """
-        cur_status, cur_quality = Quality.split_composite_status(int(status) or UNSET)
         allowed_qualities, preferred_qualities = show_obj.current_qualities

         # When user manually searched, we should consider this as final quality.
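Everything below keeps leaning on the old composite encoding during migration: a single integer packed `status + 100 * quality` (see the removed `Quality.composite_status` and `split_composite_status` above, and `medusa/databases/utils.py` later in this diff). A minimal standalone sketch of that round trip, using the pre-migration quality bit values; the helper names here are illustrative, not part of the patch:

```python
def old_composite_status(status, quality):
    # Old scheme: e.g. DOWNLOADED (4) at old HDTV (4) -> 4 + 100 * 4 = 404.
    return status + 100 * quality


def old_split_composite_status(composite):
    # Mirrors utils.split_composite_status: peel off the largest old quality
    # bit value that fits; the remainder is the plain status code.
    if composite == -1:  # UNSET never carries a quality component
        return -1, 0
    for quality in sorted([1 << n for n in range(16)], reverse=True):  # 32768 .. 1
        if composite > quality * 100:
            return composite - quality * 100, quality
    return composite, 1 << 15  # bare status code; old UNKNOWN quality


assert old_split_composite_status(old_composite_status(4, 4)) == (4, 4)
```

This is why the new `should_search(cur_status, cur_quality, ...)` signature above simply takes both values: callers no longer need to unpack anything.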
@@ -650,30 +548,24 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe
         """Return true if the old quality should be replaced with new quality.

         If not preferred qualities, then any downloaded quality is final
-        if preferred quality, then new quality should be higher than existing one AND not be in preferred
-        If new quality is already in preferred then is already final quality.
+        If preferred quality, then new quality should be higher than existing one AND not be in preferred
+        If new quality is already in preferred then is already final quality
         Force (forced search) bypass episode status only or unknown quality
-        If old quality is Quality.NONE, it will be replaced

         :param ep_status: current status of the episode
         :param old_quality: current quality of the episode
-        :param new_quality: quality of the episode we found it and check if we should snatch it
+        :param new_quality: quality of the episode we found
         :param allowed_qualities: List of selected allowed qualities of the show we are checking
         :param preferred_qualities: List of selected preferred qualities of the show we are checking
         :param download_current_quality: True if user wants the same existing quality to be snatched
         :param force: True if user did a forced search for that episode
-        :param manually_searched: True if episode was manually searched by user
+        :param manually_searched: True if episode was manually searched
         :param search_type: The search type, that started this method
-        :return: True if the old quality should be replaced with new quality.
+        :return: True if the old quality should be replaced with new quality
         """
-        if ep_status and ep_status not in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER:
+        if ep_status and ep_status not in (DOWNLOADED, SNATCHED, SNATCHED_PROPER):
             if not force:
-                return False, 'Episode status is not DOWNLOADED|SNATCHED|SNATCHED PROPER. Ignoring new quality'
-
-        # If existing quality is UNKNOWN but Preferred is set, UNKNOWN should be replaced.
-        if old_quality == Quality.UNKNOWN:
-            if not (force or preferred_qualities):
-                return False, 'Existing quality is UNKNOWN. Ignoring new quality'
+                return False, 'Episode status is not Downloaded, Snatched or Snatched Proper. Ignoring new quality'

         if manually_searched:
             if not force:
@@ -693,17 +585,15 @@ def should_replace(ep_status, old_quality, new_quality, allowed_qualities, prefe
             # If old quality is no longer wanted quality and new quality is wanted, we should replace.
             return True, 'Existing quality is no longer in any wanted quality lists. Accepting new quality'

-        if force and download_current_quality:
+        if download_current_quality and force and new_quality == old_quality:
             # If we already downloaded this quality, just re-download it as long as it is still part of the wanted qualities
-            return new_quality == old_quality, 'Redownloading same quality'
+            return True, 'Re-downloading same quality'

         if preferred_qualities:
             # Don't replace because old quality is already best quality.
             if old_quality in preferred_qualities:
                 return False, 'Existing quality is already a preferred quality. Ignoring new quality'

-            # Old quality is not final. Check if we should replace:
-            # Replace if preferred quality
             if new_quality in preferred_qualities:
                 return True, 'New quality is preferred. Accepting new quality'
@@ -729,6 +619,44 @@ def wanted_quality(new_quality, allowed_qualities, preferred_qualities):
         """Check if new quality is wanted."""
         return new_quality in allowed_qualities + preferred_qualities

+    # Map guessit screen sizes and formats to our Quality values
+    guessit_map = {
+        '720p': {
+            'HDTV': HDTV,
+            'WEB-DL': HDWEBDL,
+            'WEBRip': HDWEBDL,
+            'BluRay': HDBLURAY,
+        },
+        '1080i': RAWHDTV,
+        '1080p': {
+            'HDTV': FULLHDTV,
+            'WEB-DL': FULLHDWEBDL,
+            'WEBRip': FULLHDWEBDL,
+            'BluRay': FULLHDBLURAY
+        },
+        '4K': {
+            'HDTV': UHD_4K_TV,
+            'WEB-DL': UHD_4K_WEBDL,
+            'WEBRip': UHD_4K_WEBDL,
+            'BluRay': UHD_4K_BLURAY
+        }
+    }
+
+    # Consolidate the guessit-supported screen sizes of each format
+    to_guessit_format_list = [
+        ANYHDTV | UHD_4K_TV,
+        ANYWEBDL | UHD_4K_WEBDL,
+        ANYBLURAY | UHD_4K_BLURAY
+    ]
+
+    # Consolidate the formats of each guessit-supported screen size
+    to_guessit_screen_size_map = {
+        HDTV | HDWEBDL | HDBLURAY: '720p',
+        RAWHDTV: '1080i',
+        FULLHDTV | FULLHDWEBDL | FULLHDBLURAY: '1080p',
+        UHD_4K_TV | UHD_4K_WEBDL | UHD_4K_BLURAY: '4K',
+    }
+
     @staticmethod
     def from_guessit(guess):
         """
@@ -759,15 +687,17 @@ def from_guessit(guess):
         return quality if quality is not None else Quality.UNKNOWN

     @staticmethod
-    def to_guessit(status):
-        """Return a guessit dict containing 'screen_size and format' from a Quality (composite status).
+    def to_guessit(quality):
+        """Return a guessit dict containing 'screen_size' and 'format' from a Quality.

-        :param status: a quality composite status
-        :type status: int
+        :param quality: a quality
+        :type quality: int
         :return: dict {'screen_size': <screen_size>, 'format': <format>}
         :rtype: dict (str, str)
         """
-        _, quality = Quality.split_composite_status(status)
+        if quality not in Quality.qualityStrings:
+            quality = Quality.UNKNOWN
+
         screen_size = Quality.to_guessit_screen_size(quality)
         fmt = Quality.to_guessit_format(quality)
         result = dict()
@@ -787,9 +717,11 @@ def to_guessit_format(quality):
         :return: guessit format
         :rtype: str
         """
-        for q in Quality.to_guessit_format_list:
-            if quality & q:
-                key = q & (512 - 1)  # 4k formats are bigger than 384 and are not part of ANY* bit set
+        for quality_set in Quality.to_guessit_format_list:
+            if quality_set & quality:  # If quality_set contains quality
+                # Remove all 4K (and above) formats as they are bigger than Quality.ANYBLURAY,
+                # and they are not part of an "ANY*" bit set
+                key = quality_set & (Quality.UHD_4K_TV - 1)
                 return Quality.combinedQualityStrings.get(key)

     @staticmethod
@@ -805,28 +737,6 @@ def to_guessit_screen_size(quality):
             if quality & key:
                 return value

-    DOWNLOADED = None
-    SNATCHED = None
-    SNATCHED_PROPER = None
-    FAILED = None
-    SNATCHED_BEST = None
-    ARCHIVED = None
-
-
-Quality.DOWNLOADED = [Quality.composite_status(DOWNLOADED, x) for x in Quality.qualityStrings if x is not None]
-Quality.SNATCHED = [Quality.composite_status(SNATCHED, x) for x in Quality.qualityStrings if x is not None]
-Quality.SNATCHED_BEST = [Quality.composite_status(SNATCHED_BEST, x) for x in Quality.qualityStrings if x is not None]
-Quality.SNATCHED_PROPER = [Quality.composite_status(SNATCHED_PROPER, x) for x in Quality.qualityStrings if x is not None]
-Quality.FAILED = [Quality.composite_status(FAILED, x) for x in Quality.qualityStrings if x is not None]
-Quality.ARCHIVED = [Quality.composite_status(ARCHIVED, x) for x in Quality.qualityStrings if x is not None]
-Quality.WANTED = [Quality.composite_status(WANTED, x) for x in Quality.qualityStrings if x is not None]
-
-Quality.DOWNLOADED.sort()
-Quality.SNATCHED.sort()
-Quality.SNATCHED_BEST.sort()
-Quality.SNATCHED_PROPER.sort()
-Quality.FAILED.sort()
-Quality.ARCHIVED.sort()

 HD720p = Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL, Quality.HDBLURAY], [])
 HD1080p = Quality.combine_qualities([Quality.FULLHDTV, Quality.FULLHDWEBDL, Quality.FULLHDBLURAY], [])
@@ -838,9 +748,6 @@ def to_guessit_screen_size(quality):
 UHD = Quality.combine_qualities([UHD_4K, UHD_8K], [])
 ANY = Quality.combine_qualities([SD, HD, UHD], [])

-# legacy template, cant remove due to reference in main_db upgrade?
-BEST = Quality.combine_qualities([Quality.SDTV, Quality.HDTV, Quality.HDWEBDL], [Quality.HDTV])
-
 qualityPresets = (
     ANY,
     SD,
@@ -849,70 +756,17 @@ def to_guessit_screen_size(quality):
 )

 qualityPresetStrings = {
-    SD: "SD",
-    HD: "HD",
-    HD720p: "HD720p",
-    HD1080p: "HD1080p",
-    UHD: "UHD",
-    UHD_4K: "UHD-4K",
-    UHD_8K: "UHD-8K",
-    ANY: "Any",
+    SD: 'SD',
+    HD: 'HD',
+    HD720p: 'HD720p',
+    HD1080p: 'HD1080p',
+    UHD: 'UHD',
+    UHD_4K: 'UHD-4K',
+    UHD_8K: 'UHD-8K',
+    ANY: 'Any',
 }


-class StatusStrings(dict):
-    """Dictionary containing strings for status codes."""
-
-    # todo: Make views return Qualities too
-    statuses = list(Quality.statusPrefixes)
-    qualities = list(Quality.qualityStrings)
-
-    def __missing__(self, key):
-        """
-        If the key is not found try to determine a status from Quality.
-
-        :param key: A numeric key
-        :raise KeyError: if the key is invalid and can't be determined from Quality
-        """
-        try:
-            key = int(key)
-        except (TypeError, ValueError):
-            raise ValueError(key)
-
-        current = Quality.split_composite_status(key)
-        if current.quality in self.qualities:
-            return '{status} ({quality})'.format(
-                status=self[current.status],
-                quality=Quality.qualityStrings[current.quality]
-            )
-        else:  # the key wasn't found in qualities either
-            raise KeyError(key)  # ... so the key is invalid
-
-    def __contains__(self, key):
-        try:
-            key = int(key)
-            return key in self.statuses or key in self.qualities
-        except (TypeError, ValueError):
-            raise ValueError(key)
-
-
-# Assign strings to statuses
-statusStrings = StatusStrings({
-    UNSET: "Unset",
-    UNAIRED: "Unaired",
-    SNATCHED: "Snatched",
-    DOWNLOADED: "Downloaded",
-    SKIPPED: "Skipped",
-    SNATCHED_PROPER: "Snatched (Proper)",
-    WANTED: "Wanted",
-    ARCHIVED: "Archived",
-    IGNORED: "Ignored",
-    SUBTITLED: "Subtitled",
-    FAILED: "Failed",
-    SNATCHED_BEST: "Snatched (Best)"
-})
-
-
 class Overview(object):
     UNAIRED = UNAIRED  # 1
     SNATCHED = SNATCHED  # 2
@@ -926,16 +780,16 @@ class Overview(object):
     QUAL = 50

     overviewStrings = {
-        SKIPPED: "skipped",
-        WANTED: "wanted",
-        QUAL: "qual",
-        GOOD: "good",
-        UNAIRED: "unaired",
-        SNATCHED: "snatched",
+        SKIPPED: 'skipped',
+        WANTED: 'wanted',
+        QUAL: 'qual',
+        GOOD: 'good',
+        UNAIRED: 'unaired',
+        SNATCHED: 'snatched',
         # we can give these a different class later, otherwise
         # breaks checkboxes in displayShow for showing different statuses
-        SNATCHED_BEST: "snatched",
-        SNATCHED_PROPER: "snatched"
+        SNATCHED_BEST: 'snatched',
+        SNATCHED_PROPER: 'snatched'
     }
diff --git a/medusa/databases/cache_db.py b/medusa/databases/cache_db.py
index 48d0dc0fce..2048495dd7 100644
--- a/medusa/databases/cache_db.py
+++ b/medusa/databases/cache_db.py
@@ -2,7 +2,15 @@

 from __future__ import unicode_literals

+import logging
+
 from medusa import db
+from medusa.databases import utils
+from medusa.logger.adapters.style import BraceAdapter
+
+
+log = BraceAdapter(logging.getLogger(__name__))
+log.logger.addHandler(logging.NullHandler())


 # Add new migrations at the bottom of the list
@@ -15,7 +23,8 @@ def execute(self):
         queries = [
             ("CREATE TABLE lastUpdate (provider TEXT, time NUMERIC);",),
             ("CREATE TABLE lastSearch (provider TEXT, time NUMERIC);",),
-            ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",),
+            ("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER,"
+             " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);",),
             ("CREATE TABLE scene_names (indexer_id INTEGER, name TEXT);",),
             ("CREATE TABLE network_timezones (network_name TEXT PRIMARY KEY, timezone TEXT);",),
             ("CREATE TABLE scene_exceptions_refresh (list TEXT PRIMARY KEY, last_refreshed INTEGER);",),
@@ -94,8 +103,10 @@ def test(self):
     def execute(self):
         self.connection.action("DROP TABLE IF EXISTS tmp_scene_exceptions;")
         self.connection.action("ALTER TABLE scene_exceptions RENAME TO tmp_scene_exceptions;")
-        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER, show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);")
-        self.connection.action("INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name, season, custom FROM tmp_scene_exceptions;")
+        self.connection.action("CREATE TABLE scene_exceptions (exception_id INTEGER PRIMARY KEY, indexer_id INTEGER,"
+                               " show_name TEXT, season NUMERIC DEFAULT -1, custom NUMERIC DEFAULT 0);")
+        self.connection.action("INSERT INTO scene_exceptions SELECT exception_id, tvdb_id as indexer_id, show_name,"
+                               " season, custom FROM tmp_scene_exceptions;")
         self.connection.action("DROP TABLE tmp_scene_exceptions;")
@@ -155,4 +166,35 @@ def execute(self):
         self.addColumn('scene_names', 'indexer', 'NUMERIC', -1)

         # clean up null values from the scene_exceptions_table
-        self.connection.action("DELETE FROM scene_exceptions WHERE indexer = '' or indexer is null;")
+        self.connection.action("DELETE FROM scene_exceptions WHERE indexer = '' OR indexer IS NULL;")
+
+
+class ClearProviderTables(AddIndexerIds):
+    """Clear provider cache items by deleting their tables."""
+
+    def test(self):
+        """Test if the version is at least 2."""
+        return self.connection.version >= (2, None)
+
+    def execute(self):
+        utils.backup_database(self.connection.path, self.connection.version)
+
+        self.clear_provider_tables()
+        self.inc_major_version()
+
+        log.info('Updated to: {}.{}', *self.connection.version)
+
+    def clear_provider_tables(self):
+        providers = self.connection.select(
+            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT IN ('lastUpdate',"
+            " 'lastSearch', 'scene_names', 'network_timezones', 'scene_exceptions_refresh',"
+            " 'db_version', 'scene_exceptions', 'last_update');")
+
+        for provider in providers:
+            self.connection.action("DELETE FROM '{name}';".format(name=provider[b'name']))
+
+    def inc_major_version(self):
+        major_version, minor_version = self.connection.version
+        major_version += 1
+        self.connection.action("UPDATE db_version SET db_version = ?;", [major_version])
+        return self.connection.version
diff --git a/medusa/databases/failed_db.py b/medusa/databases/failed_db.py
index 32c4140697..b4df13d77a 100644
--- a/medusa/databases/failed_db.py
+++ b/medusa/databases/failed_db.py
@@ -6,6 +6,7 @@

 from medusa import db
 from medusa.common import Quality
+from medusa.databases import utils
 from medusa.logger.adapters.style import BraceAdapter

@@ -23,8 +24,8 @@ def execute(self):
         queries = [
             ('CREATE TABLE failed (release TEXT, size NUMERIC, provider TEXT);',),
             ('CREATE TABLE history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT,'
-             ' old_status NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1, season NUMERIC DEFAULT -1,'
-             ' episode NUMERIC DEFAULT -1);',),
+             ' status NUMERIC DEFAULT -1, quality NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1,'
+             ' season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1);',),
             ('CREATE TABLE db_version (db_version INTEGER);',),
             ('INSERT INTO db_version (db_version) VALUES (1);',),
         ]
@@ -59,11 +60,11 @@ class HistoryStatus(History):
     """Store episode status before snatch to revert to if necessary."""

     def test(self):
-        return self.hasColumn('history', 'old_status')
+        return self.hasColumn('history', 'showid')

     def execute(self):
-        self.addColumn('history', 'old_status', 'NUMERIC', Quality.NONE)
         self.addColumn('history', 'showid', 'NUMERIC', '-1')
+        self.addColumn('history', 'old_status', 'NUMERIC', Quality.NA)
         self.addColumn('history', 'season', 'NUMERIC', '-1')
         self.addColumn('history', 'episode', 'NUMERIC', '-1')
@@ -93,7 +94,7 @@ def execute(self):
             if series[b'indexer_id'] not in series_dict:
                 series_dict[series[b'indexer_id']] = series[b'indexer']

-        query = 'SELECT showid FROM history WHERE indexer_id is null'
+        query = 'SELECT showid FROM history WHERE indexer_id IS NULL'
         results = self.connection.select(query)
         if not results:
             return
@@ -111,3 +112,89 @@ def execute(self):
             self.connection.action(
                 'UPDATE history SET indexer_id = ? WHERE showid = ?',
                 [indexer_id, series_id]
             )
+
+
+class UpdateHistoryTableQuality(AddIndexerIds):
+    """Add the quality field and separate status from quality."""
+
+    def test(self):
+        """Test if the table history already has the column quality."""
+        return self.hasColumn('history', 'quality')
+
+    def execute(self):
+        utils.backup_database(self.connection.path, self.connection.version)
+
+        self.translate_status()
+        self.inc_major_version()
+
+    def translate_status(self):
+        """
+        Add columns status and quality.
+
+        Translate composite status/quality from old_status to the new fields.
+        """
+        log.info(u'Transforming old_status (composite) to separated fields status + quality.')
+        self.connection.action('DROP TABLE IF EXISTS new_history;')
+        self.connection.action('CREATE TABLE new_history (date NUMERIC, size NUMERIC, release TEXT, provider TEXT, '
+                               'status NUMERIC DEFAULT -1, quality NUMERIC DEFAULT 0, showid NUMERIC DEFAULT -1, '
+                               'season NUMERIC DEFAULT -1, episode NUMERIC DEFAULT -1, indexer_id NUMERIC)')
+        self.connection.action('INSERT INTO new_history (date, size, release, provider, '
+                               'status, quality, showid, season, episode, indexer_id) '
+                               'SELECT date, size, release, provider, '
+                               'old_status, -1, showid, season, episode, indexer_id '
+                               'FROM history;')
+        self.connection.action('DROP TABLE IF EXISTS history;')
+        self.connection.action('ALTER TABLE new_history RENAME TO history;')
+        self.connection.action('DROP TABLE IF EXISTS new_history;')
+
+        sql_results = self.connection.select('SELECT status FROM history GROUP BY status;')
+        for result in sql_results:
+            status, quality = utils.split_composite_status(result[b'status'])
+            self.connection.action('UPDATE history SET status = ?, quality = ? WHERE status = ?;',
+                                   [status, quality, result[b'status']])
+
+    def inc_major_version(self):
+        major_version, minor_version = self.connection.version
+        major_version += 1
+        self.connection.action("UPDATE db_version SET db_version = ?;", [major_version])
+        return self.connection.version
+
+
+class ShiftQualities(UpdateHistoryTableQuality):
+    """Shift all qualities one place to the left."""
+
+    def test(self):
+        """Test if the version is at least 3."""
+        return self.connection.version >= (3, None)
+
+    def execute(self):
+        utils.backup_database(self.connection.path, self.connection.version)
+
+        self.shift_history_qualities()
+        self.update_status_unknown()
+        self.inc_major_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+
+    def shift_history_qualities(self):
+        """
+        Shift all qualities << 1.
+
+        This makes it possible to set UNKNOWN as 1, making it the lowest quality.
+        """
+        log.info('Shift qualities in history one place to the left.')
+        sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;")
+        for result in sql_results:
+            quality = result[b'quality']
+            new_quality = quality << 1
+            self.connection.action(
+                "UPDATE history SET quality = ? WHERE quality = ?;",
+                [new_quality, quality]
+            )
+
+    def update_status_unknown(self):
+        """Change any `UNKNOWN` quality to 1."""
+        log.info(u'Updating UNKNOWN quality in history')
+        self.connection.action("UPDATE history SET quality = 1 WHERE quality = 65536;")
diff --git a/medusa/databases/main_db.py b/medusa/databases/main_db.py
index aa2660a857..f6c410b455 100644
--- a/medusa/databases/main_db.py
+++ b/medusa/databases/main_db.py
@@ -4,12 +4,12 @@

 import datetime
 import logging
-import os.path
 import sys
 import warnings

-from medusa import common, db, helpers, subtitles
-from medusa.helper.common import dateTimeFormat, episode_num
+from medusa import common, db, subtitles
+from medusa.databases import utils
+from medusa.helper.common import dateTimeFormat
 from medusa.indexers.indexer_config import STATUS_MAP
 from medusa.logger.adapters.style import BraceAdapter
 from medusa.name_parser.parser import NameParser
@@ -23,7 +23,7 @@
 MAX_DB_VERSION = 44

 # Used to check when checking for updates
-CURRENT_MINOR_DB_VERSION = 9
+CURRENT_MINOR_DB_VERSION = 11


 class MainSanityCheck(db.DBSanityCheck):
@@ -37,13 +37,12 @@ def check(self):
         self.fix_invalid_airdates()
         # self.fix_subtitles_codes()
         self.fix_show_nfo_lang()
-        self.convert_archived_to_compound()
         self.fix_subtitle_reference()
         self.clean_null_indexer_mappings()

     def clean_null_indexer_mappings(self):
         log.debug(u'Checking for null indexer mappings')

-        query = "SELECT * from indexer_mapping where mindexer_id = ''"
+        query = "SELECT * from indexer_mapping WHERE mindexer_id = ''"
         sql_results = self.connection.select(query)

         if sql_results:
@@ -54,9 +53,9 @@ def clean_null_indexer_mappings(self):
     def update_old_propers(self):
         # This is called once when we create proper_tags columns
         log.debug(u'Checking for old propers without proper tags')
-        query = "SELECT resource FROM history WHERE (proper_tags is null or proper_tags is '') " + \
+        query = "SELECT resource FROM history WHERE (proper_tags IS NULL OR proper_tags = '') " + \
                 "AND (action LIKE '%2' OR action LIKE '%9') AND " + \
-                "(resource LIKE '%REPACK%' or resource LIKE '%PROPER%' or resource LIKE '%REAL%')"
+                "(resource LIKE '%REPACK%' OR resource LIKE '%PROPER%' OR resource LIKE '%REAL%')"
         sql_results = self.connection.select(query)
         if sql_results:
             for sql_result in sql_results:
@@ -87,40 +86,6 @@ def fix_subtitle_reference(self):
                     "WHERE episode_id = %i" % (sql_result[b'episode_id'])
                 )

-    def convert_archived_to_compound(self):
-        log.debug(u'Checking for archived episodes not qualified')
-
-        query = "SELECT episode_id, showid, e.status, e.location, season, episode, anime " + \
-                "FROM tv_episodes e, tv_shows s WHERE e.status = %s AND e.showid = s.indexer_id" % common.ARCHIVED
-
-        sql_results = self.connection.select(query)
-        if sql_results:
-            log.warning(u'Found {0} shows with bare archived status, '
-                        u'attempting automatic conversion...',
-                        len(sql_results))
-
-        for archivedEp in sql_results:
-            fixedStatus = common.Quality.composite_status(common.ARCHIVED, common.Quality.UNKNOWN)
-            existing = archivedEp[b'location'] and os.path.exists(archivedEp[b'location'])
-            if existing:
-                quality = common.Quality.name_quality(archivedEp[b'location'], archivedEp[b'anime'], extend=False)
-                fixedStatus = common.Quality.composite_status(common.ARCHIVED, quality)
-
-            log.info(
-                u'Changing status from {old_status} to {new_status} for'
-                u' {id}: {ep} at {location} (File {result})',
-                {'old_status': common.statusStrings[common.ARCHIVED],
-                 'new_status': common.statusStrings[fixedStatus],
-                 'id': archivedEp[b'showid'],
-                 'ep': episode_num(archivedEp[b'season'],
-                                   archivedEp[b'episode']),
-                 'location': archivedEp[b'location'] or 'unknown location',
-                 'result': 'EXISTS' if existing else 'NOT FOUND', }
-            )
-
-            self.connection.action("UPDATE tv_episodes SET status = %i WHERE episode_id = %i" %
-                                   (fixedStatus, archivedEp[b'episode_id']))
-
     def fix_duplicate_episodes(self):

         sql_results = self.connection.select(
@@ -146,7 +111,9 @@ def fix_duplicate_episodes(self):
     def fix_orphan_episodes(self):

         sql_results = self.connection.select(
-            "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id WHERE tv_shows.indexer_id is NULL")
+            "SELECT episode_id, showid, tv_shows.indexer_id FROM tv_episodes"
+            " LEFT JOIN tv_shows ON tv_episodes.showid=tv_shows.indexer_id"
+            " WHERE tv_shows.indexer_id IS NULL;")

         for cur_orphan in sql_results:
             log.debug(u'Orphan episode detected! episode_id: {0!s}'
@@ -158,37 +125,32 @@ def fix_orphan_episodes(self):
     def fix_missing_table_indexes(self):
         if not self.connection.select("PRAGMA index_info('idx_tv_episodes_showid_airdate')"):
-            log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes'
-                     u' table detected!, fixing...')
+            log.info(u'Missing idx_tv_episodes_showid_airdate for TV Episodes table detected, fixing...')
             self.connection.action("CREATE INDEX idx_tv_episodes_showid_airdate ON tv_episodes(showid, airdate);")

         if not self.connection.select("PRAGMA index_info('idx_showid')"):
-            log.info(u'Missing idx_showid for TV Episodes table detected!,'
-                     u' fixing...')
+            log.info(u'Missing idx_showid for TV Episodes table detected, fixing...')
             self.connection.action("CREATE INDEX idx_showid ON tv_episodes (showid);")

         if not self.connection.select("PRAGMA index_info('idx_status')"):
-            log.info(u'Missing idx_status for TV Episodes table detected!,'
-                     u' fixing...')
-            self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, season, episode, airdate)")
+            log.info(u'Missing idx_status for TV Episodes table detected, fixing...')
+            self.connection.action("CREATE INDEX idx_status ON tv_episodes (status, quality, season, episode, airdate)")

         if not self.connection.select("PRAGMA index_info('idx_sta_epi_air')"):
-            log.info(u'Missing idx_sta_epi_air for TV Episodes table'
-                     u' detected!, fixing...')
-            self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, episode, airdate)")
+            log.info(u'Missing idx_sta_epi_air for TV Episodes table detected, fixing...')
+            self.connection.action("CREATE INDEX idx_sta_epi_air ON tv_episodes (status, quality, episode, airdate)")

         if not self.connection.select("PRAGMA index_info('idx_sta_epi_sta_air')"):
-            log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table'
-                     u' detected!, fixing...')
-            self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, airdate)")
+            log.info(u'Missing idx_sta_epi_sta_air for TV Episodes table detected, fixing...')
+            self.connection.action("CREATE INDEX idx_sta_epi_sta_air ON tv_episodes (season, episode, status, quality, airdate)")

     def fix_unaired_episodes(self):
-        curDate = datetime.date.today()
+        cur_date = datetime.date.today()

         sql_results = self.connection.select(
-            "SELECT episode_id FROM tv_episodes WHERE (airdate > ? or airdate = 1) AND status in (?,?) AND season > 0",
-            [curDate.toordinal(), common.SKIPPED, common.WANTED])
+            "SELECT episode_id FROM tv_episodes WHERE (airdate > ? OR airdate = 1) AND status in (?, ?) AND season > 0",
+            [cur_date.toordinal(), common.SKIPPED, common.WANTED])

         for cur_unaired in sql_results:
             log.info(u'Fixing unaired episode status for episode_id: {0!s}',
@@ -257,16 +219,7 @@ def fix_subtitles_codes(self):
                 [','.join(langs), datetime.datetime.now().strftime(dateTimeFormat), sql_result[b'episode_id']])

     def fix_show_nfo_lang(self):
-        self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 or lang = '0'")
-
-
-def backupDatabase(version):
-    log.info(u'Backing up database before upgrade')
-    if not helpers.backup_versioned_file(db.dbFilename(), version):
-        log.error(u'Database backup failed, abort upgrading database')
-        sys.exit(1)
-    else:
-        log.info(u'Proceeding with upgrade')
+        self.connection.action("UPDATE tv_shows SET lang = '' WHERE lang = 0 OR lang = '0';")


 # ======================
@@ -333,7 +286,7 @@ def test(self):
         return self.checkDBVersion() >= 40

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        utils.backup_database(self.connection.path, self.checkDBVersion())

         log.info(u'Adding column version to tv_episodes and history')
         self.addColumn("tv_episodes", "version", "NUMERIC", "-1")
@@ -348,7 +301,7 @@ def test(self):
         return self.checkDBVersion() >= 41

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        utils.backup_database(self.connection.path, self.checkDBVersion())

         log.info(u'Adding column default_ep_status to tv_shows')
         self.addColumn("tv_shows", "default_ep_status", "NUMERIC", "-1")
@@ -361,12 +314,19 @@ def test(self):
         return self.checkDBVersion() >= 42

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        utils.backup_database(self.connection.path, self.checkDBVersion())

         log.info(u'Converting column indexer and default_ep_status field types to numeric')
         self.connection.action("DROP TABLE IF EXISTS tmp_tv_shows")
         self.connection.action("ALTER TABLE tv_shows RENAME TO tmp_tv_shows")
-        self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC, indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT, classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT, flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC, lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT, last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC, rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC, scene NUMERIC, default_ep_status NUMERIC)")
+        self.connection.action("CREATE TABLE tv_shows (show_id INTEGER PRIMARY KEY, indexer_id NUMERIC,"
+                               " indexer NUMERIC, show_name TEXT, location TEXT, network TEXT, genre TEXT,"
+                               " classification TEXT, runtime NUMERIC, quality NUMERIC, airs TEXT, status TEXT,"
+                               " flatten_folders NUMERIC, paused NUMERIC, startyear NUMERIC, air_by_date NUMERIC,"
+                               " lang TEXT, subtitles NUMERIC, notify_list TEXT, imdb_id TEXT,"
+                               " last_update_indexer NUMERIC, dvdorder NUMERIC, archive_firstmatch NUMERIC,"
+                               " rls_require_words TEXT, rls_ignore_words TEXT, sports NUMERIC, anime NUMERIC,"
+                               " scene NUMERIC, default_ep_status NUMERIC)")
         self.connection.action("INSERT INTO tv_shows SELECT * FROM tmp_tv_shows")
         self.connection.action("DROP TABLE tmp_tv_shows")
@@ -375,7 +335,7 @@ def execute(self):

 class AddMinorVersion(AlterTVShowsFieldTypes):
     def test(self):
-        return self.checkDBVersion() >= 42 and self.hasColumn(b'db_version', b'db_minor_version')
+        return self.checkDBVersion() >= 43 and self.hasColumn(b'db_version', b'db_minor_version')

     def incDBVersion(self):
         warnings.warn("Deprecated: Use inc_major_version or inc_minor_version instead", DeprecationWarning)
@@ -384,21 +344,24 @@ def inc_major_version(self):
         major_version, minor_version = self.connection.version
         major_version += 1
         minor_version = 0
-        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
+        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;",
+                               [major_version, minor_version])
         return self.connection.version

     def inc_minor_version(self):
         major_version, minor_version = self.connection.version
         minor_version += 1
-        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?", [major_version, minor_version])
+        self.connection.action("UPDATE db_version SET db_version = ?, db_minor_version = ?;",
+                               [major_version, minor_version])
         return self.connection.version

     def execute(self):
-        backupDatabase(self.checkDBVersion())
+        utils.backup_database(self.connection.path, self.checkDBVersion())

         log.info(u'Add minor version numbers to database')
         self.addColumn(b'db_version', b'db_minor_version')

+        self.inc_major_version()
         self.inc_minor_version()

         log.info(u'Updated to: {}.{}', *self.connection.version)
@@ -421,7 +384,7 @@ def execute(self):
         """
         Updates the version until 44.1
         """
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Test major and minor version updates database')
         self.inc_major_version()
@@ -443,7 +406,7 @@ def execute(self):
         """
         Updates the version until 44.2 and adds proper_tags column
         """
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         if not self.hasColumn('history', 'proper_tags'):
             log.info(u'Adding column proper_tags to history')
@@ -469,7 +432,7 @@ def execute(self):
         """
         Updates the version until 44.3 and adds manually_searched columns
         """
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         if not self.hasColumn('history', 'manually_searched'):
             log.info(u'Adding column manually_searched to history')
@@ -495,13 +458,16 @@ def test(self):
         return self.connection.version >= (44, 4)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Adding column info_hash in history')
         if not self.hasColumn("history", "info_hash"):
             self.addColumn("history", "info_hash", 'TEXT', None)

+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+

 class AddPlot(AddInfoHash):
     """Adds column plot to imdb_info table."""
@@ -513,7 +479,7 @@ def test(self):
         return self.connection.version >= (44, 5)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Adding column plot in imdb_info')
         if not self.hasColumn('imdb_info', 'plot'):
@@ -522,8 +488,11 @@ def execute(self):
         log.info(u'Adding column plot in tv_show')
         if not self.hasColumn('tv_shows', 'plot'):
             self.addColumn('tv_shows', 'plot', 'TEXT', None)

+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+

 class AddResourceSize(AddPlot):
     """Adds column size to history table."""
@@ -535,7 +504,7 @@ def test(self):
         return self.connection.version >= (44, 6)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u"Adding column size in history")
         if not self.hasColumn("history", "size"):
@@ -543,6 +512,8 @@ def execute(self):

         self.inc_minor_version()

+        log.info(u'Updated to: {}.{}', *self.connection.version)
+

 class AddPKIndexerMapping(AddResourceSize):
     """Add PK to mindexer column in indexer_mapping table."""
@@ -552,7 +523,7 @@ def test(self):
         return self.connection.version >= (44, 7)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Adding PK to mindexer column in indexer_mapping table')
         self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;")
@@ -563,8 +534,11 @@ def execute(self):
         self.connection.action("DROP TABLE IF EXISTS indexer_mapping;")
         self.connection.action("ALTER TABLE new_indexer_mapping RENAME TO indexer_mapping;")
         self.connection.action("DROP TABLE IF EXISTS new_indexer_mapping;")

+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+

 class AddIndexerInteger(AddPKIndexerMapping):
     """Make indexer as INTEGER in tv_episodes table."""
@@ -574,24 +548,27 @@ def test(self):
         return self.connection.version >= (44, 8)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Make indexer and indexer_id as INTEGER in tv_episodes table')
         self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;")
-        self.connection.action("CREATE TABLE new_tv_episodes(episode_id INTEGER PRIMARY KEY, showid NUMERIC,"
-                               "indexerid INTEGER, indexer INTEGER, name TEXT, season NUMERIC, episode NUMERIC,"
-                               "description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, status NUMERIC,"
-                               "location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT,"
-                               "subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP,"
-                               "is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC,"
-                               "absolute_number NUMERIC, scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1,"
-                               "release_group TEXT, manually_searched NUMERIC);")
+        self.connection.action(
+            "CREATE TABLE new_tv_episodes "
+            "(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, name TEXT, "
+            "season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, hastbn NUMERIC, "
+            "status NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, subtitles TEXT, "
+            "subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, is_proper NUMERIC, "
+            "scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, scene_absolute_number NUMERIC, "
+            "version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);")
         self.connection.action("INSERT INTO new_tv_episodes SELECT * FROM tv_episodes;")
         self.connection.action("DROP TABLE IF EXISTS tv_episodes;")
         self.connection.action("ALTER TABLE new_tv_episodes RENAME TO tv_episodes;")
         self.connection.action("DROP TABLE IF EXISTS new_tv_episodes;")

+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+

 class AddIndexerIds(AddIndexerInteger):
     """
@@ -608,7 +585,7 @@ def test(self):
         return self.connection.version >= (44, 9)

     def execute(self):
-        backupDatabase(self.connection.version)
+        utils.backup_database(self.connection.path, self.connection.version)

         log.info(u'Adding column indexer_id in history')
         if not self.hasColumn('history', 'indexer_id'):
@@ -683,7 +660,7 @@ def create_series_dict():
                 migration_config[0], migration_config[1]
             )

-            query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} is null'.format(config=migration_config)
+            query = 'SELECT {config[1]} FROM {config[0]} WHERE {config[2]} IS NULL'.format(config=migration_config)
             results = self.connection.select(query)
             if not results:
                 continue
@@ -703,6 +680,163 @@ def create_series_dict():
                 [indexer_id, series_id])

         self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+
         # Flag the image migration.
         from medusa import app
         app.MIGRATE_IMAGES = True
+
+
+class AddSeparatedStatusQualityFields(AddIndexerIds):
+    """Add new separated status and quality fields."""
+
+    def test(self):
+        """Test if the version is at least 44.10."""
+        return self.connection.version >= (44, 10)
+
+    def execute(self):
+        utils.backup_database(self.connection.path, self.connection.version)
+
+        log.info(u'Adding new quality field in the tv_episodes table')
+        self.connection.action('DROP TABLE IF EXISTS old_tv_episodes;')
+        self.connection.action('ALTER TABLE tv_episodes RENAME TO old_tv_episodes;')
+
+        self.connection.action(
+            'CREATE TABLE IF NOT EXISTS tv_episodes '
+            '(episode_id INTEGER PRIMARY KEY, showid NUMERIC, indexerid INTEGER, indexer INTEGER, '
+            'name TEXT, season NUMERIC, episode NUMERIC, description TEXT, airdate NUMERIC, hasnfo NUMERIC, '
+            'hastbn NUMERIC, status NUMERIC, quality NUMERIC, location TEXT, file_size NUMERIC, release_name TEXT, '
+            'subtitles TEXT, subtitles_searchcount NUMERIC, subtitles_lastsearch TIMESTAMP, '
+            'is_proper NUMERIC, scene_season NUMERIC, scene_episode NUMERIC, absolute_number NUMERIC, '
+            'scene_absolute_number NUMERIC, version NUMERIC DEFAULT -1, release_group TEXT, manually_searched NUMERIC);'
+        )
+
+        # Re-insert old values, setting the new quality column to the invalid value of -1
+        self.connection.action(
+            'INSERT INTO tv_episodes '
+            '(showid, indexerid, indexer, name, season, episode, description, airdate, hasnfo, '
+            'hastbn, status, quality, location, file_size, release_name, subtitles, subtitles_searchcount, '
+            'subtitles_lastsearch, is_proper, scene_season, scene_episode, absolute_number, scene_absolute_number, '
+            'version, release_group, manually_searched) '
+            'SELECT showid, indexerid, indexer, '
+            'name, season, episode, description, airdate, hasnfo, '
+            'hastbn, status, -1 AS quality, location, file_size, release_name, '
+            'subtitles, subtitles_searchcount, subtitles_lastsearch, '
+            'is_proper, scene_season, scene_episode, absolute_number, '
+            'scene_absolute_number, version, release_group, manually_searched '
+            'FROM old_tv_episodes;'
+        )
+
+        # We have all that we need, drop the old table
+        for index in ['idx_sta_epi_air', 'idx_sta_epi_sta_air', 'idx_status']:
+            log.info(u'Dropping the index on {0}', index)
+            self.connection.action('DROP INDEX IF EXISTS {index};'.format(index=index))
+        self.connection.action('DROP TABLE IF EXISTS old_tv_episodes;')
+
+        log.info(u'Splitting the composite status into status and quality')
+        sql_results = self.connection.select('SELECT status from tv_episodes GROUP BY status;')
+        for episode in sql_results:
+            composite_status = episode[b'status']
+            status, quality = utils.split_composite_status(composite_status)
+            self.connection.action('UPDATE tv_episodes SET status = ?, quality = ? WHERE status = ?;',
+                                   [status, quality, composite_status])
+
+        # Update `history` table: Remove the quality value from `action`
+        log.info(u'Removing the quality from the action field, as this is a composite status')
+        sql_results = self.connection.select("SELECT action FROM history GROUP BY action;")
+        for item in sql_results:
+            composite_action = item[b'action']
+            status, quality = utils.split_composite_status(composite_action)
+            self.connection.action('UPDATE history SET action = ? WHERE action = ?;',
+                                   [status, composite_action])
+
+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+
+
+class ShiftQualities(AddSeparatedStatusQualityFields):
+    """Shift all qualities one place to the left."""
+
+    def test(self):
+        """Test if the version is at least 44.11."""
+        return self.connection.version >= (44, 11)
+
+    def execute(self):
+        utils.backup_database(self.connection.path, self.connection.version)

+        self.shift_tv_qualities()
+        self.shift_episode_qualities()
+        self.shift_history_qualities()
+        self.inc_minor_version()
+
+        log.info(u'Updated to: {}.{}', *self.connection.version)
+
+    def shift_tv_qualities(self):
+        """
+        Shift all qualities << 1.
+
+        This makes it possible to set UNKNOWN as 1, making it the lowest quality.
+        """
+        log.info('Shift qualities in tv_shows one place to the left.')
+        sql_results = self.connection.select("SELECT quality FROM tv_shows GROUP BY quality ORDER BY quality DESC;")
+        for result in sql_results:
+            quality = result[b'quality']
+            new_quality = quality << 1
+
+            # UNKNOWN quality value is 65536 (1 << 16) instead of 32768 (1 << 15) after the shift.
+            # Qualities in the tv_shows table have the combined values of allowed and preferred qualities.
+            # Preferred quality couldn't contain UNKNOWN
+            if new_quality & 65536 > 0:  # If contains UNKNOWN allowed quality
+                new_quality -= 65536  # Remove it
+                new_quality |= common.Quality.UNKNOWN  # Then re-add it using the correct value
+
+            self.connection.action(
+                "UPDATE tv_shows SET quality = ? WHERE quality = ?;",
+                [new_quality, quality]
+            )
+
+    def shift_episode_qualities(self):
+        """
+        Shift all qualities << 1.
+
+        This makes it possible to set UNKNOWN as 1, making it the lowest quality.
+        """
+        log.info('Shift qualities in tv_episodes one place to the left.')
+        sql_results = self.connection.select("SELECT quality FROM tv_episodes WHERE quality != 0 GROUP BY quality"
+                                             " ORDER BY quality DESC;")
+        for result in sql_results:
+            quality = result[b'quality']
+
+            if quality == 32768:  # Old UNKNOWN quality (1 << 15)
+                new_quality = common.Quality.UNKNOWN
+            else:
+                new_quality = quality << 1
+
+            self.connection.action(
+                "UPDATE tv_episodes SET quality = ? WHERE quality = ?;",
+                [new_quality, quality]
+            )
+
+    def shift_history_qualities(self):
+        """
+        Shift all qualities << 1.
+
+        This makes it possible to set UNKNOWN as 1, making it the lowest quality.
+        """
+        log.info('Shift qualities in history one place to the left.')
+        sql_results = self.connection.select("SELECT quality FROM history GROUP BY quality ORDER BY quality DESC;")
+        for result in sql_results:
+            quality = result[b'quality']
+
+            if quality == 32768:  # Old UNKNOWN quality (1 << 15)
+                new_quality = common.Quality.UNKNOWN
+            else:
+                new_quality = quality << 1
+
+            self.connection.action(
+                "UPDATE history SET quality = ? WHERE quality = ?;",
+                [new_quality, quality]
+            )
diff --git a/medusa/databases/utils.py b/medusa/databases/utils.py
new file mode 100644
index 0000000000..37ba0a5c26
--- /dev/null
+++ b/medusa/databases/utils.py
@@ -0,0 +1,71 @@
+# coding=utf-8
+
+"""General database utility functions."""
+from __future__ import unicode_literals
+
+import logging
+import sys
+
+from medusa import helpers
+from medusa.logger.adapters.style import BraceAdapter
+
+from six import itervalues
+
+
+log = BraceAdapter(logging.getLogger(__name__))
+log.logger.addHandler(logging.NullHandler())
+
+
+def backup_database(path, version):
+    """Back up the database."""
+    log.info('Backing up database before upgrade')
+    if not helpers.backup_versioned_file(path, version):
+        log.error('Database backup failed, abort upgrading database')
+        sys.exit(1)
+    else:
+        log.info('Proceeding with upgrade')
+
+
+def split_composite_status(status):
+    """
+    Split an old composite status code into a status and quality.
+
+    Used by the following migrations:
+    * main_db.py / AddSeparatedStatusQualityFields
+    * failed_db.py / UpdateHistoryTableQuality
+
+    Note: Uses the old quality codes, where UNKNOWN = (1 << 15) = 32768
+
+    :param status: to split
+    :returns: a tuple containing (status, quality)
+    """
+    status_unset = -1
+    qualities = {
+        'NONE': 0,
+        'SDTV': 1,
+        'SDDVD': 2,
+        'HDTV': 4,
+        'RAWHDTV': 8,
+        'FULLHDTV': 16,
+        'HDWEBDL': 32,
+        'FULLHDWEBDL': 64,
+        'HDBLURAY': 128,
+        'FULLHDBLURAY': 256,
+        'UHD_4K_TV': 512,
+        'UHD_4K_WEBDL': 1024,
+        'UHD_4K_BLURAY': 2048,
+        'UHD_8K_TV': 4096,
+        'UHD_8K_WEBDL': 8192,
+        'UHD_8K_BLURAY': 16384,
+        'UNKNOWN': 32768
+    }
+
+    status = int(status)
+    if status == status_unset:
+        return (status_unset, qualities['NONE'])
+
+    for q in sorted(itervalues(qualities), reverse=True):
+        if status > q * 100:
+            return (status - q * 100, q)
+
+    return (status, qualities['UNKNOWN'])
diff --git a/medusa/db.py b/medusa/db.py
index a64e6f273d..5b12f8e1ef 100644
--- a/medusa/db.py
+++ b/medusa/db.py
@@ -1,6 +1,5 @@
 # coding=utf-8
 # Author: Nic Wolfe
-
 #
 # This file is part of Medusa.
 #
@@ -34,24 +33,11 @@

 from six import itervalues, text_type

+
 db_cons = {}
 db_locks = {}


-def dbFilename(filename=None, suffix=None):
-    """
-    @param filename: The sqlite database filename to use. If not specified,
-                     will be made to be application db file
-    @param suffix: The suffix to append to the filename. A '.' will be added
-                   automatically, i.e. suffix='v0' will make dbfile.db.v0
-    @return: the correct location of the database file.
-    """
-    filename = filename or app.APPLICATION_DB
-    if suffix:
-        filename = "%s.%s" % (filename, suffix)
-    return os.path.join(app.DATA_DIR, filename)
-
-
 class DBConnection(object):

     def __init__(self, filename=None, suffix=None, row_type=None):
@@ -63,7 +49,7 @@ def __init__(self, filename=None, suffix=None, row_type=None):
             if self.filename not in db_cons or not db_cons[self.filename]:
                 db_locks[self.filename] = threading.Lock()

-                self.connection = sqlite3.connect(dbFilename(self.filename, self.suffix), 20, check_same_thread=False)
+                self.connection = sqlite3.connect(self.path, 20, check_same_thread=False)
                 self.connection.text_factory = DBConnection._unicode_text_factory

                 db_cons[self.filename] = self.connection
@@ -80,11 +66,27 @@ def __init__(self, filename=None, suffix=None, row_type=None):
                 self._set_row_factory()

         except sqlite3.OperationalError:
-            logger.log(u'Please check your database owner/permissions: {}'.format(dbFilename(self.filename, self.suffix)), logger.WARNING)
+            logger.log(u'Please check your database owner/permissions: {}'.format(
+                self.path), logger.WARNING)
         except Exception as e:
             logger.log(u"DB error: " + ex(e), logger.ERROR)
             raise

+    @property
+    def path(self):
+        """
+        The path to the database file.
+
+        Built from self.filename (defaults to the application db file) and
+        self.suffix: a '.' is added automatically, i.e. suffix='v0' will
+        make dbfile.db.v0.
+        """
+        filename = self.filename
+        if self.suffix:
+            filename = '%s.%s' % (filename, self.suffix)
+
+        return os.path.join(app.DATA_DIR, filename)
+
     def _set_row_factory(self):
         """
         once lock is acquired we can configure the connection for
@@ -454,22 +456,6 @@ def prettyName(class_name):
     return ' '.join([x.group() for x in re.finditer("([A-Z])([a-z0-9]+)", class_name)])


-def restoreDatabase(version):
-    """
-    Restores a database to a previous version (backup file of version must still exist)
-
-    :param version: Version to restore to
-    :return: True if restore succeeds, False if it fails
-    """
-    from medusa import helpers
-    logger.log(u"Restoring database before trying upgrade again")
-    if not helpers.restore_versioned_file(dbFilename(suffix='v' + str(version)), version):
-        logger.log_error_and_exit(u"Database restore failed, abort upgrading database")
-        return False
-    else:
-        return True
-
-
 def _processUpgrade(connection, upgradeClass):
     instance = upgradeClass(connection)
     logger.log(u"Checking " + prettyName(upgradeClass.__name__) + " database upgrade", logger.DEBUG)
diff --git a/medusa/failed_history.py b/medusa/failed_history.py
index 72fe9349ed..5b5a04fd61 100644
--- a/medusa/failed_history.py
+++ b/medusa/failed_history.py
@@ -24,7 +24,7 @@
 from datetime import datetime, timedelta

 from medusa import db, logger
-from medusa.common import FAILED, Quality, WANTED, statusStrings
+from medusa.common import FAILED, WANTED, statusStrings
 from medusa.helper.common import episode_num
 from medusa.helper.exceptions import EpisodeNotFoundException
 from medusa.show.history import History
@@ -145,7 +145,7 @@ def revert_episode(ep_obj):
     """Restore the episodes of a failed download to their original state."""
     failed_db_con = db.DBConnection('failed.db')
     sql_results = failed_db_con.select(
-        'SELECT episode, old_status '
+        'SELECT episode, status, quality '
         'FROM history '
         'WHERE showid=?'
        ' AND indexer_id=?'
@@ -160,7 +160,7 @@ def revert_episode(ep_obj): (show=ep_obj.series.name, ep=episode_num(ep_obj.season, ep_obj.episode))) with ep_obj.lock: if ep_obj.episode in history_eps: - ep_obj.status = history_eps[ep_obj.episode]['old_status'] + ep_obj.status = history_eps[ep_obj.episode]['status'] logger.log(u'Episode have a previous status to revert. Setting it back to {0}'.format (statusStrings[ep_obj.status]), logger.DEBUG) else: @@ -187,8 +187,7 @@ def mark_failed(ep_obj): try: with ep_obj.lock: - quality = Quality.split_composite_status(ep_obj.status)[1] - ep_obj.status = Quality.composite_status(FAILED, quality) + ep_obj.status = FAILED ep_obj.save_to_db() except EpisodeNotFoundException as error: @@ -221,10 +220,10 @@ def log_snatch(search_result): failed_db_con.action( 'INSERT INTO history ' '(date, size, release, provider, showid,' - ' season, episode, old_status, indexer_id)' - 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', + ' season, episode, status, quality, indexer_id)' + 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', [log_date, search_result.size, release, provider, show_obj.indexerid, - episode.season, episode.episode, episode.status, show_obj.indexer] + episode.season, episode.episode, episode.status, episode.quality, show_obj.indexer] ) diff --git a/medusa/helpers/__init__.py b/medusa/helpers/__init__.py index e5377da438..7f3b6c74a1 100644 --- a/medusa/helpers/__init__.py +++ b/medusa/helpers/__init__.py @@ -43,7 +43,7 @@ from imdbpie import imdbpie from medusa import app, db -from medusa.common import USER_AGENT +from medusa.common import DOWNLOADED, USER_AGENT from medusa.helper.common import (episode_num, http_code_description, media_extensions, pretty_file_size, subtitle_extensions) from medusa.helpers.utils import generate @@ -918,67 +918,6 @@ def backup_versioned_file(old_file, version): return True -def restore_versioned_file(backup_file, version): - """Restore a file version to original state. - - For example sickbeard.db.v41 passed with version int(41), will translate back to sickbeard.db. - sickbeard.db.v41. passed with version tuple(41,2), will translate back to sickbeard.db. - - :param backup_file: File to restore - :param version: Version of file to restore - :return: True on success, False on failure - """ - num_tries = 0 - - with suppress(TypeError): - version = '.'.join([str(i) for i in version]) if not isinstance(version, str) else version - - new_file, _ = backup_file[0:backup_file.find(u'v{version}'.format(version=version))] - restore_file = backup_file - - if not os.path.isfile(new_file): - log.debug(u"Not restoring, {file} doesn't exist", {'file': new_file}) - return False - - try: - log.debug(u'Trying to backup {file} to {file}.r{version} before ' - u'restoring backup', {'file': new_file, 'version': version}) - - shutil.move(new_file, new_file + '.' + 'r' + str(version)) - except OSError as error: - log.warning(u'Error while trying to backup DB file {name} before' - u' proceeding with restore: {error!r}', - {'name': restore_file, 'error': error}) - return False - - while not os.path.isfile(new_file): - if not os.path.isfile(restore_file): - log.debug(u'Not restoring, {file} does not exist', - {'file': restore_file}) - break - - try: - log.debug(u'Trying to restore file {old} to {new}', - {'old': restore_file, 'new': new_file}) - shutil.copy(restore_file, new_file) - log.debug(u"Restore done") - break - except OSError as error: - log.warning(u'Error while trying to restore file {name}.' 
- u' Error: {msg!r}', - {'name': restore_file, 'msg': error}) - num_tries += 1 - time.sleep(1) - log.debug(u'Trying again. Attempt #: {0}', num_tries) - - if num_tries >= 10: - log.warning(u'Unable to restore file {old} to {new}', - {'old': restore_file, 'new': new_file}) - return False - - return True - - def get_lan_ip(): """Return IP of system.""" try: @@ -1771,9 +1710,9 @@ def is_already_processed_media(full_filename): """Check if resource was already processed.""" main_db_con = db.DBConnection() history_result = main_db_con.select('SELECT action FROM history ' - "WHERE action LIKE '%04' " + 'WHERE action = ? ' 'AND resource LIKE ?', - ['%' + full_filename]) + [DOWNLOADED, '%' + full_filename]) return bool(history_result) @@ -1796,8 +1735,8 @@ def is_info_hash_processed(info_hash): 'd.season = s.season AND ' 'd.episode = s.episode AND ' 'd.quality = s.quality ' - 'WHERE d.action LIKE "%04"', - [info_hash]) + 'WHERE d.action = ?', + [info_hash, DOWNLOADED]) return bool(history_result) diff --git a/medusa/history.py b/medusa/history.py index 1151379fac..fe9ddf78f3 100644 --- a/medusa/history.py +++ b/medusa/history.py @@ -21,21 +21,18 @@ import datetime from medusa import db -from medusa.common import FAILED, Quality, SNATCHED, SUBTITLED +from medusa.common import FAILED, SNATCHED, SUBTITLED from medusa.helper.encoding import ss from medusa.show.history import History -def _logHistoryItem(action, ep_obj, quality, resource, - provider, version=-1, proper_tags='', manually_searched=False, info_hash=None, size=-1): +def _log_history_item(action, ep_obj, resource, provider, version=-1, proper_tags='', + manually_searched=False, info_hash=None, size=-1): """ Insert a history item in DB :param action: action taken (snatch, download, etc) - :param showid: showid this entry is about - :param season: show season - :param episode: show episode - :param quality: media quality + :param ep_obj: episode object :param resource: resource used :param provider: provider used :param version: tracked version of file (defaults to -1) @@ -49,37 +46,37 @@ def _logHistoryItem(action, ep_obj, quality, resource, "(action, date, indexer_id, showid, season, episode, quality, " "resource, provider, version, proper_tags, manually_searched, info_hash, size) " "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, quality, + [action, logDate, ep_obj.series.indexer, ep_obj.series.series_id, ep_obj.season, ep_obj.episode, ep_obj.quality, resource, provider, version, proper_tags, manually_searched, info_hash, size]) -def log_snatch(searchResult): +def log_snatch(search_result): """ Log history of snatch - :param searchResult: search result object + :param search_result: search result object """ - for ep_obj in searchResult.episodes: + for ep_obj in search_result.episodes: - quality = searchResult.quality - version = searchResult.version - proper_tags = '|'.join(searchResult.proper_tags) - manually_searched = searchResult.manually_searched - info_hash = searchResult.hash.lower() if searchResult.hash else None - size = searchResult.size + version = search_result.version + proper_tags = '|'.join(search_result.proper_tags) + manually_searched = search_result.manually_searched + info_hash = search_result.hash.lower() if search_result.hash else None + size = search_result.size - providerClass = searchResult.provider + providerClass = search_result.provider if providerClass is not None: provider = providerClass.name else: provider = "unknown" - 
action = Quality.composite_status(SNATCHED, searchResult.quality) + action = SNATCHED + ep_obj.quality = search_result.quality - resource = searchResult.name + resource = search_result.name - _logHistoryItem(action, ep_obj, quality, resource, - provider, version, proper_tags, manually_searched, info_hash, size) + _log_history_item(action, ep_obj, resource, + provider, version, proper_tags, manually_searched, info_hash, size) def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=-1): @@ -94,8 +91,6 @@ def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=- """ size = int(ep_obj.file_size) - quality = new_ep_quality - # store the release group as the provider if possible if release_group: provider = release_group @@ -104,10 +99,10 @@ def log_download(ep_obj, filename, new_ep_quality, release_group=None, version=- action = ep_obj.status - _logHistoryItem(action, ep_obj, quality, filename, provider, version, size=size) + _log_history_item(action, ep_obj, filename, provider, version, size=size) -def logSubtitle(ep_obj, status, subtitle_result): +def log_subtitle(ep_obj, subtitle_result): """ Log download of subtitle @@ -120,10 +115,7 @@ def logSubtitle(ep_obj, status, subtitle_result): resource = subtitle_result.language.opensubtitles provider = subtitle_result.provider_name - status, quality = Quality.split_composite_status(status) - action = Quality.composite_status(SUBTITLED, quality) - - _logHistoryItem(action, ep_obj, quality, resource, provider) + _log_history_item(SUBTITLED, ep_obj, resource, provider) def log_failed(ep_obj, release, provider=None): @@ -134,7 +126,4 @@ def log_failed(ep_obj, release, provider=None): :param release: Release group :param provider: Provider used for snatch """ - _, quality = Quality.split_composite_status(ep_obj.status) - action = Quality.composite_status(FAILED, quality) - - _logHistoryItem(action, ep_obj, quality, release, provider) + _log_history_item(FAILED, ep_obj, release, provider) diff --git a/medusa/naming.py b/medusa/naming.py index 6dd3e3b30d..5dbb16af75 100644 --- a/medusa/naming.py +++ b/medusa/naming.py @@ -103,7 +103,8 @@ def __init__(self, season, episode, absolute_number, name): # pylint: disable=s self.scene_episode = episode self.scene_absolute_number = absolute_number self.airdate = datetime.date(2010, 3, 9) - self.status = Quality.composite_status(common.DOWNLOADED, common.Quality.SDTV) + self.status = common.DOWNLOADED + self.quality = common.Quality.SDTV self.release_name = 'Show.Name.S02E03.HDTV.x264-RLSGROUP' self.is_proper = True self.series = TVShow() @@ -241,7 +242,8 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep = TVEpisode(2, 3, 3, "Ep Name") # pylint: disable=protected-access - ep.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + ep.status = DOWNLOADED + ep.quality = Quality.HDTV ep.airdate = datetime.date(2011, 3, 9) if abd: @@ -266,7 +268,8 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep.release_name = 'Show.Name.003-004.HDTV.x264-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name (2)") - secondEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + secondEp.status = DOWNLOADED + secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name ep.related_episodes.append(secondEp) @@ -274,11 +277,13 @@ def generate_sample_ep(multi=None, abd=False, sports=False, anime_type=None): ep.release_name = 'Show.Name.S02E03E04E05.HDTV.x264-RLSGROUP' secondEp = TVEpisode(2, 4, 4, "Ep Name 
(2)") - secondEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + secondEp.status = DOWNLOADED + secondEp.quality = Quality.HDTV secondEp.release_name = ep.release_name thirdEp = TVEpisode(2, 5, 5, "Ep Name (3)") - thirdEp.status = Quality.composite_status(DOWNLOADED, Quality.HDTV) + thirdEp.status = DOWNLOADED + thirdEp.quality = Quality.HDTV thirdEp.release_name = ep.release_name ep.related_episodes.append(secondEp) diff --git a/medusa/post_processor.py b/medusa/post_processor.py index 7003d38e29..364503aa45 100644 --- a/medusa/post_processor.py +++ b/medusa/post_processor.py @@ -739,23 +739,21 @@ def _get_ep_obj(self, series_obj, season, episodes): return root_ep - def _quality_from_status(self, status): - """ - Determine the quality of the file that is being post processed with its status. - - :param status: The status related to the file we are post processing - :return: A quality value found in common.Quality - """ - quality = common.Quality.UNKNOWN - - if status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER + common.Quality.SNATCHED_BEST: - _, quality = common.Quality.split_composite_status(status) - if quality != common.Quality.UNKNOWN: - self.log(u'The snatched status has a quality in it, using that: {0}'.format - (common.Quality.qualityStrings[quality]), logger.DEBUG) - return quality - - return quality + # def _quality_from_status(self, ep_obj): + # """ + # Determine the quality of the file that is being post processed with its status. + # + # :param ep_obj: episode object. + # :return: A quality value found in common.Quality + # """ + # + # if ep_obj.status in (common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST): + # if ep_obj.quality != common.Quality.UNKNOWN: + # self.log(u'The snatched status has a quality in it, using that: {0}'.format + # (common.Quality.qualityStrings[ep_obj.quality]), logger.DEBUG) + # return ep_obj.quality + # + # return common.UNKNOWN def _get_quality(self, ep_obj): """ @@ -792,19 +790,19 @@ def _get_quality(self, ep_obj): def _priority_from_history(self, series_obj, season, episodes, quality): """Evaluate if the file should be marked as priority.""" main_db_con = db.DBConnection() + snatched_statuses = [common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] for episode in episodes: # First: check if the episode status is snatched tv_episodes_result = main_db_con.select( - 'SELECT status ' + 'SELECT status, quality ' 'FROM tv_episodes ' 'WHERE indexer = ? ' 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND (status LIKE '%02' " - "OR status LIKE '%09' " - "OR status LIKE '%12')", - [series_obj.indexer, series_obj.series_id, season, episode] + 'AND status IN (?, ?, ?) ', + [series_obj.indexer, series_obj.series_id, + season, episode] + snatched_statuses ) if tv_episodes_result: @@ -817,11 +815,11 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND showid = ? ' 'AND season = ? ' 'AND episode = ? ' - "AND (action LIKE '%02' " - "OR action LIKE '%09' " - "OR action LIKE '%12') " + 'AND action IN (?, ?, ?) ' 'ORDER BY date DESC', - [series_obj.indexer, series_obj.series_id, season, episode]) + [series_obj.indexer, series_obj.series_id, + season, episode] + snatched_statuses + ) if history_result and history_result[0][b'quality'] == quality: # Third: make sure the file we are post-processing hasn't been @@ -841,9 +839,11 @@ def _priority_from_history(self, series_obj, season, episodes, quality): 'AND season = ? ' 'AND episode = ? ' 'AND quality = ? 
' - "AND action LIKE '%04' " + 'AND action = ? ' 'ORDER BY date DESC', - [series_obj.indexer, series_obj.series_id, season, episode, quality]) + [series_obj.indexer, series_obj.series_id, + season, episode, quality, common.DOWNLOADED] + ) if download_result: download_name = os.path.basename(download_result[0][b'resource']) @@ -897,8 +897,6 @@ def _should_process(current_quality, new_quality, allowed, preferred): :param preferred: Qualities that are preferred :return: Tuple with Boolean if the quality should be processed and String with reason if should process or not """ - if current_quality is common.Quality.NONE: - return False, 'There is no current quality. Skipping as we can only replace existing qualities' if new_quality in preferred: if current_quality in preferred: if new_quality > current_quality: @@ -1019,15 +1017,16 @@ def process(self): # retrieve/create the corresponding Episode objects ep_obj = self._get_ep_obj(series_obj, season, episodes) - _, old_ep_quality = common.Quality.split_composite_status(ep_obj.status) + old_ep_quality = ep_obj.quality # get the quality of the episode we're processing - if quality and common.Quality.qualityStrings[quality] != 'Unknown': + if quality and quality != common.Quality.UNKNOWN: self.log(u'The episode file has a quality in it, using that: {0}'.format (common.Quality.qualityStrings[quality]), logger.DEBUG) new_ep_quality = quality else: - new_ep_quality = self._quality_from_status(ep_obj.status) + # Fall back to the episode object's quality + new_ep_quality = ep_obj.quality # check snatched history to see if we should set the download as priority self._priority_from_history(series_obj, season, episodes, new_ep_quality) @@ -1156,7 +1155,8 @@ def process(self): else: cur_ep.release_name = u'' - cur_ep.status = common.Quality.composite_status(common.DOWNLOADED, new_ep_quality) + cur_ep.status = common.DOWNLOADED + cur_ep.quality = new_ep_quality cur_ep.subtitles = u'' diff --git a/medusa/process_tv.py b/medusa/process_tv.py index 907578babc..bde6c56b70 100644 --- a/medusa/process_tv.py +++ b/medusa/process_tv.py @@ -11,6 +11,7 @@ from medusa import app, db, failed_processor, helpers, logger, notifiers, post_processor from medusa.clients import torrent +from medusa.common import DOWNLOADED from medusa.helper.common import is_sync_file from medusa.helper.exceptions import EpisodePostProcessingFailedException, FailedPostProcessingFailedException, ex from medusa.name_parser.parser import InvalidNameException, InvalidShowException, NameParser @@ -498,9 +499,9 @@ def already_postprocessed(self, video_file): main_db_con = db.DBConnection() history_result = main_db_con.select( 'SELECT * FROM history ' - "WHERE action LIKE '%04' " + 'WHERE action = ? 
' 'AND resource LIKE ?', - ['%' + video_file]) + [DOWNLOADED, '%' + video_file]) if history_result: self.log("You're trying to post-process a file that has already " diff --git a/medusa/providers/generic_provider.py b/medusa/providers/generic_provider.py index dfb3654e04..55428e7e92 100644 --- a/medusa/providers/generic_provider.py +++ b/medusa/providers/generic_provider.py @@ -294,15 +294,6 @@ def find_search_results(self, series, episodes, search_mode, forced_search=False sorted_qualities = sorted(categorized_items, reverse=True) log.debug('Found qualities: {0}', sorted_qualities) - # move Quality.UNKNOWN to the end of the list - try: - sorted_qualities.remove(Quality.UNKNOWN) - except ValueError: - log.debug('No unknown qualities in results') - else: - sorted_qualities.append(Quality.UNKNOWN) - log.debug('Unknown qualities moved to end of results') - # chain items sorted by quality sorted_items = chain.from_iterable( categorized_items[quality] diff --git a/medusa/refiners/tv_episode.py b/medusa/refiners/tv_episode.py index db9baa23cc..df5fd96495 100644 --- a/medusa/refiners/tv_episode.py +++ b/medusa/refiners/tv_episode.py @@ -69,7 +69,7 @@ def refine(video, tv_episode=None, **kwargs): log.debug('Refining using Episode information.') enrich(EPISODE_MAPPING, video, tv_episode) enrich(ADDITIONAL_MAPPING, video, tv_episode, overwrite=False) - guess = Quality.to_guessit(tv_episode.status) + guess = Quality.to_guessit(tv_episode.quality) enrich({'resolution': guess.get('screen_size'), 'format': guess.get('format')}, video, overwrite=False) diff --git a/medusa/scene_numbering.py b/medusa/scene_numbering.py index 57f3200ba3..56494b7a58 100644 --- a/medusa/scene_numbering.py +++ b/medusa/scene_numbering.py @@ -225,8 +225,12 @@ def find_xem_numbering(series_obj, season, episode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_season, scene_episode FROM tv_episodes WHERE indexer = ? and showid = ? and season = ? and episode = ? and (scene_season or scene_episode) != 0", - [series_obj.indexer, series_obj.series_id, season, episode]) + "SELECT scene_season, scene_episode " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? and season = ? " + "and episode = ? and (scene_season or scene_episode) != 0", + [series_obj.indexer, series_obj.series_id, season, episode] + ) if rows: return int(rows[0][b"scene_season"]), int(rows[0][b"scene_episode"]) @@ -248,7 +252,10 @@ def find_xem_absolute_numbering(series_obj, absolute_number): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and absolute_number = ? and scene_absolute_number != 0", + "SELECT scene_absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? " + "and absolute_number = ? and scene_absolute_number != 0", [series_obj.indexer, series_obj.series_id, absolute_number]) if rows: @@ -271,7 +278,10 @@ def get_indexer_numbering_for_xem(series_obj, sceneSeason, sceneEpisode): main_db_con = db.DBConnection() rows = main_db_con.select( - "SELECT season, episode FROM tv_episodes WHERE indexer = ? and showid = ? and scene_season = ? and scene_episode = ?", + "SELECT season, episode " + "FROM tv_episodes " + "WHERE indexer = ? and showid = ? " + "and scene_season = ? 
and scene_episode = ?", [series_obj.indexer, series_obj.series_id, sceneSeason, sceneEpisode]) if rows: @@ -296,11 +306,17 @@ def get_indexer_absolute_numbering_for_xem(series_obj, sceneAbsoluteNumber, scen main_db_con = db.DBConnection() if scene_season is None: rows = main_db_con.select( - "SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ?", + "SELECT absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? AND showid = ? " + "AND scene_absolute_number = ?", [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber]) else: rows = main_db_con.select( - "SELECT absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number = ? and scene_season = ?", + "SELECT absolute_number " + "FROM tv_episodes " + "WHERE indexer = ? " + "AND showid = ? AND scene_absolute_number = ? and scene_season = ?", [series_obj.indexer, series_obj.series_id, sceneAbsoluteNumber, scene_season]) if rows: @@ -349,8 +365,11 @@ def get_xem_numbering_for_show(series_obj, refresh_data=True): main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT season, episode, scene_season, scene_episode FROM tv_episodes ' - 'WHERE indexer = ? and showid = ? and (scene_season or scene_episode) != 0 ORDER BY season, episode', + 'SELECT season, episode, scene_season, scene_episode ' + 'FROM tv_episodes ' + 'WHERE indexer = ? AND showid = ? ' + 'AND (scene_season or scene_episode) != 0 ' + 'ORDER BY season, episode', [series_obj.indexer, series_obj.series_id] ) @@ -404,7 +423,10 @@ def get_xem_absolute_numbering_for_show(series_obj): result = {} main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT absolute_number, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ORDER BY absolute_number', + 'SELECT absolute_number, scene_absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? and showid = ? and scene_absolute_number != 0 ' + 'ORDER BY absolute_number', [series_obj.indexer, series_obj.series_id]) for row in rows: @@ -517,7 +539,9 @@ def fix_xem_numbering(series_obj): # pylint:disable=too-many-locals, too-many-b main_db_con = db.DBConnection() rows = main_db_con.select( - 'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number FROM tv_episodes WHERE indexer = ? and showid = ?', + 'SELECT season, episode, absolute_number, scene_season, scene_episode, scene_absolute_number ' + 'FROM tv_episodes ' + 'WHERE indexer = ? AND showid = ?', [series_obj.indexer, series_obj.series_id]) last_absolute_number = None diff --git a/medusa/search/backlog.py b/medusa/search/backlog.py index 1444e1a696..12311bc9e9 100644 --- a/medusa/search/backlog.py +++ b/medusa/search/backlog.py @@ -9,7 +9,8 @@ from builtins import object from builtins import str -from medusa import app, common, db, scheduler, ui +from medusa import app, db, scheduler, ui +from medusa.common import Quality, UNSET from medusa.helper.common import episode_num from medusa.logger.adapters.style import BraceAdapter from medusa.search.queue import BacklogQueueItem @@ -158,7 +159,7 @@ def _get_segments(series_obj, from_date): con = db.DBConnection() sql_results = con.select( - 'SELECT status, season, episode, manually_searched ' + 'SELECT status, quality, season, episode, manually_searched ' 'FROM tv_episodes ' 'WHERE airdate > ?' ' AND indexer = ? 
' @@ -167,19 +168,21 @@ def _get_segments(series_obj, from_date): ) # check through the list of statuses to see if we want any - for sql_result in sql_results: - should_search, shold_search_reason = common.Quality.should_search(sql_result[b'status'], series_obj, - sql_result[b'manually_searched']) + for episode in sql_results: + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or Quality.NA) + should_search, should_search_reason = Quality.should_search( + cur_status, cur_quality, series_obj, episode[b'manually_searched'] + ) if not should_search: continue log.debug( u'Found needed backlog episodes for: {show} {ep}. Reason: {reason}', { 'show': series_obj.name, - 'ep': episode_num(sql_result[b'season'], sql_result[b'episode']), - 'reason': shold_search_reason, + 'ep': episode_num(episode[b'season'], episode[b'episode']), + 'reason': should_search_reason, } ) - ep_obj = series_obj.get_episode(sql_result[b'season'], sql_result[b'episode']) + ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode']) if ep_obj.season not in wanted: wanted[ep_obj.season] = [ep_obj] diff --git a/medusa/search/core.py b/medusa/search/core.py index ce35095059..63d571f826 100644 --- a/medusa/search/core.py +++ b/medusa/search/core.py @@ -189,9 +189,11 @@ def snatch_episode(result): for curEpObj in result.episodes: with curEpObj.lock: if is_first_best_match(result): - curEpObj.status = Quality.composite_status(SNATCHED_BEST, result.quality) + curEpObj.status = SNATCHED_BEST + curEpObj.quality = result.quality else: - curEpObj.status = Quality.composite_status(end_status, result.quality) + curEpObj.status = end_status + curEpObj.quality = result.quality # Reset all others fields to the snatched status # New snatch by default doesn't have nfo/tbn curEpObj.hasnfo = False @@ -217,7 +219,7 @@ def snatch_episode(result): sql_l.append(curEpObj.get_sql()) - if curEpObj.splitted_status_status != common.DOWNLOADED: + if curEpObj.status != common.DOWNLOADED: notify_message = curEpObj.formatted_filename(u'%SN - %Sx%0E - %EN - %QN') if all([app.SEEDERS_LEECHERS_IN_NOTIFY, result.seeders not in (-1, None), result.leechers not in (-1, None)]): @@ -392,7 +394,7 @@ def wanted_episodes(series_obj, from_date): con = db.DBConnection() sql_results = con.select( - 'SELECT status, season, episode, manually_searched ' + 'SELECT status, quality, season, episode, manually_searched ' 'FROM tv_episodes ' 'WHERE indexer = ? ' ' AND showid = ?' @@ -402,21 +404,23 @@ def wanted_episodes(series_obj, from_date): ) # check through the list of statuses to see if we want any - for result in sql_results: - _, cur_quality = common.Quality.split_composite_status(int(result[b'status'] or UNSET)) - should_search, should_search_reason = Quality.should_search(result[b'status'], series_obj, result[b'manually_searched']) + for episode in sql_results: + cur_status, cur_quality = int(episode[b'status'] or UNSET), int(episode[b'quality'] or Quality.NA) + should_search, should_search_reason = Quality.should_search( + cur_status, cur_quality, series_obj, episode[b'manually_searched'] + ) if not should_search: continue else: log.debug( u'Searching for {show} {ep}. 
Reason: {reason}', { u'show': series_obj.name, - u'ep': episode_num(result[b'season'], result[b'episode']), + u'ep': episode_num(episode[b'season'], episode[b'episode']), u'reason': should_search_reason, } ) - ep_obj = series_obj.get_episode(result[b'season'], result[b'episode']) - ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality and i != Quality.UNKNOWN] + ep_obj = series_obj.get_episode(episode[b'season'], episode[b'episode']) + ep_obj.wanted_quality = [i for i in all_qualities if i > cur_quality] wanted.append(ep_obj) return wanted @@ -682,7 +686,7 @@ def search_providers(series_obj, episodes, forced_search=False, down_cur_quality highest_quality_overall = 0 for cur_episode in found_results[cur_provider.name]: for cur_result in found_results[cur_provider.name][cur_episode]: - if cur_result.quality != Quality.UNKNOWN and cur_result.quality > highest_quality_overall: + if cur_result.quality > highest_quality_overall: highest_quality_overall = cur_result.quality log.debug(u'The highest quality of any match is {0}', Quality.qualityStrings[highest_quality_overall]) diff --git a/medusa/search/daily.py b/medusa/search/daily.py index cafb7e0a9b..de499da56d 100644 --- a/medusa/search/daily.py +++ b/medusa/search/daily.py @@ -96,11 +96,11 @@ def run(self, force=False): # pylint:disable=too-many-branches cur_ep = series_obj.get_episode(db_episode[b'season'], db_episode[b'episode']) with cur_ep.lock: - cur_ep.splitted_status_status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED + cur_ep.status = series_obj.default_ep_status if cur_ep.season else common.SKIPPED log.info( 'Setting status ({status}) for show airing today: {name} {special}', { 'name': cur_ep.pretty_name(), - 'status': common.statusStrings[cur_ep.splitted_status_status], + 'status': common.statusStrings[cur_ep.status], 'special': '(specials are not supported)' if not cur_ep.season else '', } ) diff --git a/medusa/search/manual.py b/medusa/search/manual.py index f1139c380d..7c130a872f 100644 --- a/medusa/search/manual.py +++ b/medusa/search/manual.py @@ -36,9 +36,8 @@ def get_quality_class(ep_obj): """Find the quality class for the episode.""" - _, ep_quality = Quality.split_composite_status(ep_obj.status) - if ep_quality in Quality.cssClassStrings: - quality_class = Quality.cssClassStrings[ep_quality] + if ep_obj.quality in Quality.cssClassStrings: + quality_class = Quality.cssClassStrings[ep_obj.quality] else: quality_class = Quality.cssClassStrings[Quality.UNKNOWN] @@ -103,9 +102,10 @@ def get_episodes(search_thread, searchstatus): 'season': ep.season, 'searchstatus': searchstatus, 'status': statusStrings[ep.status], - 'quality': get_quality_class(ep), + 'quality_name': Quality.qualityStrings[ep.quality], + 'quality_style': get_quality_class(ep), 'overview': Overview.overviewStrings[series_obj.get_overview( - ep.status, + ep.status, ep.quality, manually_searched=ep.manually_searched )], }) @@ -265,8 +265,7 @@ def get_provider_cache_results(series_obj, show_all_results=None, perform_search # Check if we have the combined sql strings if combined_sql_q: sql_prepend = b"SELECT * FROM (" - sql_append = b") ORDER BY CASE quality WHEN '{quality_unknown}' THEN -1 ELSE CAST(quality AS DECIMAL) END DESC, " \ - b" proper_tags DESC, seeders DESC".format(quality_unknown=Quality.UNKNOWN) + sql_append = b") ORDER BY quality DESC, proper_tags DESC, seeders DESC" # Add all results sql_total += main_db_con.select(b'{0} {1} {2}'. 
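Reviewer note on the ORDER BY rewrite in manual.py above: the CASE expression that demoted UNKNOWN (old code 32768) is no longer needed because the renumbered quality codes sort correctly on their own. A minimal, self-contained sketch using the old and new values introduced by this patch:

old = {'SDTV': 1, 'FULLHDTV': 1 << 4, 'UNKNOWN': 1 << 15}
new = {'UNKNOWN': 1, 'SDTV': 1 << 1, 'FULLHDTV': 1 << 5}

# Old codes: UNKNOWN out-sorts every real quality in a plain descending sort.
assert max(old.values()) == old['UNKNOWN']

# New codes: UNKNOWN is the lowest value, so 'ORDER BY quality DESC' just works.
assert min(new.values()) == new['UNKNOWN']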
diff --git a/medusa/search/proper.py b/medusa/search/proper.py index 30ecb7f93b..e5c1500639 100644 --- a/medusa/search/proper.py +++ b/medusa/search/proper.py @@ -15,7 +15,7 @@ from builtins import str from medusa import app, db, helpers -from medusa.common import Quality, cpu_presets +from medusa.common import DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, SUBTITLED, cpu_presets from medusa.helper.common import enabled_providers from medusa.helper.exceptions import AuthException, ex from medusa.logger.adapters.style import BraceAdapter @@ -96,20 +96,19 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran main_db_con = db.DBConnection() if not app.POSTPONE_IF_NO_SUBS: # Get the recently aired (last 2 days) shows from DB - search_q_params = ','.join('?' for _ in Quality.DOWNLOADED) recently_aired = main_db_con.select( b'SELECT indexer, showid, season, episode, status, airdate' b' FROM tv_episodes' b' WHERE airdate >= ?' - b' AND status IN ({0})'.format(search_q_params), - [search_date.toordinal()] + Quality.DOWNLOADED + b' AND status = ?', + [search_date.toordinal(), DOWNLOADED] ) else: # Get recently subtitled episodes (last 2 days) from DB # Episode status becomes downloaded only after found subtitles last_subtitled = search_date.strftime(History.date_format) recently_aired = main_db_con.select(b'SELECT indexer_id AS indexer, showid, season, episode FROM history ' - b"WHERE date >= ? AND action LIKE '%10'", [last_subtitled]) + b'WHERE date >= ? AND action = ?', [last_subtitled, SUBTITLED]) if not recently_aired: log.info('No recently aired new episodes, nothing to search for') @@ -228,14 +227,15 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran # check if we have the episode as DOWNLOADED main_db_con = db.DBConnection() - sql_results = main_db_con.select(b"SELECT status, release_name " + sql_results = main_db_con.select(b"SELECT status, quality, release_name " b"FROM tv_episodes WHERE indexer = ? " b"AND showid = ? AND season = ? " - b"AND episode = ? AND status LIKE '%04'", + b"AND episode = ? AND status = ?", [best_result.indexer, best_result.series.indexerid, best_result.actual_season, - best_result.actual_episodes[0]]) + best_result.actual_episodes[0], + DOWNLOADED]) if not sql_results: log.info("Ignoring proper because this episode doesn't have 'DOWNLOADED' status: {name}", { 'name': best_result.name @@ -243,7 +243,7 @@ def _get_proper_results(self): # pylint: disable=too-many-locals, too-many-bran continue # only keep the proper if we have already downloaded an episode with the same quality - _, old_quality = Quality.split_composite_status(int(sql_results[0][b'status'])) + old_quality = int(sql_results[0][b'quality']) if old_quality != best_result.quality: log.info('Ignoring proper because quality is different: {name}', {'name': best_result.name}) if cur_proper.name not in processed_propers_names: @@ -339,9 +339,10 @@ def _download_propers(self, proper_list): b'AND episode = ? ' b'AND quality = ? ' b'AND date >= ? 
' - b"AND (action LIKE '%02' OR action LIKE '%04' OR action LIKE '%09' OR action LIKE '%12')", + b'AND action in (?, ?, ?, ?)', [cur_proper.indexerid, cur_proper.actual_season, cur_proper.actual_episode, cur_proper.quality, - history_limit.strftime(History.date_format)]) + history_limit.strftime(History.date_format), + DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]) # make sure that none of the existing history downloads are the same proper we're trying to download # if the result exists in history already we need to skip it diff --git a/medusa/server/api/v1/core.py b/medusa/server/api/v1/core.py index 70c6d23c8d..ba9455c3fb 100644 --- a/medusa/server/api/v1/core.py +++ b/medusa/server/api/v1/core.py @@ -36,8 +36,8 @@ process_tv, sbdatetime, subtitles, ui, ) from medusa.common import ( - ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_PROPER, - UNAIRED, UNSET, WANTED, statusStrings, + ARCHIVED, DOWNLOADED, FAILED, IGNORED, Overview, Quality, SKIPPED, SNATCHED, SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, UNSET, WANTED, statusStrings, ) from medusa.helper.common import ( dateFormat, dateTimeFormat, pretty_file_size, sanitize_filename, @@ -723,7 +723,7 @@ def run(self): main_db_con = db.DBConnection(row_type='dict') sql_results = main_db_con.select( - 'SELECT name, description, airdate, status, location, file_size, release_name, subtitles ' + 'SELECT name, description, airdate, status, quality, location, file_size, release_name, subtitles ' 'FROM tv_episodes WHERE indexer = ? AND showid = ? AND episode = ? AND season = ?', [INDEXER_TVDBV2, self.indexerid, self.e, self.s]) if not len(sql_results) == 1: @@ -752,7 +752,7 @@ def run(self): else: episode[b'airdate'] = 'Never' - status, quality = Quality.split_composite_status(int(episode[b'status'])) + status, quality = int(episode[b'status']), int(episode[b'quality']) episode[b'status'] = statusStrings[status] episode[b'quality'] = get_quality_string(quality) episode[b'file_size_human'] = pretty_file_size(episode[b'file_size']) @@ -803,10 +803,8 @@ def run(self): # return the correct json value if ep_queue_item.success: - _, quality = Quality.split_composite_status(ep_obj.status) - # TODO: split quality and status? 
- return _responds(RESULT_SUCCESS, {'quality': get_quality_string(quality)}, - 'Snatched ({0})'.format(get_quality_string(quality))) + return _responds(RESULT_SUCCESS, {'quality': get_quality_string(ep_obj.quality)}, + 'Snatched ({0})'.format(get_quality_string(ep_obj.quality))) return _responds(RESULT_FAILURE, msg='Unable to find episode') @@ -897,7 +895,7 @@ def _ep_result(result_code, ep, msg=''): continue # allow the user to force setting the status for an already downloaded episode - if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED and not self.force: + if ep_obj.status in [DOWNLOADED, ARCHIVED] and not self.force: ep_results.append( _ep_result( RESULT_FAILURE, ep_obj, @@ -1075,19 +1073,18 @@ def convert_date(history_date): History.date_format ).strftime(dateTimeFormat) - composite = Quality.split_composite_status(cur_item.action) - if cur_type in (statusStrings[composite.status].lower(), None): + if cur_type in (statusStrings[cur_item.action].lower(), None): return { 'date': convert_date(cur_item.date), 'episode': cur_item.episode, 'indexerid': cur_item.show_id, 'provider': cur_item.provider, - 'quality': get_quality_string(composite.quality), + 'quality': get_quality_string(cur_item.quality), 'resource': os.path.basename(cur_item.resource), 'resource_path': os.path.dirname(cur_item.resource), 'season': cur_item.season, 'show_name': cur_item.show_name, - 'status': statusStrings[composite.status], + 'status': statusStrings[cur_item.action], # Add tvdbid for backward compatibility # TODO: Make this actual tvdb id for other indexers 'tvdbid': cur_item.show_id, @@ -1187,7 +1184,9 @@ def run(self): for cur_result in sql_results: - cur_ep_cat = cur_show.get_overview(cur_result[b'status'], manually_searched=cur_result[b'manually_searched']) + cur_ep_cat = cur_show.get_overview( + cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched'] + ) if cur_ep_cat and cur_ep_cat in (Overview.WANTED, Overview.QUAL): show_eps.append(cur_result) @@ -2542,12 +2541,12 @@ def run(self): if self.season is None: sql_results = main_db_con.select( - 'SELECT name, episode, airdate, status, release_name, season, location, file_size, subtitles ' + 'SELECT name, episode, airdate, status, quality, release_name, season, location, file_size, subtitles ' 'FROM tv_episodes WHERE indexer = ? AND showid = ?', [INDEXER_TVDBV2, self.indexerid]) seasons = {} for row in sql_results: - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) row[b'status'] = statusStrings[status] row[b'quality'] = get_quality_string(quality) if try_int(row[b'airdate'], 1) > 693595: # 1900 @@ -2566,8 +2565,8 @@ def run(self): else: sql_results = main_db_con.select( - 'SELECT name, episode, airdate, status, location, file_size, release_name, subtitles' - ' FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? ', + 'SELECT name, episode, airdate, status, quality, location, file_size, release_name, subtitles' + ' FROM tv_episodes WHERE indexer = ? AND showid = ? 
AND season = ?', [INDEXER_TVDBV2, self.indexerid, self.season]) if not sql_results: return _responds(RESULT_FAILURE, msg='Season not found') @@ -2575,7 +2574,7 @@ def run(self): for row in sql_results: cur_episode = int(row[b'episode']) del row[b'episode'] - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) row[b'status'] = statusStrings[status] row[b'quality'] = get_quality_string(quality) if try_int(row[b'airdate'], 1) > 693595: # 1900 @@ -2666,81 +2665,78 @@ def run(self): # show stats episode_status_counts_total = {'total': 0} - for status in statusStrings: - if status in [UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, ARCHIVED]: - continue - episode_status_counts_total[status] = 0 + for status_code in statusStrings: + if status_code not in (UNSET, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED): + episode_status_counts_total[status_code] = 0 # add all the downloaded qualities episode_qualities_counts_download = {'total': 0} - for statusCode in Quality.DOWNLOADED + Quality.ARCHIVED: - status, quality = Quality.split_composite_status(statusCode) - if quality in [Quality.NONE]: - continue - episode_qualities_counts_download[statusCode] = 0 + for status_code in (DOWNLOADED, ARCHIVED): + episode_qualities_counts_download[status_code] = {} # add all snatched qualities episode_qualities_counts_snatch = {'total': 0} - for statusCode in Quality.SNATCHED + Quality.SNATCHED_PROPER: - status, quality = Quality.split_composite_status(statusCode) - if quality in [Quality.NONE]: - continue - episode_qualities_counts_snatch[statusCode] = 0 + for status_code in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): + episode_qualities_counts_snatch[status_code] = {} main_db_con = db.DBConnection(row_type='dict') - sql_results = main_db_con.select('SELECT status, season FROM tv_episodes ' + sql_results = main_db_con.select('SELECT status, quality, season FROM tv_episodes ' 'WHERE season != 0 AND indexer = ? 
AND showid = ?', [INDEXER_TVDBV2, self.indexerid]) + # the main loop that goes through all episodes for row in sql_results: - status, quality = Quality.split_composite_status(int(row[b'status'])) + status, quality = int(row[b'status']), int(row[b'quality']) episode_status_counts_total['total'] += 1 - if status in Quality.DOWNLOADED + Quality.ARCHIVED: + if status in (DOWNLOADED, ARCHIVED): episode_qualities_counts_download['total'] += 1 - episode_qualities_counts_download[int(row[b'status'])] += 1 - elif status in Quality.SNATCHED + Quality.SNATCHED_PROPER: + if quality not in episode_qualities_counts_download[status]: + episode_qualities_counts_download[status][quality] = 1 + else: + episode_qualities_counts_download[status][quality] += 1 + elif status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): episode_qualities_counts_snatch['total'] += 1 - episode_qualities_counts_snatch[int(row[b'status'])] += 1 - elif status == 0: # we don't count NONE = 0 = N/A - pass - else: + if quality not in episode_qualities_counts_snatch[status]: + episode_qualities_counts_snatch[status][quality] = 1 + else: + episode_qualities_counts_snatch[status][quality] += 1 + elif status not in (UNSET, ): episode_status_counts_total[status] += 1 # the outgoing container episodes_stats = {'downloaded': {}} # turning codes into strings - for statusCode in episode_qualities_counts_download: - if statusCode == 'total': - episodes_stats['downloaded']['total'] = episode_qualities_counts_download[statusCode] + for status in episode_qualities_counts_download: + if status == 'total': + episodes_stats['downloaded']['total'] = episode_qualities_counts_download[status] continue - status, quality = Quality.split_composite_status(int(statusCode)) - status_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', '').replace(')', '') - episodes_stats['downloaded'][status_string] = episode_qualities_counts_download[statusCode] + for quality in episode_qualities_counts_download[status]: + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') + if quality_string not in episodes_stats['downloaded']: + episodes_stats['downloaded'][quality_string] = episode_qualities_counts_download[status][quality] + else: + episodes_stats['downloaded'][quality_string] += episode_qualities_counts_download[status][quality] episodes_stats['snatched'] = {} - # turning codes into strings - # and combining proper and normal - for statusCode in episode_qualities_counts_snatch: - if statusCode == 'total': - episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[statusCode] + for status in episode_qualities_counts_snatch: + if status == 'total': + episodes_stats['snatched']['total'] = episode_qualities_counts_snatch[status] continue - status, quality = Quality.split_composite_status(int(statusCode)) - status_string = Quality.qualityStrings[quality].lower().replace(' ', '_').replace('(', '').replace(')', '') - if Quality.qualityStrings[quality] in episodes_stats['snatched']: - episodes_stats['snatched'][status_string] += episode_qualities_counts_snatch[statusCode] - else: - episodes_stats['snatched'][status_string] = episode_qualities_counts_snatch[statusCode] + for quality in episode_qualities_counts_snatch[status]: + quality_string = Quality.qualityStrings[quality].lower().replace(' ', '_') + if quality_string not in episodes_stats['snatched']: + episodes_stats['snatched'][quality_string] = episode_qualities_counts_snatch[status][quality] + else: + episodes_stats['snatched'][quality_string] += 
episode_qualities_counts_snatch[status][quality] - # episodes_stats["total"] = {} - for statusCode in episode_status_counts_total: - if statusCode == 'total': - episodes_stats['total'] = episode_status_counts_total[statusCode] + for status in episode_status_counts_total: + if status == 'total': + episodes_stats['total'] = episode_status_counts_total[status] continue - status_string = statusStrings[statusCode].lower().replace(' ', '_').replace('(', '').replace( - ')', '') - episodes_stats[status_string] = episode_status_counts_total[statusCode] + status_string = statusStrings[status].lower().replace(' ', '_').replace('(', '').replace(')', '') + episodes_stats[status_string] = episode_status_counts_total[status] return _responds(RESULT_SUCCESS, episodes_stats) diff --git a/medusa/server/api/v2/config.py b/medusa/server/api/v2/config.py index 11fb884acb..4155698a05 100644 --- a/medusa/server/api/v2/config.py +++ b/medusa/server/api/v2/config.py @@ -111,7 +111,7 @@ def get(self, identifier, path_param=None): config_data['localUser'] = app.OS_USER or 'Unknown' config_data['programDir'] = app.PROG_DIR config_data['configFile'] = app.CONFIG_FILE - config_data['dbFilename'] = db.dbFilename() + config_data['dbPath'] = db.DBConnection().path config_data['cacheDir'] = app.CACHE_DIR config_data['logDir'] = app.LOG_DIR config_data['appArgs'] = app.MY_ARGS diff --git a/medusa/server/api/v2/stats.py b/medusa/server/api/v2/stats.py index e86f102b01..4d766cafcf 100644 --- a/medusa/server/api/v2/stats.py +++ b/medusa/server/api/v2/stats.py @@ -6,9 +6,13 @@ from medusa import db from medusa.common import ( + ARCHIVED, + DOWNLOADED, FAILED, - Quality, SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, WANTED ) @@ -36,8 +40,8 @@ def get(self, identifier, path_param=None): """ main_db_con = db.DBConnection() - snatched = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST - downloaded = Quality.DOWNLOADED + Quality.ARCHIVED + snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] + downloaded = [DOWNLOADED, ARCHIVED] # FIXME: This inner join is not multi indexer friendly. sql_result = main_db_con.select( diff --git a/medusa/server/web/home/handler.py b/medusa/server/web/home/handler.py index caed532342..002d420b51 100644 --- a/medusa/server/web/home/handler.py +++ b/medusa/server/web/home/handler.py @@ -175,8 +175,8 @@ def index(self): def show_statistics(): main_db_con = db.DBConnection() - snatched = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST - downloaded = Quality.DOWNLOADED + Quality.ARCHIVED + snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] + downloaded = [DOWNLOADED, ARCHIVED] # FIXME: This inner join is not multi indexer friendly. 
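Reviewer note: wherever a LIKE on composite digits is replaced by a status list in this diff, the list is bound with one '?' placeholder per code, as in the sketch below. Self-contained and runnable against an in-memory SQLite table; the status values follow the LIKE '%02'/'%09'/'%12' patterns being removed:

import sqlite3

SNATCHED, SNATCHED_PROPER, SNATCHED_BEST = 2, 9, 12
snatched = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE tv_episodes (status INTEGER)')
con.executemany('INSERT INTO tv_episodes VALUES (?)', [(2,), (4,), (9,)])

placeholders = ','.join(['?'] * len(snatched))
row = con.execute(
    'SELECT COUNT(*) FROM tv_episodes WHERE status IN ({0})'.format(placeholders),
    snatched,
).fetchone()
assert row[0] == 2  # counts SNATCHED (2) and SNATCHED_PROPER (9), not DOWNLOADED (4)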
sql_result = main_db_con.select( @@ -928,7 +928,7 @@ def displayShow(self, indexername=None, seriesid=None, ): ep_cats = {} for cur_result in sql_results: - cur_ep_cat = series_obj.get_overview(cur_result[b'status'], manually_searched=cur_result[b'manually_searched']) + cur_ep_cat = series_obj.get_overview(cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: ep_cats['{season}x{episode}'.format(season=cur_result[b'season'], episode=cur_result[b'episode'])] = cur_ep_cat ep_counts[cur_ep_cat] += 1 @@ -1277,19 +1277,20 @@ def titler(x): try: main_db_con = db.DBConnection() episode_status_result = main_db_con.action( - b'SELECT date, action, provider, resource, size ' + b'SELECT date, action, quality, provider, resource, size ' b'FROM history ' b'WHERE indexer_id = ? ' b'AND showid = ? ' b'AND season = ? ' b'AND episode = ? ' - b'AND (action LIKE \'%02\' OR action LIKE \'%04\' OR action LIKE \'%09\' OR action LIKE \'%11\' OR action LIKE \'%12\') ' + b'AND action in (?, ?, ?, ?, ?) ' b'ORDER BY date DESC', - [indexer_id, series_id, season, episode] + [indexer_id, series_id, season, episode, + DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED] ) episode_history = [dict(row) for row in episode_status_result] for i in episode_history: - i['status'], i['quality'] = Quality.split_composite_status(i['action']) + i['status'] = i['action'] i['action_date'] = sbdatetime.sbfdatetime(datetime.strptime(str(i['date']), History.date_format), show_seconds=True) i['resource_file'] = os.path.basename(i['resource']) i['pretty_size'] = pretty_file_size(i['size']) if i['size'] > -1 else 'N/A' @@ -1368,7 +1369,7 @@ def titler(x): ep_cats = {} for cur_result in sql_results: - cur_ep_cat = series_obj.get_overview(cur_result[b'status'], + cur_ep_cat = series_obj.get_overview(cur_result[b'status'], cur_result[b'quality'], manually_searched=cur_result[b'manually_searched']) if cur_ep_cat: ep_cats['{season}x{episode}'.format(season=cur_result[b'season'], @@ -1908,11 +1909,7 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire else: return self._genericMessage('Error', error_message) - # statusStrings is a custom type. Which does some "magic" itself. But we want to move away from this. - # FIXME: Always check status with status and quality with quality. 
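Reviewer note: the removal below drops the unpacking of the status half of a composite code before validation; after this patch the posted value is already a bare status and a plain int() cast suffices. A sketch of the old decode, assuming the namedtuple-style return value ('.status') that the removed call implies:

from collections import namedtuple

Composite = namedtuple('Composite', ['status', 'quality'])

def old_split_composite_status(composite):
    # old scheme: composite == status + quality * 100, with status < 100
    return Composite(composite % 100, composite // 100)

assert old_split_composite_status(404) == Composite(status=4, quality=4)  # DOWNLOADED at old HDTV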
- status_with_quality = status - status = Quality.split_composite_status(status).status - + status = int(status) if status not in statusStrings: error_message = 'Invalid status' if direct: @@ -1972,24 +1969,24 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire series=series_obj.name, episode=cur_ep), logger.WARNING) continue - snatched_qualities = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + snatched_qualities = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] if status == DOWNLOADED and not ( - ep_obj.status in snatched_qualities + Quality.DOWNLOADED + ep_obj.status in snatched_qualities + [DOWNLOADED] and os.path.isfile(ep_obj.location)): logger.log('Refusing to change status of {series} {episode} to DOWNLOADED' - ' because it\'s not SNATCHED/DOWNLOADED'.format( + ' because it\'s not SNATCHED/DOWNLOADED or the file is missing'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) continue - if status == FAILED and ep_obj.status not in snatched_qualities + Quality.DOWNLOADED + Quality.ARCHIVED: + if status == FAILED and ep_obj.status not in snatched_qualities + [DOWNLOADED, ARCHIVED]: logger.log('Refusing to change status of {series} {episode} to FAILED' ' because it\'s not SNATCHED/DOWNLOADED/ARCHIVED'.format( series=series_obj.name, episode=cur_ep), logger.WARNING) continue if status == WANTED: - if ep_obj.status in Quality.DOWNLOADED + Quality.ARCHIVED: + if ep_obj.status in [DOWNLOADED, ARCHIVED]: logger.log('Removing release_name of {series} {episode} as episode was changed to WANTED'.format( series=series_obj.name, episode=cur_ep), logger.DEBUG) ep_obj.release_name = '' @@ -2001,9 +1998,8 @@ def setStatus(self, indexername=None, seriesid=None, eps=None, status=None, dire ep_obj.manually_searched = False # Only in failed_history we set to FAILED. 
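Reviewer note on the comment above and the removal below: the handler still defers to failed_history.mark_failed (earlier in this diff) as the only place that assigns FAILED, and mark_failed now does so without packing in a quality. A tiny before/after sketch; FAILED = 11 per the "action LIKE '%11'" pattern replaced earlier in this file:

FAILED = 11
OLD_HDTV = 1 << 2

old_action = FAILED + OLD_HDTV * 100  # composite 411, matched by "action LIKE '%11'"
new_action = FAILED                   # bare status; quality now lives in its own column

assert old_action % 100 == new_action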
- # We need current snatched quality to log 'quality' column in failed action in history if status != FAILED: - ep_obj.status = status_with_quality + ep_obj.status = status # mass add to database sql_l.append(ep_obj.get_sql()) @@ -2139,7 +2135,8 @@ def doRename(self, indexername=None, seriesid=None, eps=None): ep_info = cur_ep.split('x') - # this is probably the worst possible way to deal with double eps but I've kinda painted myself into a corner here with this stupid database + # this is probably the worst possible way to deal with double eps + # but I've kinda painted myself into a corner here with this stupid database ep_result = main_db_con.select( b'SELECT location ' b'FROM tv_episodes ' diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py index bd4287d05f..f860d17892 100644 --- a/medusa/server/web/manage/handler.py +++ b/medusa/server/web/manage/handler.py @@ -20,9 +20,11 @@ ui, ) from medusa.common import ( + DOWNLOADED, Overview, - Quality, SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, ) from medusa.helper.common import ( episode_num, @@ -58,7 +60,7 @@ def index(self): def showEpisodeStatuses(indexername, seriesid, whichStatus): status_list = [int(whichStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] main_db_con = db.DBConnection() cur_show_results = main_db_con.select( @@ -86,7 +88,7 @@ def episodeStatuses(self, whichStatus=None): if whichStatus: status_list = [int(whichStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] else: status_list = [] @@ -136,7 +138,7 @@ def episodeStatuses(self, whichStatus=None): def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs): status_list = [int(oldStatus)] if status_list[0] == SNATCHED: - status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST] to_change = {} @@ -184,9 +186,9 @@ def showSubtitleMissed(indexer, seriesid, whichSubs): b'WHERE indexer = ? ' b'AND showid = ? ' b'AND season != 0 ' - b'AND status LIKE \'%4\' ' - b'AND location != \'\'', - [int(indexer), int(seriesid)] + b'AND status = ? ' + b"AND location != ''", + [int(indexer), int(seriesid), DOWNLOADED] ) result = {} @@ -225,12 +227,13 @@ def subtitleMissed(self, whichSubs=None): b'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles ' b'FROM tv_episodes, tv_shows ' b'WHERE tv_shows.subtitles = 1 ' - b'AND tv_episodes.status LIKE \'%4\' ' + b'AND tv_episodes.status = ? ' b'AND tv_episodes.season != 0 ' - b'AND tv_episodes.location != \'\' ' + b"AND tv_episodes.location != '' " b'AND tv_episodes.showid = tv_shows.indexer_id ' b'AND tv_episodes.indexer = tv_shows.indexer ' - b'ORDER BY show_name' + b'ORDER BY show_name', + [DOWNLOADED] ) ep_counts = {} @@ -284,12 +287,12 @@ def downloadSubtitleMissed(self, *args, **kwargs): all_eps_results = main_db_con.select( b'SELECT season, episode ' b'FROM tv_episodes ' - b'WHERE status LIKE \'%4\' ' + b'WHERE status = ? ' b'AND season != 0 ' b'AND indexer = ? ' b'AND showid = ? 
diff --git a/medusa/server/web/manage/handler.py b/medusa/server/web/manage/handler.py
index bd4287d05f..f860d17892 100644
--- a/medusa/server/web/manage/handler.py
+++ b/medusa/server/web/manage/handler.py
@@ -20,9 +20,11 @@
     ui,
 )
 from medusa.common import (
+    DOWNLOADED,
     Overview,
-    Quality,
     SNATCHED,
+    SNATCHED_BEST,
+    SNATCHED_PROPER,
 )
 from medusa.helper.common import (
     episode_num,
@@ -58,7 +60,7 @@ def index(self):
     def showEpisodeStatuses(indexername, seriesid, whichStatus):
         status_list = [int(whichStatus)]
         if status_list[0] == SNATCHED:
-            status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
+            status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]

         main_db_con = db.DBConnection()
         cur_show_results = main_db_con.select(
@@ -86,7 +88,7 @@ def episodeStatuses(self, whichStatus=None):
         if whichStatus:
             status_list = [int(whichStatus)]
             if status_list[0] == SNATCHED:
-                status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
+                status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]
         else:
             status_list = []

@@ -136,7 +138,7 @@ def episodeStatuses(self, whichStatus=None):
     def changeEpisodeStatuses(self, oldStatus, newStatus, *args, **kwargs):
         status_list = [int(oldStatus)]
         if status_list[0] == SNATCHED:
-            status_list = Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST
+            status_list = [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST]

         to_change = {}

@@ -184,9 +186,9 @@ def showSubtitleMissed(indexer, seriesid, whichSubs):
             b'WHERE indexer = ? '
             b'AND showid = ? '
             b'AND season != 0 '
-            b'AND status LIKE \'%4\' '
-            b'AND location != \'\'',
-            [int(indexer), int(seriesid)]
+            b'AND status = ? '
+            b"AND location != ''",
+            [int(indexer), int(seriesid), DOWNLOADED]
         )

         result = {}
@@ -225,12 +227,13 @@ def subtitleMissed(self, whichSubs=None):
            b'tv_shows.indexer_id as indexer_id, tv_episodes.subtitles subtitles '
            b'FROM tv_episodes, tv_shows '
            b'WHERE tv_shows.subtitles = 1 '
-           b'AND tv_episodes.status LIKE \'%4\' '
+           b'AND tv_episodes.status = ? '
            b'AND tv_episodes.season != 0 '
-           b'AND tv_episodes.location != \'\' '
+           b"AND tv_episodes.location != '' "
            b'AND tv_episodes.showid = tv_shows.indexer_id '
            b'AND tv_episodes.indexer = tv_shows.indexer '
-           b'ORDER BY show_name'
+           b'ORDER BY show_name',
+           [DOWNLOADED]
        )

         ep_counts = {}
@@ -284,12 +287,12 @@ def downloadSubtitleMissed(self, *args, **kwargs):
                 all_eps_results = main_db_con.select(
                     b'SELECT season, episode '
                     b'FROM tv_episodes '
-                    b'WHERE status LIKE \'%4\' '
+                    b'WHERE status = ? '
                     b'AND season != 0 '
                     b'AND indexer = ? '
                     b'AND showid = ? '
-                    b'AND location != \'\'',
-                    [cur_indexer_id, cur_series_id]
+                    b"AND location != ''",
+                    [DOWNLOADED, cur_indexer_id, cur_series_id]
                 )
                 to_download[(cur_indexer_id, cur_series_id)] = [str(x[b'season']) + 'x' + str(x[b'episode']) for x in all_eps_results]

@@ -322,10 +325,10 @@ def subtitleMissedPP(self):
                 logger.log(u"Filename '{0}' cannot be parsed to an episode".format(filename), logger.DEBUG)
                 continue

-            ep_status = Quality.split_composite_status(tv_episode.status).status
-            if ep_status in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST:
+            ep_status = tv_episode.status
+            if ep_status in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
                 status = 'snatched'
-            elif ep_status in Quality.DOWNLOADED:
+            elif ep_status == DOWNLOADED:
                 status = 'downloaded'
             else:
                 continue
@@ -403,8 +406,9 @@ def backlogOverview(self):
             ep_cats = {}

             sql_results = main_db_con.select(
-                """
-                SELECT e.status, e.season, e.episode, e.name, e.airdate, e.manually_searched
+                b"""
+                SELECT e.status, e.quality, e.season,
+                       e.episode, e.name, e.airdate, e.manually_searched
                 FROM tv_episodes as e
                 WHERE e.season IS NOT NULL AND e.indexer = ? AND e.showid = ?
@@ -415,7 +419,7 @@ def backlogOverview(self):
             filtered_episodes = []
             backlogged_episodes = [dict(row) for row in sql_results]
             for cur_result in backlogged_episodes:
-                cur_ep_cat = cur_show.get_overview(cur_result[b'status'], backlog_mode=True,
+                cur_ep_cat = cur_show.get_overview(cur_result[b'status'], cur_result[b'quality'], backlog_mode=True,
                                                    manually_searched=cur_result[b'manually_searched'])
                 if cur_ep_cat:
                     if cur_ep_cat in selected_backlog_status and cur_result[b'airdate'] != 1:
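The `status LIKE '%4'` predicates removed above only worked because DOWNLOADED (4) ended up as the trailing digit of the legacy composite integer; with a flat status column a bound parameter is both correct and index-friendly. A self-contained sketch of the difference using the stdlib `sqlite3` module (table layout and values are illustrative only):

    import sqlite3

    DOWNLOADED = 4

    con = sqlite3.connect(':memory:')
    con.execute('CREATE TABLE tv_episodes (showid INT, status INT, location TEXT)')
    con.execute("INSERT INTO tv_episodes VALUES (1, 804, '/a.mkv')")  # legacy composite value
    con.execute("INSERT INTO tv_episodes VALUES (1, 4, '/b.mkv')")    # new flat status

    # Legacy scheme: match DOWNLOADED by its trailing digit (fragile string match on an int).
    legacy = con.execute("SELECT location FROM tv_episodes WHERE status LIKE '%4'").fetchall()

    # New scheme: plain equality with a bound parameter.
    flat = con.execute('SELECT location FROM tv_episodes WHERE status = ?', [DOWNLOADED]).fetchall()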
diff --git a/medusa/show/coming_episodes.py b/medusa/show/coming_episodes.py
index 237a3c961c..afa47724af 100644
--- a/medusa/show/coming_episodes.py
+++ b/medusa/show/coming_episodes.py
@@ -1,7 +1,5 @@
 # coding=utf-8
 # This file is part of Medusa.
-#
-
 #
 # Medusa is free software: you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -25,10 +23,14 @@
 from medusa import app
 from medusa.common import (
+    ARCHIVED,
+    DOWNLOADED,
     IGNORED,
-    Quality,
+    SNATCHED,
+    SNATCHED_BEST,
+    SNATCHED_PROPER,
     UNAIRED,
-    WANTED,
+    WANTED
 )
 from medusa.db import DBConnection
 from medusa.helper.common import dateFormat, timeFormat
@@ -71,12 +73,14 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P
         today = date.today().toordinal()
         next_week = (date.today() + timedelta(days=7)).toordinal()
         recently = (date.today() - timedelta(days=app.COMING_EPS_MISSED_RANGE)).toordinal()
-        qualities_list = Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + Quality.ARCHIVED + [IGNORED]
+        status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER,
+                       ARCHIVED, IGNORED]

         db = DBConnection()
         fields_to_select = ', '.join(
-            ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer', 'indexer_id', 'name', 'network',
-             'paused', 'quality', 'runtime', 'season', 'show_name', 'showid', 's.status']
+            ['airdate', 'airs', 'e.description as description', 'episode', 'imdb_id', 'e.indexer',
+             'indexer_id', 'name', 'network', 'paused', 's.quality', 'runtime', 'season', 'show_name',
+             'showid', 's.status']
         )
         results = db.select(
             'SELECT %s ' % fields_to_select +
@@ -86,13 +90,13 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P
             'AND airdate < ? '
             'AND s.indexer = e.indexer '
             'AND s.indexer_id = e.showid '
-            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
-            [today, next_week] + qualities_list
+            'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
+            [today, next_week] + status_list
         )

         done_shows_list = [int(result[b'showid']) for result in results]
         placeholder = ','.join(['?'] * len(done_shows_list))
-        placeholder2 = ','.join(['?'] * len(Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER))
+        placeholder2 = ','.join(['?'] * len([DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]))

         # FIXME: This inner join is not multi indexer friendly.
         results += db.select(
@@ -109,7 +113,7 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P
             'AND inner_e.airdate >= ? '
             'ORDER BY inner_e.airdate ASC LIMIT 1) '
             'AND e.status NOT IN (' + placeholder2 + ')',
-            done_shows_list + [next_week] + Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER
+            done_shows_list + [next_week] + [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
         )

         results += db.select(
@@ -120,8 +124,8 @@ def get_coming_episodes(categories, sort, group, paused=app.COMING_EPS_DISPLAY_P
             'AND airdate < ? '
             'AND airdate >= ? '
             'AND e.status IN (?,?) '
-            'AND e.status NOT IN (' + ','.join(['?'] * len(qualities_list)) + ')',
-            [today, recently, WANTED, UNAIRED] + qualities_list
+            'AND e.status NOT IN (' + ','.join(['?'] * len(status_list)) + ')',
+            [today, recently, WANTED, UNAIRED] + status_list
         )

         results = [dict(result) for result in results]
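Both queries build their `NOT IN` clause by emitting one `?` per status and passing the list itself as the bound parameters. A tiny runnable sketch of the idiom (the numeric status values are illustrative assumptions):

    DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST = 4, 2, 9, 12  # assumed values
    today, next_week = 737000, 737007  # ordinal dates, made up for the sketch

    status_list = [DOWNLOADED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER]
    placeholders = ','.join(['?'] * len(status_list))        # -> '?,?,?,?'
    sql = 'AND e.status NOT IN (' + placeholders + ')'
    params = [today, next_week] + status_list                # one value per '?' mark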
diff --git a/medusa/show/history.py b/medusa/show/history.py
index 2e66125f0b..4532eec2db 100644
--- a/medusa/show/history.py
+++ b/medusa/show/history.py
@@ -21,7 +21,7 @@
 from collections import namedtuple
 from datetime import datetime, timedelta

-from medusa.common import Quality
+from medusa.common import DOWNLOADED, SNATCHED
 from medusa.helper.common import try_int

 from six import itervalues, text_type
@@ -60,10 +60,12 @@ def get(self, limit=100, action=None):
         actions = History._get_actions(action)
         limit = max(try_int(limit), 0)

-        common_sql = 'SELECT show_name, h.indexer_id, showid, season, episode, h.quality, ' \
-                     'action, provider, resource, date, h.proper_tags, h.manually_searched ' \
-                     'FROM history h, tv_shows s ' \
-                     'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer '
+        common_sql = (
+            'SELECT show_name, h.indexer_id, showid AS show_id, season, episode, action, h.quality, '
+            'provider, resource, date, h.proper_tags, h.manually_searched '
+            'FROM history h, tv_shows s '
+            'WHERE h.showid = s.indexer_id AND h.indexer_id = s.indexer '
+        )
         filter_sql = 'AND action in (' + ','.join(['?'] * len(actions)) + ') '
         order_sql = 'ORDER BY date DESC '
@@ -79,7 +81,7 @@ def get(self, limit=100, action=None):
         # TODO: Convert to a defaultdict and compact items as needed
         # TODO: Convert to using operators to combine items
         for row in sql_results:
-            row = History.Item(*row)
+            row = History.Item(**row)
             if not limit or len(detailed) < limit:
                 detailed.append(row)
             if row.index in compact:
@@ -110,13 +112,13 @@ def _get_actions(action):
         result = None

         if action == 'downloaded':
-            result = Quality.DOWNLOADED
+            result = [DOWNLOADED]
         elif action == 'snatched':
-            result = Quality.SNATCHED
+            result = [SNATCHED]

         return result or []

-    action_fields = ('action', 'provider', 'resource', 'date', 'proper_tags', 'manually_searched')
+    action_fields = ('action', 'quality', 'provider', 'resource', 'date', 'proper_tags',
+                     'manually_searched')
     # A specific action from history
     Action = namedtuple('Action', action_fields)
     Action.width = len(action_fields)
@@ -131,7 +133,7 @@ def _get_actions(action):
     CompactItem = namedtuple('CompactItem', compact_fields)

     item_fields = tuple(  # make it a tuple so its immutable
-        ['show_name'] + list(index_fields) + list(action_fields)
+        set(('show_name',) + index_fields + action_fields)  # unique only
     )

     class Item(namedtuple('Item', item_fields)):
@@ -163,6 +165,7 @@ def cur_action(self):
         """
         return History.Action(
             self.action,
+            self.quality,
             self.provider,
             self.resource,
             self.date,
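With `h.quality` selected separately and the row expanded via `History.Item(**row)`, the namedtuple fields must match the query's column names, which is why `showid` is aliased to `show_id`. A quick self-contained sketch of the keyword-construction idiom (field list abbreviated, values made up):

    from collections import namedtuple

    item_fields = ('show_name', 'show_id', 'season', 'episode', 'action',
                   'quality', 'provider', 'resource', 'date', 'proper_tags',
                   'manually_searched')
    Item = namedtuple('Item', item_fields)

    row = dict.fromkeys(item_fields)              # stands in for dict(sqlite3.Row)
    row.update(show_name='Some Show', action=4, quality=8)
    item = Item(**row)                            # keyword construction ignores column order
    assert item.quality == 8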
" "AND e.season > 0 " "AND e.location != '' " "AND age {} 30 " @@ -1001,7 +1000,8 @@ def dhm(td): "ORDER BY " "lastsearch ASC " "LIMIT {}".format - (args['age_comparison'], args['limit']), [datetime.datetime.now().toordinal(), sql_like_languages] + (args['age_comparison'], args['limit']), + [datetime.datetime.now().toordinal(), DOWNLOADED, sql_like_languages] ) if not sql_results: diff --git a/medusa/trakt_checker.py b/medusa/trakt_checker.py index 5f43b7af6e..2528c6d112 100644 --- a/medusa/trakt_checker.py +++ b/medusa/trakt_checker.py @@ -10,7 +10,7 @@ from builtins import str from medusa import app, db, ui -from medusa.common import Quality, SKIPPED, WANTED +from medusa.common import ARCHIVED, DOWNLOADED, SKIPPED, SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED from medusa.helper.common import episode_num from medusa.helpers import get_title_without_year from medusa.indexers.indexer_config import EXTERNAL_IMDB, EXTERNAL_TRAKT, indexerConfig @@ -223,17 +223,17 @@ def remove_episode_trakt_collection(self, filter_show=None): params = [] main_db_con = db.DBConnection() - selection_status = ['?' for _ in Quality.DOWNLOADED + Quality.ARCHIVED] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name,' \ b'e.season, e.episode, e.status ' \ b'FROM tv_episodes AS e, tv_shows AS s WHERE e.indexer = s.indexer AND ' \ b's.indexer_id = e.showid and e.location = "" ' \ - b'AND e.status in ({0})'.format(','.join(selection_status)) + b'AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) if filter_show: sql_selection += b' AND s.indexer_id = ? AND e.indexer = ?' params = [filter_show.series_id, filter_show.indexer] - sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED + params) + sql_result = main_db_con.select(sql_selection, statuses + params) episodes = [dict(e) for e in sql_result] if episodes: @@ -274,13 +274,13 @@ def add_episode_trakt_collection(self): if app.TRAKT_SYNC and app.USE_TRAKT: main_db_con = db.DBConnection() - selection_status = ['?' for _ in Quality.DOWNLOADED + Quality.ARCHIVED] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, s.indexer_id, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid ' \ - b"AND e.status in ({0}) AND e.location <> ''".format(','.join(selection_status)) + b"AND e.status in ({0}) AND e.location <> ''".format(','.join(['?'] * len(statuses))) - sql_result = main_db_con.select(sql_selection, Quality.DOWNLOADED + Quality.ARCHIVED) + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(e) for e in sql_result] if episodes: @@ -337,13 +337,13 @@ def remove_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = Quality.DOWNLOADED + Quality.ARCHIVED - selection_status = [b'?' 
for _ in status] + statuses = [DOWNLOADED, ARCHIVED] sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer ' \ - b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(b','.join(selection_status)) - sql_result = main_db_con.select(sql_selection, status) + b'AND s.indexer_id = e.showid AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) + + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(i) for i in sql_result] if episodes: @@ -382,13 +382,13 @@ def add_episode_watchlist(self): if app.TRAKT_SYNC_WATCHLIST and app.USE_TRAKT: main_db_con = db.DBConnection() - status = Quality.SNATCHED + Quality.SNATCHED_BEST + Quality.SNATCHED_PROPER + [WANTED] - selection_status = [b'?' for _ in status] + statuses = [SNATCHED, SNATCHED_BEST, SNATCHED_PROPER, WANTED] sql_selection = b'SELECT s.indexer, s.startyear, e.showid, s.show_name, e.season, e.episode ' \ b'FROM tv_episodes AS e, tv_shows AS s ' \ b'WHERE e.indexer = s.indexer AND s.indexer_id = e.showid AND s.paused = 0 ' \ - b'AND e.status in ({0})'.format(b','.join(selection_status)) - sql_result = main_db_con.select(sql_selection, status) + b'AND e.status in ({0})'.format(','.join(['?'] * len(statuses))) + + sql_result = main_db_con.select(sql_selection, statuses) episodes = [dict(i) for i in sql_result] if episodes: @@ -489,6 +489,7 @@ def sync_trakt_shows(self): show_name = trakt_show['title'] show = None + indexer = None for i in indexerConfig: trakt_indexer = get_trakt_indexer(i) indexer_id = trakt_show['ids'].get(trakt_indexer, -1) @@ -516,8 +517,7 @@ def sync_trakt_shows(self): else: self.add_show(trakt_default_indexer, indexer_id, show_name, WANTED) - if int(app.TRAKT_METHOD_ADD) == 1: - # FIXME: Referenced before assigment + if int(app.TRAKT_METHOD_ADD) == 1 and indexer: new_show = Show.find_by_id(app.showList, indexer, indexer_id) if new_show: diff --git a/medusa/tv/episode.py b/medusa/tv/episode.py index 11744350ba..1b209ae905 100644 --- a/medusa/tv/episode.py +++ b/medusa/tv/episode.py @@ -245,13 +245,14 @@ def __init__(self, series, season, episode, filepath=''): self.episode = episode self.absolute_number = 0 self.description = '' - self.subtitles = list() + self.subtitles = [] self.subtitles_searchcount = 0 self.subtitles_lastsearch = str(datetime.min) self.airdate = date.fromordinal(1) self.hasnfo = False self.hastbn = False - self._status = UNSET + self.status = UNSET + self.quality = Quality.NA self.file_size = 0 self.release_name = '' self.is_proper = False @@ -409,52 +410,14 @@ def air_date(self): ).isoformat(b'T') @property - def status(self): - """Return the existing status as is.""" - return self._status - - @status.setter - def status(self, value): - """Set the status.""" - self._status = value - - @property - def splitted_status(self): - """Return the existing status removing the quality from it.""" - return Quality.split_composite_status(self._status) - - @property - def splitted_status_status(self): - """Return the status from the status/quality composite.""" - return self.splitted_status.status - - @splitted_status_status.setter - def splitted_status_status(self, value): - """ - Only set the status (reuse existing quality) of the composite status. - - :param value: The new status. 
- """ - self._status = Quality.composite_status(value, self.splitted_status_quality) - - @property - def splitted_status_quality(self): - """Return the quality from the status/quality composite.""" - return self.splitted_status.quality - - @splitted_status_quality.setter - def splitted_status_quality(self, value): - """ - Only set the quality (reuse existing status) of the composite status. - - :param value: The new quality. - """ - self._status = Quality.composite_status(self.splitted_status_status, value) + def status_name(self): + """Return the status name.""" + return statusStrings[self.status] @property - def status_name(self): + def quality_name(self): """Return the status name.""" - return statusStrings[Quality.split_composite_status(self.status).status] + return Quality.qualityStrings[self.quality] def is_location_valid(self, location=None): """Whether the location is a valid file. @@ -664,7 +627,8 @@ def load_from_db(self, season, episode): self.subtitles_searchcount = sql_results[0][b'subtitles_searchcount'] self.subtitles_lastsearch = sql_results[0][b'subtitles_lastsearch'] self.airdate = date.fromordinal(int(sql_results[0][b'airdate'])) - self.status = int(sql_results[0][b'status'] or -1) + self.status = int(sql_results[0][b'status'] or UNSET) + self.quality = int(sql_results[0][b'quality'] or Quality.NA) # don't overwrite my location if sql_results[0][b'location']: @@ -950,22 +914,10 @@ def load_from_indexer(self, season=None, episode=None, tvapi=None, cached_season 'status': statusStrings[self.status], } ) - # We only change the episode's status if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED + # Update the episode's status/quality if a file exists and the status is not SNATCHED|DOWNLOADED|ARCHIVED elif helpers.is_media_file(self.location): - if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + \ - Quality.ARCHIVED + Quality.SNATCHED_BEST: - old_status = self.status - self.status = Quality.status_from_name(self.location, anime=self.series.is_anime) - log.debug( - '{id}: {series} {ep} status changed from {old_status} to {new_status}' - ' as current status is not SNATCHED|DOWNLOADED|ARCHIVED', { - 'id': self.series.series_id, - 'series': self.series.name, - 'ep': episode_num(season, episode), - 'old_status': statusStrings[old_status], - 'new_status': statusStrings[self.status], - } - ) + if self.status not in [SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED]: + self.update_status_quality(self.location) else: log.debug( '{id}: {series} {ep} status untouched: {status}', { @@ -1002,15 +954,7 @@ def __load_from_nfo(self, location): if self.location != '': if self.status == UNSET and helpers.is_media_file(self.location): - self.status = Quality.status_from_name(self.location, anime=self.series.is_anime) - log.debug( - '{id}: {series} {ep} status changed from UNSET to {new_status}', { - 'id': self.series.series_id, - 'series': self.series.name, - 'ep': episode_num(self.season, self.episode), - 'new_status': statusStrings[self.status], - } - ) + self.update_status_quality(self.location) nfo_file = replace_extension(self.location, 'nfo') log.debug('{id}: Using NFO name {nfo}', @@ -1095,6 +1039,7 @@ def __str__(self): result += 'hasnfo: %r\n' % self.hasnfo result += 'hastbn: %r\n' % self.hastbn result += 'status: %r\n' % self.status + result += 'quality: %r\n' % self.quality return result def to_json(self, detailed=True): @@ -1115,6 +1060,7 @@ def to_json(self, detailed=True): data['title'] = self.name 
data['subtitles'] = self.subtitles data['status'] = self.status_name + data['quality'] = self.quality data['release'] = NonEmptyDict() data['release']['name'] = self.release_name data['release']['group'] = self.release_group @@ -1257,6 +1203,7 @@ def get_sql(self): b' hasnfo = ?, ' b' hastbn = ?, ' b' status = ?, ' + b' quality = ?, ' b' location = ?, ' b' file_size = ?, ' b' release_name = ?, ' @@ -1272,9 +1219,9 @@ def get_sql(self): b' episode_id = ?', [self.indexerid, self.indexer, self.name, self.description, ','.join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, - self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, - self.series.series_id, self.season, self.episode, self.absolute_number, self.version, - self.release_group, self.manually_searched, ep_id]] + self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name, + self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, + self.version, self.release_group, self.manually_searched, ep_id]] else: # Don't update the subtitle language when the srt file doesn't contain the # alpha2 code, keep value from subliminal @@ -1292,6 +1239,7 @@ def get_sql(self): b' hasnfo = ?, ' b' hastbn = ?, ' b' status = ?, ' + b' quality = ?, ' b' location = ?, ' b' file_size = ?, ' b' release_name = ?, ' @@ -1307,9 +1255,9 @@ def get_sql(self): b' episode_id = ?', [self.indexerid, self.indexer, self.name, self.description, self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, - self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, - self.series.series_id, self.season, self.episode, self.absolute_number, self.version, - self.release_group, self.manually_searched, ep_id]] + self.hastbn, self.status, self.quality, self.location, self.file_size, self.release_name, + self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, + self.version, self.release_group, self.manually_searched, ep_id]] else: # use a custom insert method to get the data into the DB. return [ @@ -1327,6 +1275,7 @@ def get_sql(self): b' hasnfo, ' b' hastbn, ' b' status, ' + b' quality, ' b' location, ' b' file_size, ' b' release_name, ' @@ -1339,11 +1288,11 @@ def get_sql(self): b' release_group) ' b'VALUES ' b' ((SELECT episode_id FROM tv_episodes WHERE indexer = ? AND showid = ? AND season = ? 
AND episode = ?), ' - b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);', + b' ?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);', [self.series.indexer, self.series.series_id, self.season, self.episode, self.indexerid, self.series.indexer, self.name, self.description, ','.join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, - self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, - self.release_name, self.is_proper, self.series.series_id, self.season, self.episode, + self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.quality, self.location, + self.file_size, self.release_name, self.is_proper, self.series.series_id, self.season, self.episode, self.absolute_number, self.version, self.release_group]] except Exception as error: log.error('{id}: Error while updating database: {error_msg!r}', @@ -1364,6 +1313,7 @@ def save_to_db(self): b'hasnfo': self.hasnfo, b'hastbn': self.hastbn, b'status': self.status, + b'quality': self.quality, b'location': self.location, b'file_size': self.file_size, b'release_name': self.release_name, @@ -1490,8 +1440,6 @@ def release_group(series, name): return '' return parse_result.release_group.strip('.- []{}') - _, ep_qual = Quality.split_composite_status(self.status) # @UnusedVariable - if app.NAMING_STRIP_YEAR: series_name = re.sub(r'\(\d+\)$', '', self.series.name).rstrip() else: @@ -1525,7 +1473,7 @@ def release_group(series, name): relgrp = app.UNKNOWN_RELEASE_GROUP # try to get the release encoder to comply with scene naming standards - encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), ep_qual) + encoder = Quality.scene_quality_from_name(self.release_name.replace(rel_grp[relgrp], ''), self.quality) if encoder: log.debug('Found codec for {series} {ep}', {'series': series_name, 'ep': ep_name}) @@ -1537,12 +1485,12 @@ def release_group(series, name): '%EN': ep_name, '%E.N': dot(ep_name), '%E_N': us(ep_name), - '%QN': Quality.qualityStrings[ep_qual], - '%Q.N': dot(Quality.qualityStrings[ep_qual]), - '%Q_N': us(Quality.qualityStrings[ep_qual]), - '%SQN': Quality.sceneQualityStrings[ep_qual] + encoder, - '%SQ.N': dot(Quality.sceneQualityStrings[ep_qual] + encoder), - '%SQ_N': us(Quality.sceneQualityStrings[ep_qual] + encoder), + '%QN': Quality.qualityStrings[self.quality], + '%Q.N': dot(Quality.qualityStrings[self.quality]), + '%Q_N': us(Quality.qualityStrings[self.quality]), + '%SQN': Quality.sceneQualityStrings[self.quality] + encoder, + '%SQ.N': dot(Quality.sceneQualityStrings[self.quality] + encoder), + '%SQ_N': us(Quality.sceneQualityStrings[self.quality] + encoder), '%S': str(self.season), '%0S': '%02d' % self.season, '%E': str(self.episode), @@ -2019,13 +1967,13 @@ def airdate_modify_stamp(self): } ) - def update_status(self, filepath): - """Update the episode status according to the file information. + def update_status_quality(self, filepath): + """Update the episode status and quality according to the file information. The status should only be changed if either the size or the filename changed. :param filepath: Path to the new episode file. 
""" - old_status, old_quality = Quality.split_composite_status(self.status) + old_status, old_quality = self.status, self.quality old_location = self.location # Changing the name of the file might also change its quality @@ -2055,7 +2003,8 @@ def update_status(self, filepath): new_status = ARCHIVED with self.lock: - self.status = Quality.composite_status(new_status, new_quality) + self.status = new_status + self.quality = new_quality if not same_name: # Reset release name as the name changed @@ -2065,8 +2014,8 @@ def update_status(self, filepath): "{name}: Setting the status from '{status_old}' to '{status_new}' and" " quality '{quality_old}' to '{quality_new}' based on file: {filepath}", { 'name': self.series.name, - 'status_old': Quality.statusPrefixes[old_status], - 'status_new': Quality.statusPrefixes[new_status], + 'status_old': statusStrings[old_status], + 'status_new': statusStrings[new_status], 'quality_old': Quality.qualityStrings[old_quality], 'quality_new': Quality.qualityStrings[new_quality], 'filepath': filepath, @@ -2077,7 +2026,7 @@ def update_status(self, filepath): "{name}: Not changing current status '{status_old}' or" " quality '{quality_old}' based on file: {filepath}", { 'name': self.series.name, - 'status_old': Quality.statusPrefixes[old_status], + 'status_old': statusStrings[old_status], 'quality_old': Quality.qualityStrings[old_quality], 'filepath': filepath, } diff --git a/medusa/tv/series.py b/medusa/tv/series.py index a4598de099..843f56bbb8 100644 --- a/medusa/tv/series.py +++ b/medusa/tv/series.py @@ -33,10 +33,15 @@ from medusa.black_and_white_list import BlackAndWhiteList from medusa.common import ( ARCHIVED, + DOWNLOADED, + FAILED, IGNORED, Overview, Quality, SKIPPED, + SNATCHED, + SNATCHED_BEST, + SNATCHED_PROPER, UNAIRED, UNSET, WANTED, @@ -1322,7 +1327,7 @@ def make_ep_from_file(self, filepath): continue else: - cur_ep.update_status(filepath) + cur_ep.update_status_quality(filepath) with cur_ep.lock: cur_ep.check_for_meta_files() @@ -1752,13 +1757,9 @@ def refresh_dir(self): with cur_ep.lock: # if it used to have a file associated with it and it doesn't anymore then # set it to app.EP_DEFAULT_DELETED_STATUS - if cur_ep.location and cur_ep.status in Quality.DOWNLOADED: + if cur_ep.location and cur_ep.status in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: - if app.EP_DEFAULT_DELETED_STATUS == ARCHIVED: - _, old_quality = Quality.split_composite_status(cur_ep.status) - new_status = Quality.composite_status(ARCHIVED, old_quality) - else: - new_status = app.EP_DEFAULT_DELETED_STATUS + new_status = app.EP_DEFAULT_DELETED_STATUS log.debug( u"{id}: Location for '{show}' {ep} doesn't exist and current status is '{old_status}'," @@ -1779,6 +1780,10 @@ def refresh_dir(self): cur_ep.hasnfo = False cur_ep.hastbn = False cur_ep.release_name = '' + cur_ep.release_group = '' + cur_ep.is_proper = False + cur_ep.version = 0 + cur_ep.manually_searched = False sql_l.append(cur_ep.get_sql()) @@ -2135,7 +2140,7 @@ def want_episode(self, season, episode, quality, forced_search=False, main_db_con = db.DBConnection() sql_results = main_db_con.select( b'SELECT ' - b' status, ' + b' status, quality, ' b' manually_searched ' b'FROM ' b' tv_episodes ' @@ -2157,7 +2162,7 @@ def want_episode(self, season, episode, quality, forced_search=False, ) return False - cur_status, cur_quality = Quality.split_composite_status(int(sql_results[0][b'status'])) + cur_status, cur_quality = int(sql_results[0][b'status']), int(sql_results[0][b'quality']) ep_status_text = statusStrings[cur_status] 
diff --git a/medusa/tv/series.py b/medusa/tv/series.py
index a4598de099..843f56bbb8 100644
--- a/medusa/tv/series.py
+++ b/medusa/tv/series.py
@@ -33,10 +33,15 @@
 from medusa.black_and_white_list import BlackAndWhiteList
 from medusa.common import (
     ARCHIVED,
+    DOWNLOADED,
+    FAILED,
     IGNORED,
     Overview,
     Quality,
     SKIPPED,
+    SNATCHED,
+    SNATCHED_BEST,
+    SNATCHED_PROPER,
     UNAIRED,
     UNSET,
     WANTED,
@@ -1322,7 +1327,7 @@ def make_ep_from_file(self, filepath):
                 continue

             else:
-                cur_ep.update_status(filepath)
+                cur_ep.update_status_quality(filepath)

             with cur_ep.lock:
                 cur_ep.check_for_meta_files()
@@ -1752,13 +1757,9 @@ def refresh_dir(self):
                 with cur_ep.lock:
                     # if it used to have a file associated with it and it doesn't anymore then
                     # set it to app.EP_DEFAULT_DELETED_STATUS
-                    if cur_ep.location and cur_ep.status in Quality.DOWNLOADED:
+                    if cur_ep.location and cur_ep.status in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]:

-                        if app.EP_DEFAULT_DELETED_STATUS == ARCHIVED:
-                            _, old_quality = Quality.split_composite_status(cur_ep.status)
-                            new_status = Quality.composite_status(ARCHIVED, old_quality)
-                        else:
-                            new_status = app.EP_DEFAULT_DELETED_STATUS
+                        new_status = app.EP_DEFAULT_DELETED_STATUS

                         log.debug(
                             u"{id}: Location for '{show}' {ep} doesn't exist and current status is '{old_status}',"
@@ -1779,6 +1780,10 @@ def refresh_dir(self):
                         cur_ep.hasnfo = False
                         cur_ep.hastbn = False
                         cur_ep.release_name = ''
+                        cur_ep.release_group = ''
+                        cur_ep.is_proper = False
+                        cur_ep.version = 0
+                        cur_ep.manually_searched = False

                         sql_l.append(cur_ep.get_sql())

@@ -2135,7 +2140,7 @@ def want_episode(self, season, episode, quality, forced_search=False,
         main_db_con = db.DBConnection()
         sql_results = main_db_con.select(
             b'SELECT '
-            b'  status, '
+            b'  status, quality, '
             b'  manually_searched '
             b'FROM '
             b'  tv_episodes '
@@ -2157,7 +2162,7 @@ def want_episode(self, season, episode, quality, forced_search=False,
             )
             return False

-        cur_status, cur_quality = Quality.split_composite_status(int(sql_results[0][b'status']))
+        cur_status, cur_quality = int(sql_results[0][b'status']), int(sql_results[0][b'quality'])
         ep_status_text = statusStrings[cur_status]
         manually_searched = sql_results[0][b'manually_searched']
@@ -2188,11 +2193,13 @@ def want_episode(self, season, episode, quality, forced_search=False,
         )
         return should_replace

-    def get_overview(self, ep_status, backlog_mode=False, manually_searched=False):
+    def get_overview(self, ep_status, ep_quality, backlog_mode=False, manually_searched=False):
         """Get the Overview status from the Episode status.

         :param ep_status: an Episode status
         :type ep_status: int
+        :param ep_quality: an Episode quality
+        :type ep_quality: int
         :param backlog_mode: if we should return overview for backlogOverview
         :type backlog_mode: boolean
         :param manually_searched: if episode was manually searched
@@ -2200,12 +2207,13 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False):
         :return: an Overview status
         :rtype: int
         """
-        ep_status = try_int(ep_status) or UNSET
+        ep_status = int(ep_status)
+        ep_quality = int(ep_quality)

         if backlog_mode:
             if ep_status == WANTED:
                 return Overview.WANTED
-            elif Quality.should_search(ep_status, self, manually_searched)[0]:
+            elif Quality.should_search(ep_status, ep_quality, self, manually_searched)[0]:
                 return Overview.QUAL
             return Overview.GOOD

@@ -2213,20 +2221,20 @@ def get_overview(self, ep_status, backlog_mode=False, manually_searched=False):
             return Overview.UNAIRED
         elif ep_status in (SKIPPED, IGNORED):
             return Overview.SKIPPED
-        elif ep_status in Quality.WANTED:
+        elif ep_status == WANTED:
             return Overview.WANTED
-        elif ep_status in Quality.ARCHIVED:
+        elif ep_status == ARCHIVED:
             return Overview.GOOD
-        elif ep_status in Quality.FAILED:
+        elif ep_status == FAILED:
             return Overview.WANTED
-        elif ep_status in Quality.SNATCHED:
+        elif ep_status == SNATCHED:
             return Overview.SNATCHED
-        elif ep_status in Quality.SNATCHED_PROPER:
+        elif ep_status == SNATCHED_PROPER:
             return Overview.SNATCHED_PROPER
-        elif ep_status in Quality.SNATCHED_BEST:
+        elif ep_status == SNATCHED_BEST:
             return Overview.SNATCHED_BEST
-        elif ep_status in Quality.DOWNLOADED:
-            if Quality.should_search(ep_status, self, manually_searched)[0]:
+        elif ep_status == DOWNLOADED:
+            if Quality.should_search(ep_status, ep_quality, self, manually_searched)[0]:
                 return Overview.QUAL
             else:
                 return Overview.GOOD
@@ -2248,9 +2256,9 @@ def get_backlogged_episodes(self, allowed_qualities, preferred_qualities, includ
             for ep_obj in ep_list:
                 if not include_wanted and ep_obj.status == WANTED:
                     continue
-                if Quality.should_search(ep_obj.status, show_obj, ep_obj.manually_searched)[0]:
+                if Quality.should_search(ep_obj.status, ep_obj.quality, show_obj, ep_obj.manually_searched)[0]:
                     new_backlogged += 1
-                if Quality.should_search(ep_obj.status, self, ep_obj.manually_searched)[0]:
+                if Quality.should_search(ep_obj.status, ep_obj.quality, self, ep_obj.manually_searched)[0]:
                     existing_backlogged += 1
         else:
             new_backlogged = existing_backlogged = -1
@@ -2268,12 +2276,11 @@ def set_all_episodes_archived(self, final_status_only=False):
         sql_list = []
         for ep_obj in ep_list:
             with ep_obj.lock:
-                if ep_obj.status in Quality.DOWNLOADED:
-                    if final_status_only and Quality.should_search(ep_obj.status, self,
+                if ep_obj.status == DOWNLOADED:
+                    if final_status_only and Quality.should_search(ep_obj.status, ep_obj.quality, self,
                                                                    ep_obj.manually_searched)[0]:
                         continue
-                    _, old_quality = Quality.split_composite_status(ep_obj.status)
-                    ep_obj.status = Quality.composite_status(ARCHIVED, old_quality)
+                    ep_obj.status = ARCHIVED
                     sql_list.append(ep_obj.get_sql())

         if sql_list:
             main_db_con = db.DBConnection()
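Every `Quality.should_search()` call site now passes status and quality as separate arguments. A toy sketch of the new call shape (the decision body is a stand-in, not Medusa's actual rule set, and the numeric values are assumptions):

    DOWNLOADED, SNATCHED = 4, 2
    HDTV, FULLHDBLURAY = 1 << 3, 1 << 9

    def should_search(ep_status, ep_quality, preferred_qualities, manually_searched):
        """Return (bool, reason): whether a backlog search could still improve this episode."""
        if manually_searched:
            return False, 'episode was manually searched'
        if ep_status in (SNATCHED, DOWNLOADED) and ep_quality not in preferred_qualities:
            return True, 'a preferred quality is still wanted'
        return False, 'nothing better to search for'

    replace, reason = should_search(DOWNLOADED, HDTV, [FULLHDBLURAY], manually_searched=False)
    assert replace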
diff --git a/tests/apiv2/test_config.py b/tests/apiv2/test_config.py
index ecbf9403e4..c93e2546fa 100644
--- a/tests/apiv2/test_config.py
+++ b/tests/apiv2/test_config.py
@@ -52,7 +52,7 @@ def config(monkeypatch, app_config):
     config_data['localUser'] = os_user
     config_data['programDir'] = app.PROG_DIR
     config_data['configFile'] = app.CONFIG_FILE
-    config_data['dbFilename'] = db.dbFilename()
+    config_data['dbPath'] = db.DBConnection().path
     config_data['cacheDir'] = app.CACHE_DIR
     config_data['logDir'] = app.LOG_DIR
     config_data['appArgs'] = app.MY_ARGS
@@ -151,7 +151,7 @@ def test_config_get(http_client, create_url, auth_headers, config):
     'locale',
     'localUser',
     'githubUrl',
-    'dbFilename',
+    'dbPath',
 ])
 def test_config_get_detailed(http_client, create_url, auth_headers, config, query):
     # given
diff --git a/tests/conftest.py b/tests/conftest.py
index 7241fd3864..aa5c6ff1d9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -98,7 +98,7 @@ def tvshow(create_tvshow):
 @pytest.fixture
 def tvepisode(tvshow, create_tvepisode):
     return create_tvepisode(series=tvshow, season=3, episode=4, indexer=34, file_size=1122334455,
-                            name='Episode Title', status=Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY),
+                            name='Episode Title', status=DOWNLOADED, quality=Quality.FULLHDBLURAY,
                             release_group='SuperGroup')
@@ -128,7 +128,7 @@ def create(language, **kwargs):

 @pytest.fixture
 def create_tvshow(monkeypatch):
-    def create(indexer=INDEXER_TVDBV2, indexerid=0, lang='', quality=Quality.UNKNOWN, season_folders=1,
+    def create(indexer=INDEXER_TVDBV2, indexerid=0, lang='', quality=Quality.NA, season_folders=1,
                enabled_subtitles=0, **kwargs):
         monkeypatch.setattr(Series, '_load_from_db', lambda method: None)
         target = Series(indexer=indexer, indexerid=indexerid, lang=lang, quality=quality,
diff --git a/tests/legacy/helper/quality_tests.py b/tests/legacy/helper/quality_tests.py
index 60f0b4fe3d..6729181b85 100644
--- a/tests/legacy/helper/quality_tests.py
+++ b/tests/legacy/helper/quality_tests.py
@@ -41,7 +41,7 @@ def test_get_quality_string(self):
             Quality.HDBLURAY: '720p BluRay',
             Quality.HDTV: '720p HDTV',
             Quality.HDWEBDL: '720p WEB-DL',
-            Quality.NONE: 'N/A',
+            Quality.NA: 'N/A',
             Quality.RAWHDTV: 'RawHD',
             Quality.SDDVD: 'SD DVD',
             Quality.SDTV: 'SDTV',
diff --git a/tests/test_common.py b/tests/test_common.py
index 846e7433e2..fe87c6ca0b 100644
--- a/tests/test_common.py
+++ b/tests/test_common.py
@@ -201,7 +201,7 @@ def test_from_guessit(self, p):
 ])
 def test_to_guessit(self, p):
     # Given
-    quality = Quality.composite_status(DOWNLOADED, p['quality'])
+    quality = p['quality']
     expected = p['expected']

     # When
diff --git a/tests/test_should_process.py b/tests/test_should_process.py
index 90a615454a..4ef0b6097d 100644
--- a/tests/test_should_process.py
+++ b/tests/test_should_process.py
@@ -2,6 +2,7 @@
 """Tests for medusa/test_should_process.py."""
 from medusa.common import Quality
 from medusa.post_processor import PostProcessor
+
 import pytest

@@ -55,12 +56,12 @@
         'preferred_qualities': [Quality.HDTV],
         'expected': True
     },
-    {  # p7: Current quality is NONE: no
-        'cur_quality': Quality.NONE,
+    {  # p7: Current quality is NA: yes
+        'cur_quality': Quality.NA,
         'new_quality': Quality.HDTV,
         'allowed_qualities': [Quality.HDWEBDL],
         'preferred_qualities': [Quality.HDTV],
-        'expected': False
+        'expected': True
     },
 ])
 def test_should_process(p):
diff --git a/tests/test_should_replace.py b/tests/test_should_replace.py
index f342690686..42dee9af64 100644
--- a/tests/test_should_replace.py
+++ b/tests/test_should_replace.py
@@ -205,7 +205,7 @@
         'manually_searched': False,
         'expected': False
     },
-    {  # p18: Downloaded Unknown found 720p HDBLURAY: no
+    {  # p18: Downloaded Unknown found 720p HDBLURAY: yes
         'ep_status': DOWNLOADED,
         'cur_quality': Quality.UNKNOWN,
         'new_quality': Quality.HDBLURAY,
@@ -214,7 +214,7 @@
         'download_current_quality': False,
         'force': False,
         'manually_searched': False,
-        'expected': False
+        'expected': True
     },
     {  # p19: Downloaded SDTV (not in quality system) and found 720p HDTV: yes
         'ep_status': DOWNLOADED,
@@ -414,9 +414,9 @@
         'manually_searched': False,
         'expected': False
     },
-    {  # p37: Current quality is NONE: yes
+    {  # p37: Current quality is NA: yes
         'ep_status': SNATCHED,
-        'cur_quality': Quality.NONE,
+        'cur_quality': Quality.NA,
         'new_quality': Quality.HDTV,
         'allowed_qualities': [Quality.SDTV],
         'preferred_qualities': [Quality.HDTV],
@@ -505,6 +505,17 @@
         'search_type': PROPER_SEARCH,
         'expected': False
     },
+    {  # p45: Downloaded UNKNOWN and it's Preferred: yes
+        'ep_status': DOWNLOADED,
+        'cur_quality': Quality.HDTV,
+        'new_quality': Quality.UNKNOWN,
+        'allowed_qualities': [Quality.HDTV],
+        'preferred_qualities': [Quality.UNKNOWN],
+        'download_current_quality': False,
+        'force': False,
+        'manually_searched': False,
+        'expected': True
+    },
 ])
 def test_should_replace(p):
     """Run the test."""
diff --git a/tests/test_should_search.py b/tests/test_should_search.py
index 3e039b38bf..760ffaabf1 100644
--- a/tests/test_should_search.py
+++ b/tests/test_should_search.py
@@ -21,7 +21,8 @@ def _load_from_db(self):

 @pytest.mark.parametrize('p', [
     {  # p0: Downloaded a quality not in quality system : yes
-        'status': Quality.composite_status(DOWNLOADED, Quality.SDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.SDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -29,7 +30,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p1: Current status is SKIPPED: no
-        'status': Quality.composite_status(SKIPPED, None),
+        'status': SKIPPED,
+        'quality': Quality.NA,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -37,7 +39,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p2: Current status is IGNORED: no
-        'status': Quality.composite_status(IGNORED, None),
+        'status': IGNORED,
+        'quality': Quality.NA,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -45,7 +48,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p3: Current status is SNATCHED_BEST: no
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.HDWEBDL),
+        'status': SNATCHED_BEST,
+        'quality': Quality.HDWEBDL,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -53,7 +57,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p4: Current status is SNATCHED: yes
-        'status': Quality.composite_status(SNATCHED, Quality.HDTV),
+        'status': SNATCHED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -61,7 +66,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p5: Current status is SNATCHED_PROPER: yes
-        'status': Quality.composite_status(SNATCHED_PROPER, Quality.HDTV),
+        'status': SNATCHED_PROPER,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -69,7 +75,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p6: Status is DOWNLOADED: yes
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -77,7 +84,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p7: Status is ARCHIVED: no
-        'status': Quality.composite_status(ARCHIVED, Quality.HDTV),
+        'status': ARCHIVED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV, Quality.HDWEBDL],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -85,7 +93,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p8: Status WANTED: yes
-        'status': Quality.composite_status(WANTED, None),
+        'status': WANTED,
+        'quality': Quality.NA,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDWEBDL],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -93,7 +102,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p9: Episode was manually searched by user: no
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDBLURAY),
+        'status': DOWNLOADED,
+        'quality': Quality.HDBLURAY,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -101,7 +111,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p10: Downloaded an Allowed quality. Preferred not set: no
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [])),  # Preferred Qualities
@@ -109,7 +120,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p11: Downloaded an Allowed quality but Preferred set: yes
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDWEBDL])),  # Preferred Qualities
@@ -117,7 +129,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p12: Downloaded an Preferred quality. Allowed not set: no
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([],  # Allowed Qualities
                                                                  [Quality.SDTV, Quality.HDTV])),  # Preferred Qualities
@@ -125,7 +138,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p13: Already have Preferred quality: no
-        'status': Quality.composite_status(SNATCHED, Quality.HDBLURAY),
+        'status': SNATCHED,
+        'quality': Quality.HDBLURAY,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -133,7 +147,8 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p14: ´Downloaded UNKNOWN and its on Allowed. Preferred not set: no
-        'status': Quality.composite_status(DOWNLOADED, Quality.UNKNOWN),
+        'status': DOWNLOADED,
+        'quality': Quality.UNKNOWN,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.UNKNOWN, Quality.HDTV],  # Allowed Qualities
                                                                  [])),  # Preferred Qualities
@@ -141,15 +156,17 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p15: ´Downloaded UNKNOWN and its not on Allowed: yes
-        'status': Quality.composite_status(DOWNLOADED, Quality.UNKNOWN),
+        'status': DOWNLOADED,
+        'quality': Quality.UNKNOWN,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
         'manually_searched': False,
         'expected': True
     },
-    {  # p16: ´Downloaded NONE (invalid quality): yes
-        'status': Quality.composite_status(DOWNLOADED, Quality.NONE),
+    {  # p16: ´Downloaded NA (initial quality): yes
+        'status': DOWNLOADED,
+        'quality': Quality.NA,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -157,7 +174,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p17: ´SNATCHED BEST but this quality is no longer wanted: yes
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.SDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -165,7 +183,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p18: ´SNATCHED BEST but this quality is no longer in preferred but in allowed. Preferred set: yes
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.SDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV, Quality.SDTV],  # Allowed Qualities
                                                                  [Quality.HDBLURAY])),  # Preferred Qualities
@@ -173,7 +192,8 @@ def _load_from_db(self):
         'expected': True
     },
     {  # p19: ´SNATCHED BEST but this quality is no longer in preferred but in allowed. Preferred not set: no
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.SDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV, Quality.SDTV],  # Allowed Qualities
                                                                  [])),  # Preferred Qualities
@@ -181,24 +201,35 @@ def _load_from_db(self):
         'expected': False
     },
     {  # p20: ´SNATCHED BEST but this quality is no longer wanted. Preferred not set: yes
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.SDTV,
         'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
                                quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
                                                                  [])),  # Preferred Qualities
         'manually_searched': False,
         'expected': True
     },
+    {  # p21: Downloaded HDTV and it's in Allowed. Preferred is set (UNKNOWN): yes
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
+        'show_obj': TestTVShow(indexer=1, indexer_id=1, lang='',
+                               quality=Quality.combine_qualities([Quality.HDTV],  # Allowed Qualities
+                                                                 [Quality.UNKNOWN])),  # Preferred Qualities
+        'manually_searched': False,
+        'expected': True
+    },
 ])
 def test_should_search(p):
     """Run the test."""
     # Given
     status = p['status']
+    quality = p['quality']
     show_obj = p['show_obj']
     manually_searched = p['manually_searched']
     expected = p['expected']

     # When
-    replace, msg = Quality.should_search(status, show_obj, manually_searched)
+    replace, msg = Quality.should_search(status, quality, show_obj, manually_searched)
     actual = replace

     # Then
diff --git a/tests/test_tvepisode_refiner.py b/tests/test_tvepisode_refiner.py
index 9cda9908e7..b924b371b6 100644
--- a/tests/test_tvepisode_refiner.py
+++ b/tests/test_tvepisode_refiner.py
@@ -13,7 +13,7 @@ def data(create_tvshow, create_tvepisode):
     show_year = 2012
     tvshow = create_tvshow(indexerid=12, name='{0} ({1})'.format(show_name, show_year), imdb_id='tt0000000')
     tvepisode = create_tvepisode(series=tvshow, indexer=34, season=3, episode=4, name='Episode Title',
-                                 file_size=1122334455, status=Quality.composite_status(DOWNLOADED, Quality.FULLHDBLURAY),
+                                 file_size=1122334455, status=DOWNLOADED, quality=Quality.FULLHDBLURAY,
                                  release_group='SuperGroup')
     return {
         'tvshow': tvshow,
diff --git a/tests/test_update_status.py b/tests/test_update_status_quality.py
similarity index 56%
rename from tests/test_update_status.py
rename to tests/test_update_status_quality.py
index d43c39859f..56ffd0c619 100644
--- a/tests/test_update_status.py
+++ b/tests/test_update_status_quality.py
@@ -1,5 +1,5 @@
 # coding=utf-8
-"""Tests for medusa/tv/episode.py:update_status"""
+"""Tests for medusa/tv/episode.py:update_status_quality"""
 from medusa.common import (ARCHIVED, DOWNLOADED, IGNORED, Quality, SKIPPED, SNATCHED, SNATCHED_BEST,
                            SNATCHED_PROPER, UNAIRED, UNSET, WANTED, statusStrings)
@@ -8,12 +8,14 @@

 @pytest.fixture
 def create_episode(tvshow, create_tvepisode, create_file):
-    def create(filepath, status, size):
+    def create(filepath, status, size, quality):
         path = create_file(filepath, size=size) if filepath else ''
         episode = create_tvepisode(tvshow, 2, 14, filepath=path)
         episode.location = path
         if status:
             episode.status = status
+        if quality:
+            episode.quality = quality

         return episode

@@ -22,122 +24,142 @@ def create(filepath, status, size):
 @pytest.mark.parametrize('p', [
     {  # p0: File name and size are the same
-        'status': Quality.composite_status(SNATCHED, Quality.SDTV),
+        'status': SNATCHED,
+        'quality': Quality.SDTV,
         'filepath': 'Show.S01E01.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV)
+        'expected': (DOWNLOADED, Quality.SDTV)
     },
     {  # p1: Not a valid media file
-        'status': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.FULLHDTV,
         'location': 'Show.S01E02.1080p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E02.1080p.HDTV.X264-GROUP.srt',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV)
+        'expected': (DOWNLOADED, Quality.FULLHDTV)
     },
     {  # p2: File name is the same, different size
-        'status': Quality.composite_status(SNATCHED, Quality.SDTV),
+        'status': SNATCHED,
+        'quality': Quality.SDTV,
         'location': 'Show.S01E03.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E03.HDTV.X264-GROUP.mkv',
         'new_size': 53,
-        'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV)
+        'expected': (DOWNLOADED, Quality.SDTV)
     },
     {  # p3: File name is different, same size
-        'status': Quality.composite_status(DOWNLOADED, Quality.SDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.SDTV,
         'location': 'Show.S01E04.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E04.HDTV.X264-OTHERGROUP.mkv',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV)
+        'expected': (DOWNLOADED, Quality.SDTV)
     },
     {  # p4: File name and size are both different
-        'status': Quality.composite_status(DOWNLOADED, Quality.HDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.HDTV,
         'location': 'Show.S01E05.720p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E05.720p.HDTV.X264-SOMEOTHERGROUP.mkv',
         'new_size': 85,
-        'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV)
+        'expected': (DOWNLOADED, Quality.HDTV)
     },
     {  # p5: No previous file present (location)
-        'status': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV),
+        'status': DOWNLOADED,
+        'quality': Quality.FULLHDTV,
         'filepath': 'Show.S01E06.1080p.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.FULLHDTV)
+        'expected': (ARCHIVED, Quality.FULLHDTV)
     },
     {  # p6: Default status and no previous file present (location)
         'filepath': 'Show.S01E07.720p.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.HDTV)
+        'expected': (ARCHIVED, Quality.HDTV)
     },
     {  # p7: Snatched and download not finished
-        'status': Quality.composite_status(SNATCHED, Quality.FULLHDTV),
+        'status': SNATCHED,
+        'quality': Quality.FULLHDTV,
         'location': 'Show.S01E08.1080p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E08.1080p.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(SNATCHED, Quality.FULLHDTV)
+        'expected': (SNATCHED, Quality.FULLHDTV)
     },
     {  # p8: Previous status was Skipped
-        'status': Quality.composite_status(SKIPPED, None),
+        'status': SKIPPED,
+        'quality': Quality.NA,
         'filepath': 'Show.S01E09.1080p.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.FULLHDTV)
+        'expected': (ARCHIVED, Quality.FULLHDTV)
     },
     {  # p9: Previous status was Unaired
-        'status': Quality.composite_status(UNAIRED, None),
+        'status': UNAIRED,
+        'quality': Quality.NA,
         'filepath': 'Show.S01E10.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.SDTV)
+        'expected': (ARCHIVED, Quality.SDTV)
     },
     {  # p10: Previous status was Ignored
-        'status': Quality.composite_status(IGNORED, None),
+        'status': IGNORED,
+        'quality': Quality.NA,
         'filepath': 'Show.S01E11.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.SDTV)
+        'expected': (ARCHIVED, Quality.SDTV)
     },
     {  # p11: Previous status was Unset
-        'status': Quality.composite_status(UNSET, None),
+        'status': UNSET,
+        'quality': Quality.NA,
         'filepath': 'Show.S01E11.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(ARCHIVED, Quality.SDTV)
+        'expected': (ARCHIVED, Quality.SDTV)
     },
     {  # p12: Snatched and download is finished
-        'status': Quality.composite_status(SNATCHED, Quality.HDTV),
+        'status': SNATCHED,
+        'quality': Quality.HDTV,
         'location': 'Show.S01E12.720p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E12.720p.HDTV.X264-BETTERGROUP.mkv',
         'new_size': 29,
-        'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV)
+        'expected': (DOWNLOADED, Quality.HDTV)
     },
     {  # p13: Snatched a Proper and download is finished
-        'status': Quality.composite_status(SNATCHED_PROPER, Quality.FULLHDTV),
+        'status': SNATCHED_PROPER,
+        'quality': Quality.FULLHDTV,
         'location': 'Show.S01E13.1080p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E13.PROPER.1080p.HDTV.X264-GROUP.mkv',
         'new_size': 89,
-        'expected': Quality.composite_status(DOWNLOADED, Quality.FULLHDTV)
+        'expected': (DOWNLOADED, Quality.FULLHDTV)
     },
     {  # p14: Snatched a Proper (Best) and download is finished (higher quality)
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.SDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.SDTV,
         'location': 'Show.S01E14.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E14.720p.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.HDTV)
+        'expected': (DOWNLOADED, Quality.HDTV)
     },
     {  # p15: Snatched a Proper (Best) and download is finished (lower quality)
-        'status': Quality.composite_status(SNATCHED_BEST, Quality.FULLHDTV),
+        'status': SNATCHED_BEST,
+        'quality': Quality.FULLHDTV,
         'location': 'Show.S01E15.1080p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E15.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV)
+        'expected': (DOWNLOADED, Quality.SDTV)
     },
     {  # p16: Previous status was Wanted and no previous file present (location)
-        'status': Quality.composite_status(WANTED, None),
+        'status': WANTED,
+        'quality': Quality.NA,
         'filepath': 'Show.S01E16.HDTV.X264-GROUP.mkv',
-        'expected': Quality.composite_status(DOWNLOADED, Quality.SDTV)
+        'expected': (DOWNLOADED, Quality.SDTV)
     },
     {  # p17: Previous status was Wanted
-        'status': Quality.composite_status(WANTED, Quality.FULLHDTV),
+        'status': WANTED,
+        'quality': Quality.FULLHDTV,
         'location': 'Show.S01E17.1080p.HDTV.X264-GROUP.mkv',
         'filepath': 'Show.S01E17.720p.HDTV.X264-GROUP.mkv',
         'new_size': 38,
-        'expected': Quality.composite_status(ARCHIVED, Quality.HDTV)
+        'expected': (ARCHIVED, Quality.HDTV)
     },
 ])
-def test_update_status(p, create_episode, create_file):
+def test_update_status_quality(p, create_episode, create_file):
     """Run the test."""
     # Given
     location = p.get('location')
     status = p.get('status')
-    episode = create_episode(filepath=location, status=status, size=42)
+    quality = p.get('quality')
+    episode = create_episode(filepath=location, status=status, quality=quality, size=42)
     filepath = create_file(p['filepath'], size=p.get('new_size', 42))
-    expected = p['expected']
+    exp_status, exp_quality = p['expected']

     # When
-    episode.update_status(filepath)
-    actual = episode.status
+    episode.update_status_quality(filepath)
+    actual_status = episode.status
+    actual_quality = episode.quality

     # Then
-    assert statusStrings[expected] == statusStrings[actual]
+    assert statusStrings[exp_status] == statusStrings[actual_status]
+    assert Quality.qualityStrings[exp_quality] == Quality.qualityStrings[actual_quality]
ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +149,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +187,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes-default/slim/views/config.mako b/themes-default/slim/views/config.mako index 8a95086122..59f9dc6688 100644 --- a/themes-default/slim/views/config.mako +++ b/themes-default/slim/views/config.mako @@ -21,7 +21,7 @@ User:{{localUser}} Program Folder:{{programDir}} Config File:{{configFile}} - Database File:{{dbFilename}} + Database File:{{dbPath}} Cache Folder:{{cacheDir}} Log Folder:{{logDir}} Arguments:
{{prettyPrintJSON(appArgs)}}
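For context on the status/quality split exercised by the test changes above: previously a single packed integer carried both values through the database and the templates. A minimal sketch of the legacy encoding, assuming the SickBeard-style `status + 100 * quality` packing (helper names mirror medusa.common; the real implementations also special-case UNKNOWN and unset values):

# Sketch only: assumes the legacy `status + 100 * quality` packing.
DOWNLOADED = 4          # illustrative status constant
FULLHDTV = 1 << 4       # legacy quality bit (16), before the renumbering

def composite_status(status, quality):
    # Legacy: fold the quality bits into the status integer.
    return status + 100 * quality

def split_composite_status(composite):
    # Legacy: recover (status, quality) from the packed integer.
    return composite % 100, composite // 100

packed = composite_status(DOWNLOADED, FULLHDTV)   # 1604
assert split_composite_status(packed) == (DOWNLOADED, FULLHDTV)

# New style: the episode row simply carries two columns, no packing.
episode = {'status': DOWNLOADED, 'quality': FULLHDTV}

This is why `test_update_status` became `test_update_status_quality` above and now asserts on both fields separately.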
@@ -50,7 +50,7 @@ const component = { commitHash: undefined, configFile: undefined, databaseVersion: undefined, - dbFilename: undefined, + dbPath: undefined, githubUrl: undefined, locale: undefined, localUser: undefined, @@ -73,7 +73,7 @@ const component = { this.commitHash = data.commitHash; this.configFile = data.configFile; this.databaseVersion = data.databaseVersion; - this.dbFilename = data.dbFilename; + this.dbPath = data.dbPath; this.githubUrl = data.githubUrl; this.locale = data.locale; this.localUser = data.localUser; diff --git a/themes-default/slim/views/displayShow.mako b/themes-default/slim/views/displayShow.mako index 0590d815e9..2eacef92b0 100644 --- a/themes-default/slim/views/displayShow.mako +++ b/themes-default/slim/views/displayShow.mako @@ -4,7 +4,7 @@ import urllib import ntpath from medusa import app, helpers, subtitles, sbdatetime, network_timezones - from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED + from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.common import Quality, qualityPresets, statusStrings, Overview from medusa.helper.common import pretty_file_size from medusa.indexers.indexer_api import indexerApi @@ -236,9 +236,9 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc or ''} - % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"]: ${pretty_file_size(epResult["file_size"])} % endif @@ -256,7 +256,7 @@ const startVue = () => { % endif - % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] for rootDir in app.ROOT_DIRS: @@ -269,9 +269,9 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: % if flag != 'und': - + ${flag} % else: @@ -280,15 +280,18 @@ const startVue = () => { % endif % endfor - <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> - % if cur_quality != Quality.NONE: + <% + cur_status = int(epResult['status']) + cur_quality = int(epResult['quality']) + %> + % if cur_quality != Quality.NA: ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % else: ${statusStrings[cur_status]} % endif % if int(epResult["season"]) != 0: - % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: + % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED): retry % else: search @@ -297,7 +300,7 @@ const startVue = () => { % else: search % endif - % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]: + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): search subtitles % endif diff --git 
a/themes-default/slim/views/history.mako b/themes-default/slim/views/history.mako index ee07d472d0..f0ce4fd056 100644 --- a/themes-default/slim/views/history.mako +++ b/themes-default/slim/views/history.mako @@ -9,7 +9,7 @@ from medusa import providers from medusa.sbdatetime import sbdatetime from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED - from medusa.common import Quality, statusStrings, Overview + from medusa.common import statusStrings from medusa.show.history import History from medusa.providers.generic_provider import GenericProvider %> @@ -36,7 +36,8 @@ const startVue = () => { return { // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality 0: node => $(node).find('time').attr('datetime'), - 1: node => $(node).find('a').text() + 1: node => $(node).find('a').text(), + 4: node => $(node).attr('quality') }; } // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded @@ -49,10 +50,10 @@ const startVue = () => { if ($.isMeta({ subtitles: 'enabled' }, [true])) { // 4: Subtitled, 5: Quality compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'), - compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[5] = node => $(node).attr('quality') } else { // 4: Quality - compactExtract[4] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[4] = node => $(node).attr('quality') } return compactExtract; })(), @@ -149,7 +150,6 @@ const startVue = () => { % for hItem in historyResults: - <% composite = Quality.split_composite_status(int(hItem.action)) %> <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> @@ -157,11 +157,11 @@ const startVue = () => { ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - - % if composite.status == SUBTITLED: + + % if hItem.action == SUBTITLED: % endif - ${statusStrings[composite.status]} + ${statusStrings[hItem.action]} % if hItem.manually_searched: % endif @@ -169,8 +169,9 @@ const startVue = () => { % endif + - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if hItem.action in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} % else: @@ -178,7 +179,7 @@ const startVue = () => { % endif % else: % if hItem.provider > 0: - % if composite.status in [SNATCHED, FAILED]: + % if hItem.action in [SNATCHED, FAILED]: <% provider = providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %> % if provider is not None: ${provider.name} @@ -191,8 +192,9 @@ const startVue = () => { % endif % endif - ${composite.quality} - ${renderQualityPill(composite.quality)} + + ${renderQualityPill(hItem.quality)} + % endfor @@ -230,8 +232,7 @@ const startVue = () => { % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SNATCHED: + % if cur_action.action == SNATCHED: <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %> % if provider is not None: ${provider.name} @@ -245,15 +246,14 @@ const startVue = () => { missing provider % endif % endif - % if composite.status == FAILED: + % if cur_action.action == FAILED: % endif % endfor % for cur_action in sorted(hItem.actions): - <% composite = 
Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if cur_action.action in [DOWNLOADED, ARCHIVED]: % if cur_action.provider != "-1": ${cur_action.provider} % else: @@ -265,8 +265,7 @@ const startVue = () => { % if app.USE_SUBTITLES: % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SUBTITLED: + % if cur_action.action == SUBTITLED: ${cur_action.provider} / @@ -275,7 +274,9 @@ const startVue = () => { % endfor % endif - ${renderQualityPill(composite.quality)} + + ${renderQualityPill(hItem.index.quality)} + % endfor diff --git a/themes-default/slim/views/inc_defs.mako b/themes-default/slim/views/inc_defs.mako index 7355ca109e..6383aae5b5 100644 --- a/themes-default/slim/views/inc_defs.mako +++ b/themes-default/slim/views/inc_defs.mako @@ -2,10 +2,10 @@ import cgi from medusa.common import Quality, qualityPresets, qualityPresetStrings %> -<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><% +<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute + allowed_qualities, preferred_qualities = Quality.split_quality(quality) if showTitle: - allowed_qualities, preferred_qualities = Quality.split_quality(quality) title = 'Allowed Quality:\n' if allowed_qualities: for curQual in allowed_qualities: @@ -21,6 +21,10 @@ title = ' title="' + cgi.escape(title.rstrip(), True) + '"' else: title = "" + + if customTitle: + title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"' + sum_allowed_qualities = quality & 0xFFFF sum_preferred_qualities = quality >> 16 set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV} diff --git a/themes-default/slim/views/manage_backlogOverview.mako b/themes-default/slim/views/manage_backlogOverview.mako index 983e70bbc7..50322d6940 100644 --- a/themes-default/slim/views/manage_backlogOverview.mako +++ b/themes-default/slim/views/manage_backlogOverview.mako @@ -144,13 +144,13 @@ const startVue = () => { % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]: <% - old_status, old_quality = Quality.split_composite_status(cur_result['status']) - archived_status = Quality.composite_status(ARCHIVED, old_quality) + old_status = cur_result['status'] + old_quality = cur_result['quality'] %> - - ${cur_result["episode_string"]} + + ${cur_result['episode_string']} - % if old_quality != Quality.NONE: + % if old_quality != Quality.NA: ${statusStrings[old_status]} ${renderQualityPill(old_quality)} % else: ${statusStrings[old_status]} @@ -171,7 +171,7 @@ const startVue = () => { search search % if old_status == DOWNLOADED: - search + search % endif diff --git a/themes-default/slim/views/manage_episodeStatuses.mako b/themes-default/slim/views/manage_episodeStatuses.mako index e181f16b7d..02bddbcfa0 100644 --- a/themes-default/slim/views/manage_episodeStatuses.mako +++ b/themes-default/slim/views/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa import common + from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> @@ -25,65 +25,65 @@ const startVue = () => {

 {{header}}
 % if not whichStatus or (whichStatus and not ep_counts):
-% if whichStatus:
-None of your episodes have status ${common.statusStrings[whichStatus]}
-% endif
-Manage episodes with status
+    % if whichStatus:
+    None of your episodes have status ${statusStrings[int(whichStatus)]}
+    % endif
+    Manage episodes with status
 % else:
-Shows containing ${common.statusStrings[whichStatus]} episodes
-<%
-    if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED:
-        row_class = "good"
-    else:
-        row_class = common.Overview.overviewStrings[int(whichStatus)]
-%>
-Set checked shows/episodes to
-% for cur_series in sorted_show_ids:
-    <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
-    ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
-% endfor
+    Shows containing ${statusStrings[int(whichStatus)]} episodes
+    <%
+        if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED):
+            row_class = "good"
+        else:
+            row_class = Overview.overviewStrings[int(whichStatus)]
+    %>
+    Set checked shows/episodes to
+    % for cur_series in sorted_show_ids:
+        <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
+        ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
+    % endfor
 % endif
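The `row_class` simplification above follows from the same refactor: the old template's list concatenation implies that `Quality.DOWNLOADED` and `Quality.ARCHIVED` were lists of packed codes, one per quality, so the membership test had to splice them onto the plain statuses. A sketch under that assumption (constants abbreviated, values illustrative):

IGNORED, SNATCHED, DOWNLOADED, ARCHIVED = 7, 2, 4, 6   # illustrative values
QUALITIES = [1, 2, 4, 8]                               # abbreviated quality bits

def composite_status(status, quality):
    # Assumed legacy packing, as in the sketch further up.
    return status + 100 * quality

# Legacy: the plain statuses plus every packed DOWNLOADED/ARCHIVED variant.
legacy_good = (
    [IGNORED, SNATCHED]
    + [composite_status(DOWNLOADED, q) for q in QUALITIES]
    + [composite_status(ARCHIVED, q) for q in QUALITIES]
)

# New: statuses are plain ints, so a flat tuple is enough.
new_good = (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED) if False else (IGNORED, SNATCHED, DOWNLOADED, ARCHIVED)

assert composite_status(DOWNLOADED, 8) in legacy_good
assert DOWNLOADED in new_good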
diff --git a/themes-default/slim/views/partials/showheader.mako b/themes-default/slim/views/partials/showheader.mako
index 28a2352dd2..1dcb1fd569 100644
--- a/themes-default/slim/views/partials/showheader.mako
+++ b/themes-default/slim/views/partials/showheader.mako
@@ -173,12 +173,12 @@
 % if show.quality in qualityPresets:
 ${renderQualityPill(show.quality)}
 % else:
-% if allowed_qualities:
-Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
-% endif
-% if preferred_qualities:
-Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
-% endif
+    % if allowed_qualities:
+    Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
+    % endif
+    % if preferred_qualities:
+    Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
+    % endif
 % endif
 % if show.network and show.airs:
 Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network}
@@ -266,15 +266,12 @@
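The quality-chooser change below drops the `val < ${Quality.UNKNOWN}` upper bound because the renumbering moved UNKNOWN from the top of the bit range (1 << 15) down to 1, just above NA = 0. A sketch of the two orderings, with values taken from the old and new Quality tables:

# Old layout: UNKNOWN sat above every real quality, so the preferred
# list needed an upper bound to exclude it.
OLD_NONE, OLD_SDTV, OLD_UNKNOWN = 0, 1, 1 << 15
old_preferred = [v for v in (OLD_NONE, OLD_SDTV, OLD_UNKNOWN)
                 if OLD_NONE < v < OLD_UNKNOWN]
assert old_preferred == [OLD_SDTV]

# New layout: UNKNOWN = 1 is an ordinary low value, so `> NA` suffices
# for both the allowed and the preferred lists.
NA, UNKNOWN, SDTV, UHD_8K_BLURAY = 0, 1, 1 << 1, 1 << 15
new_preferred = [v for v in (NA, UNKNOWN, SDTV, UHD_8K_BLURAY) if v > NA]
assert new_preferred == [UNKNOWN, SDTV, UHD_8K_BLURAY]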
diff --git a/themes-default/slim/views/vue-components/quality-chooser.mako b/themes-default/slim/views/vue-components/quality-chooser.mako index 521c5e28aa..8de8a6ceb7 100644 --- a/themes-default/slim/views/vue-components/quality-chooser.mako +++ b/themes-default/slim/views/vue-components/quality-chooser.mako @@ -130,11 +130,11 @@ Vue.component('quality-chooser', { }, allowedQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE}); + .filter(val => val > ${Quality.NA}); }, preferredQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN}); + .filter(val => val > ${Quality.NA}); } }, asyncComputed: { diff --git a/themes/dark/assets/js/ajax-episode-search.js b/themes/dark/assets/js/ajax-episode-search.js index 907fbc29c6..81ceaebb38 100644 --- a/themes/dark/assets/js/ajax-episode-search.js +++ b/themes/dark/assets/js/ajax-episode-search.js @@ -30,7 +30,6 @@ function updateImages(data) { const img = el.children('img[data-ep-search]'); const parent = el.parent(); if (el) { - let rSearchTerm = ''; if (ep.searchstatus.toLowerCase() === 'searching') { // El=$('td#' + ep.season + 'x' + ep.episode + '.search img'); img.prop('title', 'Searching'); @@ -54,8 +53,11 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - rSearchTerm = /(\w+(\s\((\bBest\b|\bProper\b)\))?)\s\((.+?)\)/; - htmlContent = ep.status.replace(rSearchTerm, "$1" + ' ' + "$4" + ''); // eslint-disable-line quotes, no-useless-concat + let qualityPill = ''; + if (ep.quality_style !== 'na') { + qualityPill = ' ' + ep.quality_name + ''; + } + htmlContent = ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +149,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +187,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes/dark/assets/js/ajax-episode-search.js.map b/themes/dark/assets/js/ajax-episode-search.js.map index 5c7c94a0da..45a524866d 100644 --- a/themes/dark/assets/js/ajax-episode-search.js.map +++ b/themes/dark/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + 
ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n let rSearchTerm = '';\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n rSearchTerm = /(\\w+(\\s\\((\\bBest\\b|\\bProper\\b)\\))?)\\s\\((.+?)\\)/;\n htmlContent = ep.status.replace(rSearchTerm, \"$1\" + ' ' + \"$4\" + ''); // eslint-disable-line quotes, no-useless-concat\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n let htmlContent;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Applying the quality class\n const rSearchTerm = /(\\w+)\\s\\((.+?)\\)/;\n htmlContent = data.result.replace(rSearchTerm, '$1 $2');\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return 
false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 
'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return 
false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file diff --git a/themes/dark/templates/config.mako b/themes/dark/templates/config.mako index 8a95086122..59f9dc6688 100644 --- a/themes/dark/templates/config.mako +++ b/themes/dark/templates/config.mako @@ -21,7 +21,7 @@ User:{{localUser}} Program Folder:{{programDir}} Config File:{{configFile}} - Database File:{{dbFilename}} + Database File:{{dbPath}} Cache Folder:{{cacheDir}} Log Folder:{{logDir}} Arguments:
{{prettyPrintJSON(appArgs)}}
@@ -50,7 +50,7 @@ const component = { commitHash: undefined, configFile: undefined, databaseVersion: undefined, - dbFilename: undefined, + dbPath: undefined, githubUrl: undefined, locale: undefined, localUser: undefined, @@ -73,7 +73,7 @@ const component = { this.commitHash = data.commitHash; this.configFile = data.configFile; this.databaseVersion = data.databaseVersion; - this.dbFilename = data.dbFilename; + this.dbPath = data.dbPath; this.githubUrl = data.githubUrl; this.locale = data.locale; this.localUser = data.localUser; diff --git a/themes/dark/templates/displayShow.mako b/themes/dark/templates/displayShow.mako index 0590d815e9..2eacef92b0 100644 --- a/themes/dark/templates/displayShow.mako +++ b/themes/dark/templates/displayShow.mako @@ -4,7 +4,7 @@ import urllib import ntpath from medusa import app, helpers, subtitles, sbdatetime, network_timezones - from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED + from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa.common import Quality, qualityPresets, statusStrings, Overview from medusa.helper.common import pretty_file_size from medusa.indexers.indexer_api import indexerApi @@ -236,9 +236,9 @@ const startVue = () => { ${epResult["name"]} - ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''} + ${epLoc or ''} - % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if epResult["file_size"]: ${pretty_file_size(epResult["file_size"])} % endif @@ -256,7 +256,7 @@ const startVue = () => { % endif - % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]: <% filename = epResult['location'] for rootDir in app.ROOT_DIRS: @@ -269,9 +269,9 @@ const startVue = () => { % for flag in (epResult["subtitles"] or '').split(','): - % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]: + % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]: % if flag != 'und': - + ${flag} % else: @@ -280,15 +280,18 @@ const startVue = () => { % endif % endfor - <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %> - % if cur_quality != Quality.NONE: + <% + cur_status = int(epResult['status']) + cur_quality = int(epResult['quality']) + %> + % if cur_quality != Quality.NA: ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)} % else: ${statusStrings[cur_status]} % endif % if int(epResult["season"]) != 0: - % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED ) and app.USE_FAILED_DOWNLOADS: + % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED): retry % else: search @@ -297,7 +300,7 @@ const startVue = () => { % else: search % endif - % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]: + % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): search subtitles % endif diff --git 
a/themes/dark/templates/history.mako b/themes/dark/templates/history.mako index ee07d472d0..f0ce4fd056 100644 --- a/themes/dark/templates/history.mako +++ b/themes/dark/templates/history.mako @@ -9,7 +9,7 @@ from medusa import providers from medusa.sbdatetime import sbdatetime from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED - from medusa.common import Quality, statusStrings, Overview + from medusa.common import statusStrings from medusa.show.history import History from medusa.providers.generic_provider import GenericProvider %> @@ -36,7 +36,8 @@ const startVue = () => { return { // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality 0: node => $(node).find('time').attr('datetime'), - 1: node => $(node).find('a').text() + 1: node => $(node).find('a').text(), + 4: node => $(node).attr('quality') }; } // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded @@ -49,10 +50,10 @@ const startVue = () => { if ($.isMeta({ subtitles: 'enabled' }, [true])) { // 4: Subtitled, 5: Quality compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'), - compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[5] = node => $(node).attr('quality') } else { // 4: Quality - compactExtract[4] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text() + compactExtract[4] = node => $(node).attr('quality') } return compactExtract; })(), @@ -149,7 +150,6 @@ const startVue = () => { % for hItem in historyResults: - <% composite = Quality.split_composite_status(int(hItem.action)) %> <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %> @@ -157,11 +157,11 @@ const startVue = () => { ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''} - - % if composite.status == SUBTITLED: + + % if hItem.action == SUBTITLED: % endif - ${statusStrings[composite.status]} + ${statusStrings[hItem.action]} % if hItem.manually_searched: % endif @@ -169,8 +169,9 @@ const startVue = () => { % endif + - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if hItem.action in [DOWNLOADED, ARCHIVED]: % if hItem.provider != "-1": ${hItem.provider} % else: @@ -178,7 +179,7 @@ const startVue = () => { % endif % else: % if hItem.provider > 0: - % if composite.status in [SNATCHED, FAILED]: + % if hItem.action in [SNATCHED, FAILED]: <% provider = providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %> % if provider is not None: ${provider.name} @@ -191,8 +192,9 @@ const startVue = () => { % endif % endif - ${composite.quality} - ${renderQualityPill(composite.quality)} + + ${renderQualityPill(hItem.quality)} + % endfor @@ -230,8 +232,7 @@ const startVue = () => { % for cur_action in sorted(hItem.actions, key=lambda x: x.date): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SNATCHED: + % if cur_action.action == SNATCHED: <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %> % if provider is not None: ${provider.name} @@ -245,15 +246,14 @@ const startVue = () => { missing provider % endif % endif - % if composite.status == FAILED: + % if cur_action.action == FAILED: % endif % endfor % for cur_action in sorted(hItem.actions): - <% composite = 
Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status in [DOWNLOADED, ARCHIVED]: + % if cur_action.action in [DOWNLOADED, ARCHIVED]: % if cur_action.provider != "-1": ${cur_action.provider} % else: @@ -265,8 +265,7 @@ const startVue = () => { % if app.USE_SUBTITLES: % for cur_action in sorted(hItem.actions): - <% composite = Quality.split_composite_status(int(cur_action.action)) %> - % if composite.status == SUBTITLED: + % if cur_action.action == SUBTITLED: ${cur_action.provider} / @@ -275,7 +274,9 @@ const startVue = () => { % endfor % endif - ${renderQualityPill(composite.quality)} + + ${renderQualityPill(hItem.index.quality)} + % endfor diff --git a/themes/dark/templates/inc_defs.mako b/themes/dark/templates/inc_defs.mako index 7355ca109e..6383aae5b5 100644 --- a/themes/dark/templates/inc_defs.mako +++ b/themes/dark/templates/inc_defs.mako @@ -2,10 +2,10 @@ import cgi from medusa.common import Quality, qualityPresets, qualityPresetStrings %> -<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><% +<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><% # Build a string of quality names to use as title attribute + allowed_qualities, preferred_qualities = Quality.split_quality(quality) if showTitle: - allowed_qualities, preferred_qualities = Quality.split_quality(quality) title = 'Allowed Quality:\n' if allowed_qualities: for curQual in allowed_qualities: @@ -21,6 +21,10 @@ title = ' title="' + cgi.escape(title.rstrip(), True) + '"' else: title = "" + + if customTitle: + title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"' + sum_allowed_qualities = quality & 0xFFFF sum_preferred_qualities = quality >> 16 set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV} diff --git a/themes/dark/templates/manage_backlogOverview.mako b/themes/dark/templates/manage_backlogOverview.mako index 983e70bbc7..50322d6940 100644 --- a/themes/dark/templates/manage_backlogOverview.mako +++ b/themes/dark/templates/manage_backlogOverview.mako @@ -144,13 +144,13 @@ const startVue = () => { % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]: <% - old_status, old_quality = Quality.split_composite_status(cur_result['status']) - archived_status = Quality.composite_status(ARCHIVED, old_quality) + old_status = cur_result['status'] + old_quality = cur_result['quality'] %> - - ${cur_result["episode_string"]} + + ${cur_result['episode_string']} - % if old_quality != Quality.NONE: + % if old_quality != Quality.NA: ${statusStrings[old_status]} ${renderQualityPill(old_quality)} % else: ${statusStrings[old_status]} @@ -171,7 +171,7 @@ const startVue = () => { search search % if old_status == DOWNLOADED: - search + search % endif diff --git a/themes/dark/templates/manage_episodeStatuses.mako b/themes/dark/templates/manage_episodeStatuses.mako index e181f16b7d..02bddbcfa0 100644 --- a/themes/dark/templates/manage_episodeStatuses.mako +++ b/themes/dark/templates/manage_episodeStatuses.mako @@ -1,6 +1,6 @@ <%inherit file="/layouts/main.mako"/> <%! - from medusa import common + from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST from medusa import app %> <%block name="scripts"> @@ -25,65 +25,65 @@ const startVue = () => {

 {{header}}
 % if not whichStatus or (whichStatus and not ep_counts):
-% if whichStatus:
-None of your episodes have status ${common.statusStrings[whichStatus]}
-% endif
-Manage episodes with status
+    % if whichStatus:
+    None of your episodes have status ${statusStrings[int(whichStatus)]}
+    % endif
+    Manage episodes with status
 % else:
-Shows containing ${common.statusStrings[whichStatus]} episodes
-<%
-    if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED:
-        row_class = "good"
-    else:
-        row_class = common.Overview.overviewStrings[int(whichStatus)]
-%>
-Set checked shows/episodes to
-% for cur_series in sorted_show_ids:
-    <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
-    ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
-% endfor
+    Shows containing ${statusStrings[int(whichStatus)]} episodes
+    <%
+        if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED):
+            row_class = "good"
+        else:
+            row_class = Overview.overviewStrings[int(whichStatus)]
+    %>
+    Set checked shows/episodes to
+    % for cur_series in sorted_show_ids:
+        <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
+        ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
+    % endfor
 % endif
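For the Allowed/Preferred pills in the showheader diff that follows: a show's quality setting packs allowed qualities into the low 16 bits and preferred qualities into the high 16 bits, which is what the `quality & 0xFFFF` and `quality >> 16` lines added to inc_defs.mako read back. A minimal sketch of that packing; `split_quality` here is an illustration, not the exact medusa.common implementation:

HDTV = 1 << 3
FULLHDTV = 1 << 5
HDWEBDL = 1 << 6

def combine_qualities(allowed, preferred):
    # Low 16 bits: allowed qualities; high 16 bits: preferred qualities.
    packed = 0
    for q in allowed:
        packed |= q
    for q in preferred:
        packed |= q << 16
    return packed

def split_quality(packed):
    # Recover the two lists from the packed show-quality integer.
    allowed = [1 << b for b in range(16) if packed & (1 << b)]
    preferred = [1 << b for b in range(16) if (packed >> 16) & (1 << b)]
    return allowed, preferred

show_quality = combine_qualities([HDTV, HDWEBDL], [FULLHDTV])
assert split_quality(show_quality) == ([HDTV, HDWEBDL], [FULLHDTV])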
diff --git a/themes/dark/templates/partials/showheader.mako b/themes/dark/templates/partials/showheader.mako
index 28a2352dd2..1dcb1fd569 100644
--- a/themes/dark/templates/partials/showheader.mako
+++ b/themes/dark/templates/partials/showheader.mako
@@ -173,12 +173,12 @@
 % if show.quality in qualityPresets:
 ${renderQualityPill(show.quality)}
 % else:
-% if allowed_qualities:
-Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
-% endif
-% if preferred_qualities:
-Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
-% endif
+    % if allowed_qualities:
+    Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
+    % endif
+    % if preferred_qualities:
+    Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
+    % endif
 % endif
 % if show.network and show.airs:
 Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network}
@@ -266,15 +266,12 @@
diff --git a/themes/dark/templates/vue-components/quality-chooser.mako b/themes/dark/templates/vue-components/quality-chooser.mako index 521c5e28aa..8de8a6ceb7 100644 --- a/themes/dark/templates/vue-components/quality-chooser.mako +++ b/themes/dark/templates/vue-components/quality-chooser.mako @@ -130,11 +130,11 @@ Vue.component('quality-chooser', { }, allowedQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE}); + .filter(val => val > ${Quality.NA}); }, preferredQualityList() { return Object.keys(this.qualityStrings) - .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN}); + .filter(val => val > ${Quality.NA}); } }, asyncComputed: { diff --git a/themes/light/assets/js/ajax-episode-search.js b/themes/light/assets/js/ajax-episode-search.js index 907fbc29c6..81ceaebb38 100644 --- a/themes/light/assets/js/ajax-episode-search.js +++ b/themes/light/assets/js/ajax-episode-search.js @@ -30,7 +30,6 @@ function updateImages(data) { const img = el.children('img[data-ep-search]'); const parent = el.parent(); if (el) { - let rSearchTerm = ''; if (ep.searchstatus.toLowerCase() === 'searching') { // El=$('td#' + ep.season + 'x' + ep.episode + '.search img'); img.prop('title', 'Searching'); @@ -54,8 +53,11 @@ function updateImages(data) { enableLink(el); // Update Status and Quality - rSearchTerm = /(\w+(\s\((\bBest\b|\bProper\b)\))?)\s\((.+?)\)/; - htmlContent = ep.status.replace(rSearchTerm, "$1" + ' ' + "$4" + ''); // eslint-disable-line quotes, no-useless-concat + let qualityPill = ''; + if (ep.quality_style !== 'na') { + qualityPill = ' ' + ep.quality_name + ''; + } + htmlContent = ep.status + qualityPill; parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle'); } // Update the status column if it exists @@ -147,7 +149,6 @@ $.ajaxEpSearch = function(options) { function forcedSearch() { let imageName; let imageResult; - let htmlContent; const parent = selectedEpisode.parent(); @@ -186,11 +187,6 @@ $.ajaxEpSearch = function(options) { if (options.colorRow) { parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched'); } - // Applying the quality class - const rSearchTerm = /(\w+)\s\((.+?)\)/; - htmlContent = data.result.replace(rSearchTerm, '$1 $2'); - // Update the status column if it exists - parent.siblings('.col-status').html(htmlContent); // Only if the queuing was successful, disable the onClick event of the loading image disableLink(link); } diff --git a/themes/light/assets/js/ajax-episode-search.js.map b/themes/light/assets/js/ajax-episode-search.js.map index 5c7c94a0da..45a524866d 100644 --- a/themes/light/assets/js/ajax-episode-search.js.map +++ b/themes/light/assets/js/ajax-episode-search.js.map @@ -1 +1 @@ -{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id 
+ 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n let rSearchTerm = '';\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n rSearchTerm = /(\\w+(\\s\\((\\bBest\\b|\\bProper\\b)\\))?)\\s\\((.+?)\\)/;\n htmlContent = ep.status.replace(rSearchTerm, \"$1\" + ' ' + \"$4\" + ''); // eslint-disable-line quotes, no-useless-concat\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? 
searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n let htmlContent;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Applying the quality class\n const rSearchTerm = /(\\w+)\\s\\((.+?)\\)/;\n htmlContent = data.result.replace(rSearchTerm, '$1 $2');\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return 
false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"} \ No newline at end of file +{"version":3,"names":[],"mappings":"","sources":["js/ajax-episode-search.js"],"sourcesContent":["(function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c=\"function\"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error(\"Cannot find module '\"+i+\"'\");throw a.code=\"MODULE_NOT_FOUND\",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u=\"function\"==typeof require&&require,i=0;i {\n // Get td element for current ep\n const loadingImage = 'loading16.gif';\n const queuedImage = 'queued.png';\n const searchImage = 'search16.png';\n let htmlContent = '';\n // Try to get the Element\n const el = $('a[id=' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const img = el.children('img[data-ep-search]');\n const parent = el.parent();\n if (el) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'Searching');\n img.prop('src', 'images/' + loadingImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Queued');\n img.prop('alt', 'queued');\n img.prop('src', 'images/' + queuedImage);\n disableLink(el);\n htmlContent = ep.searchstatus;\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n // El=$('td#' + ep.season + 'x' + ep.episode + '.search img');\n img.prop('title', 'Searching');\n img.prop('alt', 'searching');\n img.parent().prop('class', 'epRetry');\n img.prop('src', 'images/' + searchImage);\n enableLink(el);\n\n // Update Status and Quality\n let qualityPill = '';\n if (ep.quality_style !== 'na') {\n qualityPill = ' ' + ep.quality_name + '';\n }\n htmlContent = ep.status + qualityPill;\n parent.closest('tr').prop('class', ep.overview + ' season-' + ep.season + ' seasonstyle');\n }\n // Update the status column if it exists\n parent.siblings('.col-status').html(htmlContent);\n }\n const elementCompleteEpisodes = $('a[id=forceUpdate-' + ep.indexer_id + 'x' + ep.series_id + 'x' + ep.season + 'x' + ep.episode + ']');\n const imageCompleteEpisodes = elementCompleteEpisodes.children('img');\n if (elementCompleteEpisodes) {\n if (ep.searchstatus.toLowerCase() === 'searching') {\n imageCompleteEpisodes.prop('title', 'Searching');\n imageCompleteEpisodes.prop('alt', 'Searching');\n imageCompleteEpisodes.prop('src', 'images/' + loadingImage);\n disableLink(elementCompleteEpisodes);\n } else if (ep.searchstatus.toLowerCase() === 'queued') {\n imageCompleteEpisodes.prop('title', 'Queued');\n imageCompleteEpisodes.prop('alt', 'queued');\n imageCompleteEpisodes.prop('src', 
'images/' + queuedImage);\n } else if (ep.searchstatus.toLowerCase() === 'finished') {\n imageCompleteEpisodes.prop('title', 'Forced Search');\n imageCompleteEpisodes.prop('alt', '[search]');\n imageCompleteEpisodes.prop('src', 'images/' + searchImage);\n if (ep.overview.toLowerCase() === 'snatched') {\n elementCompleteEpisodes.closest('tr').remove();\n } else {\n enableLink(elementCompleteEpisodes);\n }\n }\n }\n });\n}\n\nfunction checkManualSearches() {\n let pollInterval = 5000;\n\n // Try to get a indexer name and series id. If we can't get any, we request the manual search status for all shows.\n const indexerName = $('#indexer-name').val();\n const seriesId = $('#series-id').val();\n\n const url = seriesId === undefined ? searchStatusUrl : searchStatusUrl + '?indexername=' + indexerName + '&seriesid=' + seriesId;\n $.ajax({\n url,\n error() {\n pollInterval = 30000;\n },\n type: 'GET',\n dataType: 'JSON',\n complete() {\n setTimeout(checkManualSearches, pollInterval);\n },\n timeout: 15000 // Timeout every 15 secs\n }).done(data => {\n if (data.episodes) {\n pollInterval = 5000;\n } else {\n pollInterval = 15000;\n }\n updateImages(data);\n // CleanupManualSearches(data);\n });\n}\n\n$(document).ready(() => {\n checkManualSearches();\n});\n\n$.ajaxEpSearch = function (options) {\n options = $.extend({}, {\n size: 16,\n colorRow: false,\n loadingImage: 'loading16.gif',\n queuedImage: 'queued.png',\n noImage: 'no16.png',\n yesImage: 'yes16.png'\n }, options);\n\n $('.epRetry').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return false;\n }\n\n selectedEpisode = $(this);\n\n $('#forcedSearchModalFailed').modal('show');\n });\n\n function forcedSearch() {\n let imageName;\n let imageResult;\n\n const parent = selectedEpisode.parent();\n\n // Create var for anchor\n const link = selectedEpisode;\n\n // Create var for img under anchor and set options for the loading gif\n const img = selectedEpisode.children('img');\n img.prop('title', 'loading');\n img.prop('alt', '');\n img.prop('src', 'images/' + options.loadingImage);\n\n let url = selectedEpisode.prop('href');\n\n if (!failedDownload) {\n url = url.replace('retryEpisode', 'searchEpisode');\n }\n\n // Only pass the down_cur_quality flag when retryEpisode() is called\n if (qualityDownload && url.indexOf('retryEpisode') >= 0) {\n url += '&down_cur_quality=1';\n }\n\n // @TODO: Move to the API\n $.getJSON(url, data => {\n // If they failed then just put the red X\n if (data.result.toLowerCase() === 'failure') {\n imageName = options.noImage;\n imageResult = 'failed';\n } else {\n // If the snatch was successful then apply the\n // corresponding class and fill in the row appropriately\n imageName = options.loadingImage;\n imageResult = 'success';\n // Color the row\n if (options.colorRow) {\n parent.parent().removeClass('skipped wanted qual good unaired').addClass('snatched');\n }\n // Only if the queuing was successful, disable the onClick event of the loading image\n disableLink(link);\n }\n\n // Put the corresponding image as the result of queuing of the manual search\n img.prop('title', imageResult);\n img.prop('alt', imageResult);\n img.prop('height', options.size);\n img.prop('src', 'images/' + imageName);\n });\n\n // Don't follow the link\n return false;\n }\n\n $('.epSearch').on('click', function (event) {\n event.preventDefault();\n\n // Check if we have disabled the click\n if ($(this).prop('enableClick') === '0') {\n return 
false;\n }\n\n selectedEpisode = $(this);\n\n // @TODO: Replace this with an easier to read selector\n if ($(this).parent().parent().children('.col-status').children('.quality').length > 0) {\n $('#forcedSearchModalQuality').modal('show');\n } else {\n forcedSearch();\n }\n });\n\n $('.epManualSearch').on('click', function (event) {\n event.preventDefault();\n\n // @TODO: Omg this disables all the manual snatch icons, when one is clicked\n if ($(this).hasClass('disabled')) {\n return false;\n }\n\n $('.epManualSearch').addClass('disabled');\n $('.epManualSearch').fadeTo(1, 0.1);\n\n const url = this.href;\n if (event.shiftKey || event.ctrlKey || event.which === 2) {\n window.open(url, '_blank');\n } else {\n window.location = url;\n }\n });\n\n $('#forcedSearchModalFailed .btn-medusa').on('click', function () {\n failedDownload = $(this).text().toLowerCase() === 'yes';\n $('#forcedSearchModalQuality').modal('show');\n });\n\n $('#forcedSearchModalQuality .btn-medusa').on('click', function () {\n qualityDownload = $(this).text().toLowerCase() === 'yes';\n forcedSearch();\n });\n};\n\n},{}]},{},[1]);\n"],"file":"ajax-episode-search.js"}
\ No newline at end of file
diff --git a/themes/light/templates/config.mako b/themes/light/templates/config.mako
index 8a95086122..59f9dc6688 100644
--- a/themes/light/templates/config.mako
+++ b/themes/light/templates/config.mako
@@ -21,7 +21,7 @@
     User: {{localUser}}
     Program Folder: {{programDir}}
     Config File: {{configFile}}
-    Database File: {{dbFilename}}
+    Database File: {{dbPath}}
     Cache Folder: {{cacheDir}}
     Log Folder: {{logDir}}
     Arguments:
     {{prettyPrintJSON(appArgs)}}
@@ -50,7 +50,7 @@ const component = {
         commitHash: undefined,
         configFile: undefined,
         databaseVersion: undefined,
-        dbFilename: undefined,
+        dbPath: undefined,
         githubUrl: undefined,
         locale: undefined,
         localUser: undefined,
@@ -73,7 +73,7 @@
         this.commitHash = data.commitHash;
         this.configFile = data.configFile;
         this.databaseVersion = data.databaseVersion;
-        this.dbFilename = data.dbFilename;
+        this.dbPath = data.dbPath;
         this.githubUrl = data.githubUrl;
         this.locale = data.locale;
         this.localUser = data.localUser;
diff --git a/themes/light/templates/displayShow.mako b/themes/light/templates/displayShow.mako
index 0590d815e9..2eacef92b0 100644
--- a/themes/light/templates/displayShow.mako
+++ b/themes/light/templates/displayShow.mako
@@ -4,7 +4,7 @@
     import urllib
     import ntpath
     from medusa import app, helpers, subtitles, sbdatetime, network_timezones
-    from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED
+    from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST
     from medusa.common import Quality, qualityPresets, statusStrings, Overview
     from medusa.helper.common import pretty_file_size
     from medusa.indexers.indexer_api import indexerApi
@@ -236,9 +236,9 @@ const startVue = () => {
     ${epResult["name"]}
-    ${epLoc if Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED] else ''}
+    ${epLoc or ''}
-    % if epResult["file_size"] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]:
+    % if epResult["file_size"]:
     ${pretty_file_size(epResult["file_size"])}
     % endif
@@ -256,7 +256,7 @@ const startVue = () => {
     % endif
-    % if app.DOWNLOAD_URL and epResult['location'] and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]:
+    % if app.DOWNLOAD_URL and epResult['location'] and int(epResult['status']) in [DOWNLOADED, ARCHIVED]:
     <%
         filename = epResult['location']
         for rootDir in app.ROOT_DIRS:
@@ -269,9 +269,9 @@ const startVue = () => {
     % for flag in (epResult["subtitles"] or '').split(','):
-        % if flag.strip() and Quality.split_composite_status(int(epResult['status'])).status in [DOWNLOADED, ARCHIVED]:
+        % if flag.strip() and int(epResult['status']) in [ARCHIVED, DOWNLOADED, IGNORED, SKIPPED]:
             % if flag != 'und':
-
+
                 ${flag}
             % else:
@@ -280,15 +280,18 @@ const startVue = () => {
             % endif
     % endfor
-    <% cur_status, cur_quality = Quality.split_composite_status(int(epResult["status"])) %>
-    % if cur_quality != Quality.NONE:
+    <%
+        cur_status = int(epResult['status'])
+        cur_quality = int(epResult['quality'])
+    %>
+    % if cur_quality != Quality.NA:
         ${statusStrings[cur_status]} ${renderQualityPill(cur_quality)}
     % else:
         ${statusStrings[cur_status]}
     % endif
     % if int(epResult["season"]) != 0:
-        % if (int(epResult["status"]) in Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.SNATCHED_BEST + Quality.DOWNLOADED) and app.USE_FAILED_DOWNLOADS:
+        % if app.USE_FAILED_DOWNLOADS and int(epResult["status"]) in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED):
             retry
         % else:
             search
@@ -297,7 +300,7 @@ const startVue = () => {
     % else:
         search
     % endif
-    % if int(epResult["status"]) not in Quality.SNATCHED + Quality.SNATCHED_PROPER and app.USE_SUBTITLES and show.subtitles and epResult["location"]:
+    % if app.USE_SUBTITLES and show.subtitles and epResult["location"] and int(epResult["status"]) not in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST):
         search subtitles
     % endif
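Every displayShow.mako hunk above follows one pattern: the old template had to unpack a single composite integer with `Quality.split_composite_status` before it could compare statuses, while the new schema reads `status` and `quality` from separate episode columns. A minimal sketch of the before/after access patterns; the `status + 100 * quality` packing is an assumption about the pre-refactor code, since the formula itself does not appear in this diff:

    from collections import namedtuple

    # Illustrative status constants; values mirror medusa.common.
    DOWNLOADED = 4
    ARCHIVED = 6

    SplitResult = namedtuple('SplitResult', ['status', 'quality'])


    def split_composite_status(composite):
        # Pre-refactor unpacking (assumed packing: status + 100 * quality).
        return SplitResult(status=composite % 100, quality=composite // 100)


    # Old template idiom: unpack the composite on every comparison.
    old_row = {'status': DOWNLOADED + 100 * 8}  # quality flag 8 packed into status
    assert split_composite_status(old_row['status']).status in (DOWNLOADED, ARCHIVED)

    # New template idiom: status and quality are independent columns,
    # so templates compare the raw values directly.
    new_row = {'status': DOWNLOADED, 'quality': 8}
    cur_status = int(new_row['status'])
    cur_quality = int(new_row['quality'])
    assert cur_status in (DOWNLOADED, ARCHIVED)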
diff --git a/themes/light/templates/history.mako b/themes/light/templates/history.mako
index ee07d472d0..f0ce4fd056 100644
--- a/themes/light/templates/history.mako
+++ b/themes/light/templates/history.mako
@@ -9,7 +9,7 @@
     from medusa import providers
     from medusa.sbdatetime import sbdatetime
     from medusa.common import SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, FAILED, DOWNLOADED, SUBTITLED
-    from medusa.common import Quality, statusStrings, Overview
+    from medusa.common import statusStrings
     from medusa.show.history import History
     from medusa.providers.generic_provider import GenericProvider
 %>
@@ -36,7 +36,8 @@ const startVue = () => {
             return {
                 // 0: Time, 1: Episode, 2: Action, 3: Provider, 4: Quality
                 0: node => $(node).find('time').attr('datetime'),
-                1: node => $(node).find('a').text()
+                1: node => $(node).find('a').text(),
+                4: node => $(node).attr('quality')
             };
         }
         // 0: Time, 1: Episode, 2: Snatched, 3: Downloaded
@@ -49,10 +50,10 @@ const startVue = () => {
         if ($.isMeta({ subtitles: 'enabled' }, [true])) {
             // 4: Subtitled, 5: Quality
             compactExtract[4] = node => $(node).find('img').attr('title') === undefined ? '' : $(node).find('img').attr('title'),
-            compactExtract[5] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text()
+            compactExtract[5] = node => $(node).attr('quality')
         } else {
             // 4: Quality
-            compactExtract[4] = node => $(node).find("span").text() === undefined ? '' : $(node).find("span").text()
+            compactExtract[4] = node => $(node).attr('quality')
         }
         return compactExtract;
     })(),
@@ -149,7 +150,6 @@ const startVue = () => {
     % for hItem in historyResults:
-        <% composite = Quality.split_composite_status(int(hItem.action)) %>
         <% airDate = sbdatetime.sbfdatetime(datetime.strptime(str(hItem.date), History.date_format), show_seconds=True) %>
@@ -157,11 +157,11 @@ const startVue = () => {
         ${hItem.show_name} - ${"S%02i" % int(hItem.season)}${"E%02i" % int(hItem.episode)} ${'Proper' if hItem.proper_tags else ''}
-
+
-        % if composite.status == SUBTITLED:
+        % if hItem.action == SUBTITLED:
         % endif
-        ${statusStrings[composite.status]}
+        ${statusStrings[hItem.action]}
         % if hItem.manually_searched:
         % endif
@@ -169,8 +169,9 @@ const startVue = () => {
         % endif
+
-        % if composite.status in [DOWNLOADED, ARCHIVED]:
+        % if hItem.action in [DOWNLOADED, ARCHIVED]:
             % if hItem.provider != "-1":
                 ${hItem.provider}
             % else:
@@ -178,7 +179,7 @@ const startVue = () => {
             % endif
         % else:
             % if hItem.provider > 0:
-                % if composite.status in [SNATCHED, FAILED]:
+                % if hItem.action in [SNATCHED, FAILED]:
                     <% provider = providers.get_provider_class(GenericProvider.make_id(hItem.provider)) %>
                     % if provider is not None:
                         ${provider.name}
@@ -191,8 +192,9 @@ const startVue = () => {
             % endif
         % endif
-        ${composite.quality}
-        ${renderQualityPill(composite.quality)}
+
+        ${renderQualityPill(hItem.quality)}
+
     % endfor
@@ -230,8 +232,7 @@ const startVue = () => {
         % for cur_action in sorted(hItem.actions, key=lambda x: x.date):
-            <% composite = Quality.split_composite_status(int(cur_action.action)) %>
-            % if composite.status == SNATCHED:
+            % if cur_action.action == SNATCHED:
                 <% provider = providers.get_provider_class(GenericProvider.make_id(cur_action.provider)) %>
                 % if provider is not None:
                     ${provider.name}
@@ -245,15 +246,14 @@ const startVue = () => {
                     missing provider
                 % endif
             % endif
-            % if composite.status == FAILED:
+            % if cur_action.action == FAILED:
             % endif
         % endfor
         % for cur_action in sorted(hItem.actions):
-            <% composite = Quality.split_composite_status(int(cur_action.action)) %>
-            % if composite.status in [DOWNLOADED, ARCHIVED]:
+            % if cur_action.action in [DOWNLOADED, ARCHIVED]:
                 % if cur_action.provider != "-1":
                     ${cur_action.provider}
                 % else:
@@ -265,8 +265,7 @@ const startVue = () => {
         % if app.USE_SUBTITLES:
             % for cur_action in sorted(hItem.actions):
-                <% composite = Quality.split_composite_status(int(cur_action.action)) %>
-                % if composite.status == SUBTITLED:
+                % if cur_action.action == SUBTITLED:
                     ${cur_action.provider}
                     /
             % endfor
         % endif
@@ -275,7 +274,9 @@ const startVue = () => {
-        ${renderQualityPill(composite.quality)}
+
+        ${renderQualityPill(hItem.index.quality)}
+
     % endfor
diff --git a/themes/light/templates/inc_defs.mako b/themes/light/templates/inc_defs.mako
index 7355ca109e..6383aae5b5 100644
--- a/themes/light/templates/inc_defs.mako
+++ b/themes/light/templates/inc_defs.mako
@@ -2,10 +2,10 @@
     import cgi
     from medusa.common import Quality, qualityPresets, qualityPresetStrings
 %>
-<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None)"><%
+<%def name="renderQualityPill(quality, showTitle=False, overrideClass=None, customTitle='')"><%
     # Build a string of quality names to use as title attribute
+    allowed_qualities, preferred_qualities = Quality.split_quality(quality)
     if showTitle:
-        allowed_qualities, preferred_qualities = Quality.split_quality(quality)
         title = 'Allowed Quality:\n'
         if allowed_qualities:
             for curQual in allowed_qualities:
@@ -21,6 +21,10 @@
         title = ' title="' + cgi.escape(title.rstrip(), True) + '"'
     else:
         title = ""
+
+    if customTitle:
+        title = ' title="' + cgi.escape(str(customTitle).rstrip(), True) + '"'
+
     sum_allowed_qualities = quality & 0xFFFF
     sum_preferred_qualities = quality >> 16
     set_hdtv = {Quality.HDTV, Quality.RAWHDTV, Quality.FULLHDTV}
diff --git a/themes/light/templates/manage_backlogOverview.mako b/themes/light/templates/manage_backlogOverview.mako
index 983e70bbc7..50322d6940 100644
--- a/themes/light/templates/manage_backlogOverview.mako
+++ b/themes/light/templates/manage_backlogOverview.mako
@@ -144,13 +144,13 @@ const startVue = () => {
     % for cur_result in showSQLResults[(cur_show.indexer, cur_show.series_id)]:
         <%
-            old_status, old_quality = Quality.split_composite_status(cur_result['status'])
-            archived_status = Quality.composite_status(ARCHIVED, old_quality)
+            old_status = cur_result['status']
+            old_quality = cur_result['quality']
         %>
-
-        ${cur_result["episode_string"]}
+
+        ${cur_result['episode_string']}
-        % if old_quality != Quality.NONE:
+        % if old_quality != Quality.NA:
             ${statusStrings[old_status]} ${renderQualityPill(old_quality)}
         % else:
             ${statusStrings[old_status]}
@@ -171,7 +171,7 @@ const startVue = () => {
         search
         search
         % if old_status == DOWNLOADED:
-            search
+            search
         % endif
diff --git a/themes/light/templates/manage_episodeStatuses.mako b/themes/light/templates/manage_episodeStatuses.mako
index e181f16b7d..02bddbcfa0 100644
--- a/themes/light/templates/manage_episodeStatuses.mako
+++ b/themes/light/templates/manage_episodeStatuses.mako
@@ -1,6 +1,6 @@
 <%inherit file="/layouts/main.mako"/>
 <%!
-    from medusa import common
+    from medusa.common import Overview, statusStrings, SKIPPED, WANTED, UNAIRED, ARCHIVED, IGNORED, FAILED, DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST
     from medusa import app
 %>
 <%block name="scripts">
@@ -25,65 +25,65 @@ const startVue = () => {
 {{header}}
 % if not whichStatus or (whichStatus and not ep_counts):
-% if whichStatus:
-
-None of your episodes have status ${common.statusStrings[whichStatus]}
-
-% endif
-
-Manage episodes with status
-
+    % if whichStatus:
+
+        None of your episodes have status ${statusStrings[int(whichStatus)]}
+
+    % endif
+
+    Manage episodes with status
+
 % else:
-
-Shows containing ${common.statusStrings[whichStatus]} episodes
-
-<%
-    if int(whichStatus) in [common.IGNORED, common.SNATCHED, common.SNATCHED_PROPER, common.SNATCHED_BEST] + common.Quality.DOWNLOADED + common.Quality.ARCHIVED:
-        row_class = "good"
-    else:
-        row_class = common.Overview.overviewStrings[int(whichStatus)]
-%>
-
-Set checked shows/episodes to
-
-% for cur_series in sorted_show_ids:
-    <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
-    ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
+
+    Shows containing ${statusStrings[int(whichStatus)]} episodes
+
+    <%
+        if int(whichStatus) in (IGNORED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, DOWNLOADED, ARCHIVED):
+            row_class = "good"
+        else:
+            row_class = Overview.overviewStrings[int(whichStatus)]
+    %>
+
+    Set checked shows/episodes to
+
+    % for cur_series in sorted_show_ids:
+        <% series_id = str(cur_series[0]) + '-' + str(cur_series[1]) %>
+        ${show_names[(cur_series[0], cur_series[1])]} (${ep_counts[(cur_series[0], cur_series[1])]})
+    % endfor
 % endif
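The inc_defs.mako hunk above hoists `Quality.split_quality(quality)` out of the `showTitle` branch and adds a `customTitle` override, and the mask-and-shift lines right after it (`quality & 0xFFFF`, `quality >> 16`) reveal the layout that call decodes: allowed quality flags in the low 16 bits, preferred flags in the high 16. A rough sketch of that split, with the 16-bit boundary taken from those two template lines:

    def split_quality(quality):
        # Decode a combined quality group: allowed flags live in the low
        # 16 bits, preferred flags in the high 16 (the same layout as the
        # `quality & 0xFFFF` / `quality >> 16` lines in inc_defs.mako).
        allowed = [1 << bit for bit in range(16) if quality & (1 << bit)]
        preferred = [1 << bit for bit in range(16) if (quality >> 16) & (1 << bit)]
        return allowed, preferred


    # A group that allows quality flag 1 << 3 and prefers flag 1 << 7:
    combined = (1 << 3) | ((1 << 7) << 16)
    assert split_quality(combined) == ([1 << 3], [1 << 7])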
diff --git a/themes/light/templates/partials/showheader.mako b/themes/light/templates/partials/showheader.mako
index 28a2352dd2..1dcb1fd569 100644
--- a/themes/light/templates/partials/showheader.mako
+++ b/themes/light/templates/partials/showheader.mako
@@ -173,12 +173,12 @@
     % if show.quality in qualityPresets:
         ${renderQualityPill(show.quality)}
     % else:
-    % if allowed_qualities:
-    Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
-    % endif
-    % if preferred_qualities:
-    Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
-    % endif
+        % if allowed_qualities:
+            Allowed: ${', '.join([capture(renderQualityPill, x) for x in sorted(allowed_qualities)])}${'<br>' if preferred_qualities else ''}
+        % endif
+        % if preferred_qualities:
+            Preferred: ${', '.join([capture(renderQualityPill, x) for x in sorted(preferred_qualities)])}
+        % endif
     % endif
     % if show.network and show.airs:
         Originally Airs: ${show.airs} ${"" if network_timezones.test_timeformat(show.airs) else "(invalid Timeformat)"} on ${show.network}
@@ -266,15 +266,12 @@
diff --git a/themes/light/templates/vue-components/quality-chooser.mako b/themes/light/templates/vue-components/quality-chooser.mako
index 521c5e28aa..8de8a6ceb7 100644
--- a/themes/light/templates/vue-components/quality-chooser.mako
+++ b/themes/light/templates/vue-components/quality-chooser.mako
@@ -130,11 +130,11 @@ Vue.component('quality-chooser', {
         },
         allowedQualityList() {
             return Object.keys(this.qualityStrings)
-                .filter(val => val > ${Quality.NONE});
+                .filter(val => val > ${Quality.NA});
         },
         preferredQualityList() {
             return Object.keys(this.qualityStrings)
-                .filter(val => val > ${Quality.NONE} && val < ${Quality.UNKNOWN});
+                .filter(val => val > ${Quality.NA});
         }
     },
     asyncComputed: {
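The quality-chooser hunk is more than the `NONE` to `NA` rename: the old `preferredQualityList` needed an upper bound because `UNKNOWN` appears to have sat at the top of the quality range as a sentinel bit, while the renumbered scheme keeps it at the low end, so filtering on `val > NA` is enough. A small sketch of the two filters, in Python for brevity, with both sentinel positions treated as assumptions:

    NA = 0
    OLD_UNKNOWN = 1 << 15  # assumed: old top-of-range sentinel bit
    # Assumed old/new quality flag -> label mappings (illustrative subset).
    old_quality_strings = {NA: 'N/A', 1: 'SDTV', OLD_UNKNOWN: 'Unknown'}
    new_quality_strings = {NA: 'N/A', 1: 'Unknown', 2: 'SDTV'}

    # Old filter: exclude N/A and everything from UNKNOWN upward.
    old_preferred = [v for v in old_quality_strings if NA < v < OLD_UNKNOWN]
    # New filter: UNKNOWN is an ordinary low value, so one bound suffices.
    new_preferred = [v for v in new_quality_strings if v > NA]

    assert old_preferred == [1] and new_preferred == [1, 2]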